/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988, 1989, 1991, 1992, 1993, 1995, 1996, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include <ctype.h>		/* XXX for isupper ().  */

#include "defs.h"
#include "frame.h"
#include "inferior.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "gdb_string.h"
#include "dis-asm.h"		/* For register styles.  */
#include "regcache.h"
#include "reggroups.h"
#include "doublest.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2-frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observer.h"

#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "gdb_assert.h"
#include "vec.h"

#include "features/arm-with-m.c"
#include "features/arm-with-iwmmxt.c"
#include "features/arm-with-vfpv2.c"
#include "features/arm-with-vfpv3.c"
#include "features/arm-with-neon.c"

static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *arm_mode_strings[] =
  {
    "auto",
    "arm",
    "thumb",
    NULL
  };

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode has been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc(char *, int,
					struct cmd_list_element *);
static void set_disassembly_style (void);

static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static int thumb_insn_size (unsigned short inst1);

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Addresses for calling Thumb functions have the bit 0 set.
   Here are some macros to test, set, or clear bit 0 of addresses.  */
#define IS_THUMB_ADDR(addr)	 ((addr) & 1)
#define MAKE_THUMB_ADDR(addr)	 ((addr) | 1)
#define UNMAKE_THUMB_ADDR(addr)  ((addr) & ~1)

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

int
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Callback for VEC_lower_bound.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = objfile_data (sec->objfile, arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  return 0;
}

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct obj_section *sec;
  struct minimal_symbol *sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym)
    return (MSYMBOL_IS_SPECIAL (sym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}

/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}

/* When reading symbols, we need to zap the low bit of the address,
   which may be set to 1 for Thumb functions.  */
static CORE_ADDR
arm_smash_text_address (struct gdbarch *gdbarch, CORE_ADDR val)
{
  return val & ~1;
}

/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct minimal_symbol *msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym != NULL
      && SYMBOL_VALUE_ADDRESS (msym) == pc
      && SYMBOL_LINKAGE_NAME (msym) != NULL)
    {
      const char *name = SYMBOL_LINKAGE_NAME (msym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
	return 1;
      if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
	return 1;
      if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}

/* Support routines for instruction parsing.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
#define bit(obj,st) (((obj) >> (st)) & 1)
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from a movw/movt instruction of encoding T.  INSN1 is
   the first 16 bits of the instruction, and INSN2 is the second 16 bits.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from a movw/movt instruction of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))

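/* For example, the A2 encoding of "movw r0, #0x1234" is 0xe3010234, and
   EXTRACT_MOVW_MOVT_IMM_A (0xe3010234) yields (0x1 << 12) | 0x234 == 0x1234.
   Likewise, BranchDest (addr, 0xeafffffe), a branch-to-self, evaluates to
   addr itself: the signed 24-bit offset -2 is scaled by 4 and then offset by
   the 8-byte ARM pipeline adjustment.  */
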
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	       | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}

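/* For example, thumb_expand_immediate (0x1ff) is 0x00ff00ff (the imm8 field
   duplicated in bytes 0 and 2), while thumb_expand_immediate (0x4ff) is
   0x7f800000 (0xff rotated right by 9).  */
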
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    return 1;

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */
    return 1;

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */
    return 1;

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */
    return 1;

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */
    return 1;

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
    return 1;

  return 0;
}

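/* For example, 0xbd10 ("pop {r4, pc}") and 0x4770 ("bx lr") both make
   thumb_instruction_changes_pc return 1, while 0xb510 ("push {r4, lr}")
   does not.  */
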
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.  */

static int
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
{
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	{
	  /* B, BL, BLX.  */
	  return 1;
	}
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	{
	  /* SUBS PC, LR, #imm8.  */
	  return 1;
	}
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	{
	  /* Conditional branch.  */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* LDMIA or POP */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (!bit (inst1, 7) && bit (inst1, 8))
	{
	  /* LDMDB */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (bit (inst1, 7) && bit (inst1, 8))
	{
	  /* RFEIA */
	  return 1;
	}
      else if (!bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* RFEDB */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    {
      /* MOV PC or MOVS PC.  */
      return 1;
    }

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
    {
      /* LDR PC.  */
      if (bits (inst1, 0, 3) == 15)
	return 1;
      if (bit (inst1, 7))
	return 1;
      if (bit (inst2, 11))
	return 1;
      if ((inst2 & 0x0fc0) == 0x0000)
	return 1;

      return 0;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
    {
      /* TBB.  */
      return 1;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
    {
      /* TBH.  */
      return 1;
    }

  return 0;
}

/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

static CORE_ADDR
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int i;
  pv_t regs[16];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR offset;
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  while (start < limit)
    {
      unsigned short insn;

      insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	{
	  int regno;
	  int mask;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						       -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xff00) == 0xb000)	/* add sp, #simm  OR
						   sub sp, #simm */
	{
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  if (insn & 0x80)			/* Check for SUB.  */
	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						   -offset);
	  else
	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						   offset);
	}
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
						    (insn & 0xff) << 2);
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
						   bits (insn, 6, 8));
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
						    bits (insn, 0, 7));
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	{
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
	}
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	{
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
	}
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	{
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;
	  pv_t addr;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[regno]);
	}
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	{
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);
	  pv_t addr;

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[rd]);
	}
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
	;
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	;
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
	;
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
	;
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are
	   necessary on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	{
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;
	  CORE_ADDR loc;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
	}
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	{
	  unsigned short inst2;

	  inst2 = read_memory_unsigned_integer (start + 2, 2,
						byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	    {
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      CORE_ADDR nextpc;
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
		break;
	    }

	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      pv_t addr = regs[bits (insn, 0, 3)];
	      int regno;

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		  {
		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);
		  }

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (insn & 0x0080)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),
			     4, regs[regno2]);

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (inst2 & 0x0200)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);

	      if (inst2 & 0x0100)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr;

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);
	    }

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	    ;

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	    }

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	    }

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	    {
	      unsigned int imm
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	    }

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	    {
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	    }

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (insn, 0, 11);
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	    }

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (insn, 0, 7) << 2;
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	    }

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	    {
	      /* Don't scan past anything that might change control flow.  */
	      break;
	    }
	  else
	    {
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
	    }

	  start += 2;
	}
      else if (thumb_instruction_changes_pc (insn))
	{
	  /* Don't scan past anything that might change control flow.  */
	  break;
	}
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
	}

      start += 2;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  if (cache == NULL)
    {
      do_cleanups (back_to);
      return unrecognized_pc;
    }

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
    }
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
    }
  else
    {
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
    }

  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;
}

/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *BASEREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if instructions are
   not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = bits (insn1, 0, 7);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, #immed */
	{
	  address = bits (insn, 0, 11);
	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}

/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary from different ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int address, basereg;
  struct minimal_symbol *stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* If name of symbol doesn't start with '__stack_chk_guard', this
     instruction sequence is not for stack protector.  If symbol is
     removed, we conservatively think this sequence is for stack
     protector.  */
  if (stack_chk_guard
      && strncmp (SYMBOL_LINKAGE_NAME (stack_chk_guard), "__stack_chk_guard",
		  strlen ("__stack_chk_guard")) != 0)
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}

/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
   stmfd	sp!, {...,fp,ip,lr,pc}
   [stfe	f7, [sp, #-12]!]
   [stfe	f6, [sp, #-12]!]
   [stfe	f5, [sp, #-12]!]
   [stfe	f4, [sp, #-12]!]
   sub		fp, ip, #nn	@@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long inst;
  CORE_ADDR skip_pc;
  CORE_ADDR func_addr, limit_pc;
  struct symtab_and_line sal;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct symtab *s = find_pc_symtab (func_addr);

      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (s == NULL
	      || s->producer == NULL
	      || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);

  for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
    {
      inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);

      /* "mov ip, sp" is no longer a required part of the prologue.  */
      if (inst == 0xe1a0c00d)			/* mov ip, sp */
	continue;

      if ((inst & 0xfffff000) == 0xe28dc000)	/* add ip, sp #n */
	continue;

      if ((inst & 0xfffff000) == 0xe24dc000)	/* sub ip, sp #n */
	continue;

      /* Some prologues begin with "str lr, [sp, #-4]!".  */
      if (inst == 0xe52de004)			/* str lr, [sp, #-4]! */
	continue;

      if ((inst & 0xfffffff0) == 0xe92d0000)	/* stmfd sp!,{a1,a2,a3,a4} */
	continue;

      if ((inst & 0xfffff800) == 0xe92dd800)	/* stmfd sp!,{fp,ip,lr,pc} */
	continue;

      /* Any insns after this point may float into the code, if it makes
	 for better instruction scheduling, so we skip them only if we
	 find them, but still consider the function to be frame-ful.  */

      /* We may have either one sfmfd instruction here, or several stfe
	 insns, depending on the version of floating point code we
	 support.  */
      if ((inst & 0xffbf0fff) == 0xec2d0200)	/* sfmfd fn, <cnt>, [sp]! */
	continue;

      if ((inst & 0xffff8fff) == 0xed6d0103)	/* stfe fn, [sp, #-12]! */
	continue;

      if ((inst & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #nn */
	continue;

      if ((inst & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #nn */
	continue;

      if ((inst & 0xffffc000) == 0xe54b0000	/* strb r(0123),[r11,#-nn] */
	  || (inst & 0xffffc0f0) == 0xe14b00b0	/* strh r(0123),[r11,#-nn] */
	  || (inst & 0xffffc000) == 0xe50b0000)	/* str r(0123),[r11,#-nn] */
	continue;

      if ((inst & 0xffffc000) == 0xe5cd0000	/* strb r(0123),[sp,#nn] */
	  || (inst & 0xffffc0f0) == 0xe1cd00b0	/* strh r(0123),[sp,#nn] */
	  || (inst & 0xffffc000) == 0xe58d0000)	/* str r(0123),[sp,#nn] */
	continue;

      /* Un-recognized instruction; stop scanning.  */
      break;
    }

  return skip_pc;		/* End of prologue.  */
}

/* *INDENT-OFF* */
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->	 0  local variables (16 bytes)
     SP ->	-12 additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */
/* *INDENT-ON* */

static void
thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
{
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;
  CORE_ADDR current_pc;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  prologue_end = min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}

/* Return 1 if THIS_INSTR might change control flow, 0 otherwise.  */

static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	/* Branch with Link and change to Thumb.  */
	return 1;
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      default:
	return 0;
      }
  else
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}

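/* For example, arm_instruction_changes_pc (0xe12fff1e), i.e. "bx lr",
   returns 1 via the miscellaneous-instruction check above, whereas
   arm_instruction_changes_pc (0xe92d4800), i.e. "push {fp, lr}", is a
   store-multiple that does not load PC and so returns 0.  */
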
/* Analyze an ARM mode prologue starting at PROLOGUE_START and
   continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.

   We recognize all the instructions typically found in ARM prologues,
   plus harmless instructions which can be skipped (either for analysis
   purposes, or a more restrictive set that can be skipped when finding
   the end of the prologue).  */

static CORE_ADDR
arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int regno;
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  struct pv_area *stack;
  struct cleanup *back_to;
  int framereg, framesize;
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     traceback.  */

  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  for (current_pc = prologue_start;
       current_pc < prologue_end;
       current_pc += 4)
    {
      unsigned int insn
	= read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)		/* mov ip, sp */
	{
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
	  continue;
	}
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	{
	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
			 regs[bits (insn, 12, 15)]);
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   or
	   stmfd sp!, {a1, a2, a3, a4}  */
	{
	  int mask = insn & 0xffff;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Calculate offsets of saved registers.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM]
		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* No need to add this to saved_regs -- it's just arg regs.  */
	  continue;
	}
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
d4473757 1775 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1776 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1777 }
1778 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1779 {
94c30b78
MS
1780 unsigned imm = insn & 0xff; /* immediate value */
1781 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1782 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1783 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1784 }
0963b4bd
MS
1785 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1786 [sp, -#c]! */
2af46ca0 1787 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1788 {
4be43953
DJ
1789 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1790 break;
1791
1792 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1793 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1794 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1795 }
0963b4bd
MS
1796 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1797 [sp!] */
2af46ca0 1798 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1799 {
1800 int n_saved_fp_regs;
1801 unsigned int fp_start_reg, fp_bound_reg;
1802
4be43953
DJ
1803 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1804 break;
1805
94c30b78 1806 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1807 {
d4473757
KB
1808 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1809 n_saved_fp_regs = 3;
1810 else
1811 n_saved_fp_regs = 1;
96baa820 1812 }
d4473757 1813 else
96baa820 1814 {
d4473757
KB
1815 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1816 n_saved_fp_regs = 2;
1817 else
1818 n_saved_fp_regs = 4;
96baa820 1819 }
d4473757 1820
34e8f22d 1821 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1822 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1823 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1824 {
4be43953
DJ
1825 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1826 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1827 regs[fp_start_reg]);
96baa820 1828 }
c906108c 1829 }
0d39a070
DJ
1830 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1831 {
1832 /* Allow some special function calls when skipping the
1833 prologue; GCC generates these before storing arguments to
1834 the stack. */
1835 CORE_ADDR dest = BranchDest (current_pc, insn);
1836
e0634ccf 1837 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1838 continue;
1839 else
1840 break;
1841 }
d4473757 1842 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1843 break; /* Condition not true, exit early. */
0d39a070
DJ
1844 else if (arm_instruction_changes_pc (insn))
1845 /* Don't scan past anything that might change control flow. */
1846 break;
d19f7eee
UW
1847 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1848 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1849 /* Ignore block loads from the stack, potentially copying
1850 parameters from memory. */
1851 continue;
1852 else if ((insn & 0xfc500000) == 0xe4100000
1853 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1854 /* Similarly ignore single loads from the stack. */
1855 continue;
0d39a070
DJ
1856 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1857 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1858 register instead of the stack. */
d4473757 1859 continue;
0d39a070
DJ
1860 else
1861 {
1862 /* The optimizer might shove anything into the prologue,
1863 so we just skip what we don't recognize. */
1864 unrecognized_pc = current_pc;
1865 continue;
1866 }
c906108c
SS
1867 }
1868
0d39a070
DJ
1869 if (unrecognized_pc == 0)
1870 unrecognized_pc = current_pc;
1871
4be43953
DJ
1872 /* The frame size is just the distance from the frame register
1873 to the original stack pointer. */
1874 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1875 {
1876 /* Frame pointer is fp. */
0d39a070
DJ
1877 framereg = ARM_FP_REGNUM;
1878 framesize = -regs[ARM_FP_REGNUM].k;
4be43953 1879 }
72a2e3dc 1880 else
4be43953
DJ
1881 {
1882 /* Try the stack pointer... this is a bit desperate. */
0d39a070
DJ
1883 framereg = ARM_SP_REGNUM;
1884 framesize = -regs[ARM_SP_REGNUM].k;
4be43953 1885 }
4be43953 1886
0d39a070
DJ
1887 if (cache)
1888 {
1889 cache->framereg = framereg;
1890 cache->framesize = framesize;
1891
1892 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1893 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1894 cache->saved_regs[regno].addr = offset;
1895 }
1896
1897 if (arm_debug)
1898 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1899 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1900
1901 do_cleanups (back_to);
0d39a070
DJ
1902 return unrecognized_pc;
1903}
1904
1905static void
1906arm_scan_prologue (struct frame_info *this_frame,
1907 struct arm_prologue_cache *cache)
1908{
1909 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1910 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1911 int regno;
1912 CORE_ADDR prologue_start, prologue_end, current_pc;
1913 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1914 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1915 pv_t regs[ARM_FPS_REGNUM];
1916 struct pv_area *stack;
1917 struct cleanup *back_to;
1918 CORE_ADDR offset;
1919
1920 /* Assume there is no frame until proven otherwise. */
1921 cache->framereg = ARM_SP_REGNUM;
1922 cache->framesize = 0;
1923
1924 /* Check for Thumb prologue. */
1925 if (arm_frame_is_thumb (this_frame))
1926 {
1927 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1928 return;
1929 }
1930
1931 /* Find the function prologue. If we can't find the function in
1932 the symbol table, peek in the stack frame to find the PC. */
1933 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1934 &prologue_end))
1935 {
1936 /* One way to find the end of the prologue (which works well
1937 for unoptimized code) is to do the following:
1938
1939 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1940
1941 if (sal.line == 0)
1942 prologue_end = prev_pc;
1943 else if (sal.end < prologue_end)
1944 prologue_end = sal.end;
1945
1946 This mechanism is very accurate so long as the optimizer
1947 doesn't move any instructions from the function body into the
1948 prologue. If this happens, sal.end will be the last
1949 instruction in the first hunk of prologue code just before
1950 the first instruction that the scheduler has moved from
1951 the body to the prologue.
1952
1953 In order to make sure that we scan all of the prologue
1954 instructions, we use a slightly less accurate mechanism which
1955 may scan more than necessary. To help compensate for this
1956 lack of accuracy, the prologue scanning loop below contains
1957 several clauses that will cause the loop to terminate early if
1958 an implausible prologue instruction is encountered.
1959
1960 The expression
1961
1962 prologue_start + 64
1963
1964 is a suitable endpoint since it accounts for the largest
1965 possible prologue plus up to five instructions inserted by
1966 the scheduler. */
1967
1968 if (prologue_end > prologue_start + 64)
1969 {
1970 prologue_end = prologue_start + 64; /* See above. */
1971 }
1972 }
1973 else
1974 {
1975 /* We have no symbol information. Our only option is to assume this
1976 function has a standard stack frame and the normal frame register.
1977 Then, we can find the value of our frame pointer on entrance to
1978 the callee (or at the present moment if this is the innermost frame).
1979 The value stored there should be the address of the stmfd + 8. */
1980 CORE_ADDR frame_loc;
1981 LONGEST return_value;
1982
1983 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1984 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1985 return;
1986 else
1987 {
1988 prologue_start = gdbarch_addr_bits_remove
1989 (gdbarch, return_value) - 8;
1990 prologue_end = prologue_start + 64; /* See above. */
1991 }
1992 }
1993
1994 if (prev_pc < prologue_end)
1995 prologue_end = prev_pc;
1996
1997 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1998}
1999
eb5492fa 2000static struct arm_prologue_cache *
a262aec2 2001arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 2002{
eb5492fa
DJ
2003 int reg;
2004 struct arm_prologue_cache *cache;
2005 CORE_ADDR unwound_fp;
c5aa993b 2006
35d5d4ee 2007 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2008 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 2009
a262aec2 2010 arm_scan_prologue (this_frame, cache);
848cfffb 2011
a262aec2 2012 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
2013 if (unwound_fp == 0)
2014 return cache;
c906108c 2015
4be43953 2016 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 2017
eb5492fa
DJ
2018 /* Calculate actual addresses of saved registers using offsets
2019 determined by arm_scan_prologue. */
a262aec2 2020 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 2021 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
2022 cache->saved_regs[reg].addr += cache->prev_sp;
2023
2024 return cache;
c906108c
SS
2025}
2026
eb5492fa
DJ
2027/* Our frame ID for a normal frame is the current function's starting PC
2028 and the caller's SP when we were called. */
c906108c 2029
148754e5 2030static void
a262aec2 2031arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
2032 void **this_cache,
2033 struct frame_id *this_id)
c906108c 2034{
eb5492fa
DJ
2035 struct arm_prologue_cache *cache;
2036 struct frame_id id;
2c404490 2037 CORE_ADDR pc, func;
f079148d 2038
eb5492fa 2039 if (*this_cache == NULL)
a262aec2 2040 *this_cache = arm_make_prologue_cache (this_frame);
eb5492fa 2041 cache = *this_cache;
2a451106 2042
2c404490
DJ
2043 /* This is meant to halt the backtrace at "_start". */
2044 pc = get_frame_pc (this_frame);
2045 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
eb5492fa 2046 return;
5a203e44 2047
eb5492fa
DJ
2048 /* If we've hit a wall, stop. */
2049 if (cache->prev_sp == 0)
2050 return;
24de872b 2051
0e9e9abd
UW
2052 /* Use function start address as part of the frame ID. If we cannot
2053 identify the start address (due to missing symbol information),
2054 fall back to just using the current PC. */
2c404490 2055 func = get_frame_func (this_frame);
0e9e9abd
UW
2056 if (!func)
2057 func = pc;
2058
eb5492fa 2059 id = frame_id_build (cache->prev_sp, func);
eb5492fa 2060 *this_id = id;
c906108c
SS
2061}
2062
a262aec2
DJ
2063static struct value *
2064arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 2065 void **this_cache,
a262aec2 2066 int prev_regnum)
24de872b 2067{
24568a2c 2068 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
2069 struct arm_prologue_cache *cache;
2070
eb5492fa 2071 if (*this_cache == NULL)
a262aec2 2072 *this_cache = arm_make_prologue_cache (this_frame);
eb5492fa 2073 cache = *this_cache;
24de872b 2074
eb5492fa 2075 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
2076 instead. The prologue may save PC, but it will point into this
2077 frame's prologue, not the next frame's resume location. Also
2078 strip the saved T bit. A valid LR may have the low bit set, but
2079 a valid PC never does. */
eb5492fa 2080 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
2081 {
2082 CORE_ADDR lr;
2083
2084 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2085 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 2086 arm_addr_bits_remove (gdbarch, lr));
b39cc962 2087 }
24de872b 2088
eb5492fa 2089 /* SP is generally not saved to the stack, but this frame is
a262aec2 2090 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
2091 The value was already reconstructed into PREV_SP. */
2092 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 2093 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 2094
b39cc962
DJ
2095 /* The CPSR may have been changed by the call instruction and by the
2096 called function. The only bit we can reconstruct is the T bit,
2097 by checking the low bit of LR as of the call. This is a reliable
2098 indicator of Thumb-ness except for some ARM v4T pre-interworking
2099 Thumb code, which could get away with a clear low bit as long as
2100 the called function did not use bx. Guess that all other
2101 bits are unchanged; the condition flags are presumably lost,
2102 but the processor status is likely valid. */
2103 if (prev_regnum == ARM_PS_REGNUM)
2104 {
2105 CORE_ADDR lr, cpsr;
9779414d 2106 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
2107
2108 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2109 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2110 if (IS_THUMB_ADDR (lr))
9779414d 2111 cpsr |= t_bit;
b39cc962 2112 else
9779414d 2113 cpsr &= ~t_bit;
b39cc962
DJ
2114 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2115 }
2116
a262aec2
DJ
2117 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2118 prev_regnum);
eb5492fa
DJ
2119}
2120
2121struct frame_unwind arm_prologue_unwind = {
2122 NORMAL_FRAME,
8fbca658 2123 default_frame_unwind_stop_reason,
eb5492fa 2124 arm_prologue_this_id,
a262aec2
DJ
2125 arm_prologue_prev_register,
2126 NULL,
2127 default_frame_sniffer
eb5492fa
DJ
2128};
2129
0e9e9abd
UW
2130/* Maintain a list of ARM exception table entries per objfile, similar to the
2131 list of mapping symbols. We only cache entries for standard ARM-defined
2132 personality routines; the cache will contain only the frame unwinding
2133 instructions associated with the entry (not the descriptors). */
2134
2135static const struct objfile_data *arm_exidx_data_key;
2136
2137struct arm_exidx_entry
2138{
2139 bfd_vma addr;
2140 gdb_byte *entry;
2141};
2142typedef struct arm_exidx_entry arm_exidx_entry_s;
2143DEF_VEC_O(arm_exidx_entry_s);
2144
2145struct arm_exidx_data
2146{
2147 VEC(arm_exidx_entry_s) **section_maps;
2148};
2149
2150static void
2151arm_exidx_data_free (struct objfile *objfile, void *arg)
2152{
2153 struct arm_exidx_data *data = arg;
2154 unsigned int i;
2155
2156 for (i = 0; i < objfile->obfd->section_count; i++)
2157 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2158}
2159
2160static inline int
2161arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2162 const struct arm_exidx_entry *rhs)
2163{
2164 return lhs->addr < rhs->addr;
2165}
2166
2167static struct obj_section *
2168arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2169{
2170 struct obj_section *osect;
2171
2172 ALL_OBJFILE_OSECTIONS (objfile, osect)
2173 if (bfd_get_section_flags (objfile->obfd,
2174 osect->the_bfd_section) & SEC_ALLOC)
2175 {
2176 bfd_vma start, size;
2177 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2178 size = bfd_get_section_size (osect->the_bfd_section);
2179
2180 if (start <= vma && vma < start + size)
2181 return osect;
2182 }
2183
2184 return NULL;
2185}
2186
2187/* Parse contents of exception table and exception index sections
2188 of OBJFILE, and fill in the exception table entry cache.
2189
2190 For each entry that refers to a standard ARM-defined personality
2191 routine, extract the frame unwinding instructions (from either
2192 the index or the table section). The unwinding instructions
2193 are normalized by:
2194 - extracting them from the rest of the table data
2195 - converting to host endianness
2196 - appending the implicit 0xb0 ("Finish") code
2197
2198 The extracted and normalized instructions are stored for later
2199 retrieval by the arm_find_exidx_entry routine. */
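/* Editorial note (illustrative, not part of the original source): each
   .ARM.exidx entry is a pair of 32-bit words.  The first word is a
   prel31 offset to the start of the function it covers.  The second
   word is either the value 1 (EXIDX_CANTUNWIND), an inline "short form"
   entry with bit 31 set and the unwind opcodes in its low 24 bits, or a
   prel31 offset to a table entry in .ARM.extab -- the three cases
   decoded by arm_exidx_new_objfile below.  */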
2200
2201static void
2202arm_exidx_new_objfile (struct objfile *objfile)
2203{
3bb47e8b 2204 struct cleanup *cleanups;
0e9e9abd
UW
2205 struct arm_exidx_data *data;
2206 asection *exidx, *extab;
2207 bfd_vma exidx_vma = 0, extab_vma = 0;
2208 bfd_size_type exidx_size = 0, extab_size = 0;
2209 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2210 LONGEST i;
2211
2212 /* If we've already touched this file, do nothing. */
2213 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2214 return;
3bb47e8b 2215 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2216
2217 /* Read contents of exception table and index. */
2218 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2219 if (exidx)
2220 {
2221 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2222 exidx_size = bfd_get_section_size (exidx);
2223 exidx_data = xmalloc (exidx_size);
2224 make_cleanup (xfree, exidx_data);
2225
2226 if (!bfd_get_section_contents (objfile->obfd, exidx,
2227 exidx_data, 0, exidx_size))
2228 {
2229 do_cleanups (cleanups);
2230 return;
2231 }
2232 }
2233
2234 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2235 if (extab)
2236 {
2237 extab_vma = bfd_section_vma (objfile->obfd, extab);
2238 extab_size = bfd_get_section_size (extab);
2239 extab_data = xmalloc (extab_size);
2240 make_cleanup (xfree, extab_data);
2241
2242 if (!bfd_get_section_contents (objfile->obfd, extab,
2243 extab_data, 0, extab_size))
2244 {
2245 do_cleanups (cleanups);
2246 return;
2247 }
2248 }
2249
2250 /* Allocate exception table data structure. */
2251 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2252 set_objfile_data (objfile, arm_exidx_data_key, data);
2253 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2254 objfile->obfd->section_count,
2255 VEC(arm_exidx_entry_s) *);
2256
2257 /* Fill in exception table. */
2258 for (i = 0; i < exidx_size / 8; i++)
2259 {
2260 struct arm_exidx_entry new_exidx_entry;
2261 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2262 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2263 bfd_vma addr = 0, word = 0;
2264 int n_bytes = 0, n_words = 0;
2265 struct obj_section *sec;
2266 gdb_byte *entry = NULL;
2267
2268 /* Extract address of start of function. */
2269 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2270 idx += exidx_vma + i * 8;
2271
2272 /* Find section containing function and compute section offset. */
2273 sec = arm_obj_section_from_vma (objfile, idx);
2274 if (sec == NULL)
2275 continue;
2276 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2277
2278 /* Determine address of exception table entry. */
2279 if (val == 1)
2280 {
2281 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2282 }
2283 else if ((val & 0xff000000) == 0x80000000)
2284 {
2285 /* Exception table entry embedded in .ARM.exidx
2286 -- must be short form. */
2287 word = val;
2288 n_bytes = 3;
2289 }
2290 else if (!(val & 0x80000000))
2291 {
2292 /* Exception table entry in .ARM.extab. */
2293 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2294 addr += exidx_vma + i * 8 + 4;
2295
2296 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2297 {
2298 word = bfd_h_get_32 (objfile->obfd,
2299 extab_data + addr - extab_vma);
2300 addr += 4;
2301
2302 if ((word & 0xff000000) == 0x80000000)
2303 {
2304 /* Short form. */
2305 n_bytes = 3;
2306 }
2307 else if ((word & 0xff000000) == 0x81000000
2308 || (word & 0xff000000) == 0x82000000)
2309 {
2310 /* Long form. */
2311 n_bytes = 2;
2312 n_words = ((word >> 16) & 0xff);
2313 }
2314 else if (!(word & 0x80000000))
2315 {
2316 bfd_vma pers;
2317 struct obj_section *pers_sec;
2318 int gnu_personality = 0;
2319
2320 /* Custom personality routine. */
2321 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2322 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2323
2324 /* Check whether we've got one of the variants of the
2325 GNU personality routines. */
2326 pers_sec = arm_obj_section_from_vma (objfile, pers);
2327 if (pers_sec)
2328 {
2329 static const char *personality[] =
2330 {
2331 "__gcc_personality_v0",
2332 "__gxx_personality_v0",
2333 "__gcj_personality_v0",
2334 "__gnu_objc_personality_v0",
2335 NULL
2336 };
2337
2338 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2339 int k;
2340
2341 for (k = 0; personality[k]; k++)
2342 if (lookup_minimal_symbol_by_pc_name
2343 (pc, personality[k], objfile))
2344 {
2345 gnu_personality = 1;
2346 break;
2347 }
2348 }
2349
2350 /* If so, the next word contains a word count in the high
2351 byte, followed by the same unwind instructions as the
2352 pre-defined forms. */
2353 if (gnu_personality
2354 && addr + 4 <= extab_vma + extab_size)
2355 {
2356 word = bfd_h_get_32 (objfile->obfd,
2357 extab_data + addr - extab_vma);
2358 addr += 4;
2359 n_bytes = 3;
2360 n_words = ((word >> 24) & 0xff);
2361 }
2362 }
2363 }
2364 }
2365
2366 /* Sanity check address. */
2367 if (n_words)
2368 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2369 n_words = n_bytes = 0;
2370
2371 /* The unwind instructions reside in WORD (only the N_BYTES least
2372 significant bytes are valid), followed by N_WORDS words in the
2373 extab section starting at ADDR. */
2374 if (n_bytes || n_words)
2375 {
2376 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2377 n_bytes + n_words * 4 + 1);
2378
2379 while (n_bytes--)
2380 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2381
2382 while (n_words--)
2383 {
2384 word = bfd_h_get_32 (objfile->obfd,
2385 extab_data + addr - extab_vma);
2386 addr += 4;
2387
2388 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2389 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2390 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2391 *p++ = (gdb_byte) (word & 0xff);
2392 }
2393
2394 /* Implied "Finish" to terminate the list. */
2395 *p++ = 0xb0;
2396 }
2397
2398 /* Push the entry onto the vector. Entries are guaranteed to
2399 always appear in order of increasing addresses. */
2400 new_exidx_entry.addr = idx;
2401 new_exidx_entry.entry = entry;
2402 VEC_safe_push (arm_exidx_entry_s,
2403 data->section_maps[sec->the_bfd_section->index],
2404 &new_exidx_entry);
2405 }
2406
2407 do_cleanups (cleanups);
2408}
2409
2410/* Search for the exception table entry covering MEMADDR. If one is found,
2411 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2412 set *START to the start of the region covered by this entry. */
2413
2414static gdb_byte *
2415arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2416{
2417 struct obj_section *sec;
2418
2419 sec = find_pc_section (memaddr);
2420 if (sec != NULL)
2421 {
2422 struct arm_exidx_data *data;
2423 VEC(arm_exidx_entry_s) *map;
2424 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2425 unsigned int idx;
2426
2427 data = objfile_data (sec->objfile, arm_exidx_data_key);
2428 if (data != NULL)
2429 {
2430 map = data->section_maps[sec->the_bfd_section->index];
2431 if (!VEC_empty (arm_exidx_entry_s, map))
2432 {
2433 struct arm_exidx_entry *map_sym;
2434
2435 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2436 arm_compare_exidx_entries);
2437
2438 /* VEC_lower_bound finds the earliest ordered insertion
2439 point. If the following symbol starts at this exact
2440 address, we use that; otherwise, the preceding
2441 exception table entry covers this address. */
2442 if (idx < VEC_length (arm_exidx_entry_s, map))
2443 {
2444 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2445 if (map_sym->addr == map_key.addr)
2446 {
2447 if (start)
2448 *start = map_sym->addr + obj_section_addr (sec);
2449 return map_sym->entry;
2450 }
2451 }
2452
2453 if (idx > 0)
2454 {
2455 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2456 if (start)
2457 *start = map_sym->addr + obj_section_addr (sec);
2458 return map_sym->entry;
2459 }
2460 }
2461 }
2462 }
2463
2464 return NULL;
2465}
2466
2467/* Given the current frame THIS_FRAME, and its associated frame unwinding
2468 instruction list from the ARM exception table entry ENTRY, allocate and
2469 return a prologue cache structure describing how to unwind this frame.
2470
2471 Return NULL if the unwinding instruction list contains a "spare",
2472 "reserved" or "refuse to unwind" instruction as defined in section
2473 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2474 for the ARM Architecture" document. */
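/* Editorial example (hypothetical encoding, for illustration only): the
   opcode sequence { 0x97, 0xa8, 0xb0 } would be decoded below as

       0x97   vsp = r7          (set VSP from register 7)
       0xa8   pop {r4, r14}     (pop r4..r4 and LR)
       0xb0   finish            (copy LR to PC unless PC was popped)

   matching a function that used r7 as a frame pointer and saved only
   r4 and LR.  */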
2475
2476static struct arm_prologue_cache *
2477arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2478{
2479 CORE_ADDR vsp = 0;
2480 int vsp_valid = 0;
2481
2482 struct arm_prologue_cache *cache;
2483 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2484 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2485
2486 for (;;)
2487 {
2488 gdb_byte insn;
2489
2490 /* Whenever we reload SP, we actually have to retrieve its
2491 actual value in the current frame. */
2492 if (!vsp_valid)
2493 {
2494 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2495 {
2496 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2497 vsp = get_frame_register_unsigned (this_frame, reg);
2498 }
2499 else
2500 {
2501 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2502 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2503 }
2504
2505 vsp_valid = 1;
2506 }
2507
2508 /* Decode next unwind instruction. */
2509 insn = *entry++;
2510
2511 if ((insn & 0xc0) == 0)
2512 {
2513 int offset = insn & 0x3f;
2514 vsp += (offset << 2) + 4;
2515 }
2516 else if ((insn & 0xc0) == 0x40)
2517 {
2518 int offset = insn & 0x3f;
2519 vsp -= (offset << 2) + 4;
2520 }
2521 else if ((insn & 0xf0) == 0x80)
2522 {
2523 int mask = ((insn & 0xf) << 8) | *entry++;
2524 int i;
2525
2526 /* The special case of an all-zero mask identifies
2527 "Refuse to unwind". We return NULL to fall back
2528 to the prologue analyzer. */
2529 if (mask == 0)
2530 return NULL;
2531
2532 /* Pop registers r4..r15 under mask. */
2533 for (i = 0; i < 12; i++)
2534 if (mask & (1 << i))
2535 {
2536 cache->saved_regs[4 + i].addr = vsp;
2537 vsp += 4;
2538 }
2539
2540 /* Special-case popping SP -- we need to reload vsp. */
2541 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2542 vsp_valid = 0;
2543 }
2544 else if ((insn & 0xf0) == 0x90)
2545 {
2546 int reg = insn & 0xf;
2547
2548 /* Reserved cases. */
2549 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2550 return NULL;
2551
2552 /* Set SP from another register and mark VSP for reload. */
2553 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2554 vsp_valid = 0;
2555 }
2556 else if ((insn & 0xf0) == 0xa0)
2557 {
2558 int count = insn & 0x7;
2559 int pop_lr = (insn & 0x8) != 0;
2560 int i;
2561
2562 /* Pop r4..r[4+count]. */
2563 for (i = 0; i <= count; i++)
2564 {
2565 cache->saved_regs[4 + i].addr = vsp;
2566 vsp += 4;
2567 }
2568
2569 /* If indicated by flag, pop LR as well. */
2570 if (pop_lr)
2571 {
2572 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2573 vsp += 4;
2574 }
2575 }
2576 else if (insn == 0xb0)
2577 {
2578 /* We could only have updated PC by popping into it; if so, it
2579 will show up as an address. Otherwise, copy LR into PC. */
2580 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2581 cache->saved_regs[ARM_PC_REGNUM]
2582 = cache->saved_regs[ARM_LR_REGNUM];
2583
2584 /* We're done. */
2585 break;
2586 }
2587 else if (insn == 0xb1)
2588 {
2589 int mask = *entry++;
2590 int i;
2591
2592 /* All-zero mask and mask >= 16 is "spare". */
2593 if (mask == 0 || mask >= 16)
2594 return NULL;
2595
2596 /* Pop r0..r3 under mask. */
2597 for (i = 0; i < 4; i++)
2598 if (mask & (1 << i))
2599 {
2600 cache->saved_regs[i].addr = vsp;
2601 vsp += 4;
2602 }
2603 }
2604 else if (insn == 0xb2)
2605 {
2606 ULONGEST offset = 0;
2607 unsigned shift = 0;
2608
2609 do
2610 {
2611 offset |= (*entry & 0x7f) << shift;
2612 shift += 7;
2613 }
2614 while (*entry++ & 0x80);
2615
2616 vsp += 0x204 + (offset << 2);
2617 }
2618 else if (insn == 0xb3)
2619 {
2620 int start = *entry >> 4;
2621 int count = (*entry++) & 0xf;
2622 int i;
2623
2624 /* Only registers D0..D15 are valid here. */
2625 if (start + count >= 16)
2626 return NULL;
2627
2628 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2629 for (i = 0; i <= count; i++)
2630 {
2631 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2632 vsp += 8;
2633 }
2634
2635 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2636 vsp += 4;
2637 }
2638 else if ((insn & 0xf8) == 0xb8)
2639 {
2640 int count = insn & 0x7;
2641 int i;
2642
2643 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2644 for (i = 0; i <= count; i++)
2645 {
2646 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2647 vsp += 8;
2648 }
2649
2650 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2651 vsp += 4;
2652 }
2653 else if (insn == 0xc6)
2654 {
2655 int start = *entry >> 4;
2656 int count = (*entry++) & 0xf;
2657 int i;
2658
2659 /* Only registers WR0..WR15 are valid. */
2660 if (start + count >= 16)
2661 return NULL;
2662
2663 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2664 for (i = 0; i <= count; i++)
2665 {
2666 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2667 vsp += 8;
2668 }
2669 }
2670 else if (insn == 0xc7)
2671 {
2672 int mask = *entry++;
2673 int i;
2674
2675 /* All-zero mask and mask >= 16 is "spare". */
2676 if (mask == 0 || mask >= 16)
2677 return NULL;
2678
2679 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2680 for (i = 0; i < 4; i++)
2681 if (mask & (1 << i))
2682 {
2683 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2684 vsp += 4;
2685 }
2686 }
2687 else if ((insn & 0xf8) == 0xc0)
2688 {
2689 int count = insn & 0x7;
2690 int i;
2691
2692 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2693 for (i = 0; i <= count; i++)
2694 {
2695 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2696 vsp += 8;
2697 }
2698 }
2699 else if (insn == 0xc8)
2700 {
2701 int start = *entry >> 4;
2702 int count = (*entry++) & 0xf;
2703 int i;
2704
2705 /* Only registers D0..D31 are valid. */
2706 if (start + count >= 16)
2707 return NULL;
2708
2709 /* Pop VFP double-precision registers
2710 D[16+start]..D[16+start+count]. */
2711 for (i = 0; i <= count; i++)
2712 {
2713 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2714 vsp += 8;
2715 }
2716 }
2717 else if (insn == 0xc9)
2718 {
2719 int start = *entry >> 4;
2720 int count = (*entry++) & 0xf;
2721 int i;
2722
2723 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2724 for (i = 0; i <= count; i++)
2725 {
2726 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2727 vsp += 8;
2728 }
2729 }
2730 else if ((insn & 0xf8) == 0xd0)
2731 {
2732 int count = insn & 0x7;
2733 int i;
2734
2735 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2736 for (i = 0; i <= count; i++)
2737 {
2738 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2739 vsp += 8;
2740 }
2741 }
2742 else
2743 {
2744 /* Everything else is "spare". */
2745 return NULL;
2746 }
2747 }
2748
2749 /* If we restore SP from a register, assume this was the frame register.
2750 Otherwise just fall back to SP as frame register. */
2751 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2752 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2753 else
2754 cache->framereg = ARM_SP_REGNUM;
2755
2756 /* Determine offset to previous frame. */
2757 cache->framesize
2758 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2759
2760 /* We already got the previous SP. */
2761 cache->prev_sp = vsp;
2762
2763 return cache;
2764}
2765
2766/* Unwinding via ARM exception table entries. Note that the sniffer
2767 already computes a filled-in prologue cache, which is then used
2768 with the same arm_prologue_this_id and arm_prologue_prev_register
2769 routines also used for prologue-parsing based unwinding. */
2770
2771static int
2772arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2773 struct frame_info *this_frame,
2774 void **this_prologue_cache)
2775{
2776 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2777 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2778 CORE_ADDR addr_in_block, exidx_region, func_start;
2779 struct arm_prologue_cache *cache;
2780 gdb_byte *entry;
2781
2782 /* See if we have an ARM exception table entry covering this address. */
2783 addr_in_block = get_frame_address_in_block (this_frame);
2784 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2785 if (!entry)
2786 return 0;
2787
2788 /* The ARM exception table does not describe unwind information
2789 for arbitrary PC values, but is guaranteed to be correct only
2790 at call sites. We have to decide here whether we want to use
2791 ARM exception table information for this frame, or fall back
2792 to using prologue parsing. (Note that if we have DWARF CFI,
2793 this sniffer isn't even called -- CFI is always preferred.)
2794
2795 Before we make this decision, however, we check whether we
2796 actually have *symbol* information for the current frame.
2797 If not, prologue parsing would not work anyway, so we might
2798 as well use the exception table and hope for the best. */
2799 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2800 {
2801 int exc_valid = 0;
2802
2803 /* If the next frame is "normal", we are at a call site in this
2804 frame, so exception information is guaranteed to be valid. */
2805 if (get_next_frame (this_frame)
2806 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2807 exc_valid = 1;
2808
2809 /* We also assume exception information is valid if we're currently
2810 blocked in a system call. The system library is supposed to
2811 ensure this, so that e.g. pthread cancellation works. */
2812 if (arm_frame_is_thumb (this_frame))
2813 {
2814 LONGEST insn;
2815
2816 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2817 byte_order_for_code, &insn)
2818 && (insn & 0xff00) == 0xdf00 /* svc */)
2819 exc_valid = 1;
2820 }
2821 else
2822 {
2823 LONGEST insn;
2824
2825 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2826 byte_order_for_code, &insn)
2827 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2828 exc_valid = 1;
2829 }
2830
2831 /* Bail out if we don't know that exception information is valid. */
2832 if (!exc_valid)
2833 return 0;
2834
2835 /* The ARM exception index does not mark the *end* of the region
2836 covered by the entry, and some functions will not have any entry.
2837 To correctly recognize the end of the covered region, the linker
2838 should have inserted dummy records with a CANTUNWIND marker.
2839
2840 Unfortunately, current versions of GNU ld do not reliably do
2841 this, and thus we may have found an incorrect entry above.
2842 As a (temporary) sanity check, we only use the entry if it
2843 lies *within* the bounds of the function. Note that this check
2844 might reject perfectly valid entries that just happen to cover
2845 multiple functions; therefore this check ought to be removed
2846 once the linker is fixed. */
2847 if (func_start > exidx_region)
2848 return 0;
2849 }
2850
2851 /* Decode the list of unwinding instructions into a prologue cache.
2852 Note that this may fail due to e.g. a "refuse to unwind" code. */
2853 cache = arm_exidx_fill_cache (this_frame, entry);
2854 if (!cache)
2855 return 0;
2856
2857 *this_prologue_cache = cache;
2858 return 1;
2859}
2860
2861struct frame_unwind arm_exidx_unwind = {
2862 NORMAL_FRAME,
8fbca658 2863 default_frame_unwind_stop_reason,
0e9e9abd
UW
2864 arm_prologue_this_id,
2865 arm_prologue_prev_register,
2866 NULL,
2867 arm_exidx_unwind_sniffer
2868};
2869
909cf6ea 2870static struct arm_prologue_cache *
a262aec2 2871arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2872{
909cf6ea 2873 struct arm_prologue_cache *cache;
909cf6ea 2874
35d5d4ee 2875 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2876 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2877
a262aec2 2878 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2879
2880 return cache;
2881}
2882
2883/* Our frame ID for a stub frame is the current SP and LR. */
2884
2885static void
a262aec2 2886arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2887 void **this_cache,
2888 struct frame_id *this_id)
2889{
2890 struct arm_prologue_cache *cache;
2891
2892 if (*this_cache == NULL)
a262aec2 2893 *this_cache = arm_make_stub_cache (this_frame);
909cf6ea
DJ
2894 cache = *this_cache;
2895
a262aec2 2896 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2897}
2898
a262aec2
DJ
2899static int
2900arm_stub_unwind_sniffer (const struct frame_unwind *self,
2901 struct frame_info *this_frame,
2902 void **this_prologue_cache)
909cf6ea 2903{
93d42b30 2904 CORE_ADDR addr_in_block;
909cf6ea
DJ
2905 char dummy[4];
2906
a262aec2 2907 addr_in_block = get_frame_address_in_block (this_frame);
93d42b30 2908 if (in_plt_section (addr_in_block, NULL)
fc36e839
DE
2909 /* We also use the stub unwinder if the target memory is unreadable
2910 to avoid having the prologue unwinder try to read it. */
a262aec2
DJ
2911 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
2912 return 1;
909cf6ea 2913
a262aec2 2914 return 0;
909cf6ea
DJ
2915}
2916
a262aec2
DJ
2917struct frame_unwind arm_stub_unwind = {
2918 NORMAL_FRAME,
8fbca658 2919 default_frame_unwind_stop_reason,
a262aec2
DJ
2920 arm_stub_this_id,
2921 arm_prologue_prev_register,
2922 NULL,
2923 arm_stub_unwind_sniffer
2924};
2925
24de872b 2926static CORE_ADDR
a262aec2 2927arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
2928{
2929 struct arm_prologue_cache *cache;
2930
eb5492fa 2931 if (*this_cache == NULL)
a262aec2 2932 *this_cache = arm_make_prologue_cache (this_frame);
eb5492fa
DJ
2933 cache = *this_cache;
2934
4be43953 2935 return cache->prev_sp - cache->framesize;
24de872b
DJ
2936}
2937
eb5492fa
DJ
2938struct frame_base arm_normal_base = {
2939 &arm_prologue_unwind,
2940 arm_normal_frame_base,
2941 arm_normal_frame_base,
2942 arm_normal_frame_base
2943};
2944
a262aec2 2945/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
2946 dummy frame. The frame ID's base needs to match the TOS value
2947 saved by save_dummy_frame_tos() and returned from
2948 arm_push_dummy_call, and the PC needs to match the dummy frame's
2949 breakpoint. */
c906108c 2950
eb5492fa 2951static struct frame_id
a262aec2 2952arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 2953{
0963b4bd
MS
2954 return frame_id_build (get_frame_register_unsigned (this_frame,
2955 ARM_SP_REGNUM),
a262aec2 2956 get_frame_pc (this_frame));
eb5492fa 2957}
c3b4394c 2958
eb5492fa
DJ
2959/* Given THIS_FRAME, find the previous frame's resume PC (which will
2960 be used to construct the previous frame's ID, after looking up the
2961 containing function). */
c3b4394c 2962
eb5492fa
DJ
2963static CORE_ADDR
2964arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2965{
2966 CORE_ADDR pc;
2967 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 2968 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
2969}
2970
2971static CORE_ADDR
2972arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2973{
2974 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
2975}
2976
b39cc962
DJ
2977static struct value *
2978arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2979 int regnum)
2980{
24568a2c 2981 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 2982 CORE_ADDR lr, cpsr;
9779414d 2983 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
2984
2985 switch (regnum)
2986 {
2987 case ARM_PC_REGNUM:
2988 /* The PC is normally copied from the return column, which
2989 describes saves of LR. However, that version may have an
2990 extra bit set to indicate Thumb state. The bit is not
2991 part of the PC. */
2992 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2993 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 2994 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
2995
2996 case ARM_PS_REGNUM:
2997 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 2998 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
2999 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3000 if (IS_THUMB_ADDR (lr))
9779414d 3001 cpsr |= t_bit;
b39cc962 3002 else
9779414d 3003 cpsr &= ~t_bit;
ca38c58e 3004 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3005
3006 default:
3007 internal_error (__FILE__, __LINE__,
3008 _("Unexpected register %d"), regnum);
3009 }
3010}
3011
3012static void
3013arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3014 struct dwarf2_frame_state_reg *reg,
3015 struct frame_info *this_frame)
3016{
3017 switch (regnum)
3018 {
3019 case ARM_PC_REGNUM:
3020 case ARM_PS_REGNUM:
3021 reg->how = DWARF2_FRAME_REG_FN;
3022 reg->loc.fn = arm_dwarf2_prev_register;
3023 break;
3024 case ARM_SP_REGNUM:
3025 reg->how = DWARF2_FRAME_REG_CFA;
3026 break;
3027 }
3028}
3029
4024ca99
UW
3030/* Return true if we are in the function's epilogue, i.e. after the
3031 instruction that destroyed the function's stack frame. */
3032
3033static int
3034thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3035{
3036 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3037 unsigned int insn, insn2;
3038 int found_return = 0, found_stack_adjust = 0;
3039 CORE_ADDR func_start, func_end;
3040 CORE_ADDR scan_pc;
3041 gdb_byte buf[4];
3042
3043 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3044 return 0;
3045
3046 /* The epilogue is a sequence of instructions along the following lines:
3047
3048 - add stack frame size to SP or FP
3049 - [if frame pointer used] restore SP from FP
3050 - restore registers from SP [may include PC]
3051 - a return-type instruction [if PC wasn't already restored]
3052
3053 In a first pass, we scan forward from the current PC and verify that
3054 the instructions we find are compatible with this sequence, ending in a
3055 return instruction.
3056
3057 However, this is not sufficient to distinguish indirect function calls
3058 within a function from indirect tail calls in the epilogue in some cases.
3059 Therefore, if we didn't already find any SP-changing instruction during
3060 the forward scan, we add a backward-scanning heuristic to ensure we actually
3061 are in the epilogue. */
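 /* Editorial example (illustrative, not part of the original source):
    a typical Thumb epilogue recognized by the forward scan is

        mov  sp, r7           ; 0x46bd, restore SP from the frame pointer
        pop  {r4-r7, pc}      ; 0xbdf0, restore registers and return

    while a frameless function might end with

        add  sp, #8           ; 0xb002
        pop  {r4, pc}         ; 0xbd10
 */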
3062
3063 scan_pc = pc;
3064 while (scan_pc < func_end && !found_return)
3065 {
3066 if (target_read_memory (scan_pc, buf, 2))
3067 break;
3068
3069 scan_pc += 2;
3070 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3071
3072 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3073 found_return = 1;
3074 else if (insn == 0x46f7) /* mov pc, lr */
3075 found_return = 1;
3076 else if (insn == 0x46bd) /* mov sp, r7 */
3077 found_stack_adjust = 1;
3078 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3079 found_stack_adjust = 1;
3080 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3081 {
3082 found_stack_adjust = 1;
3083 if (insn & 0x0100) /* <registers> include PC. */
3084 found_return = 1;
3085 }
db24da6d 3086 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3087 {
3088 if (target_read_memory (scan_pc, buf, 2))
3089 break;
3090
3091 scan_pc += 2;
3092 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3093
3094 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3095 {
3096 found_stack_adjust = 1;
3097 if (insn2 & 0x8000) /* <registers> include PC. */
3098 found_return = 1;
3099 }
3100 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3101 && (insn2 & 0x0fff) == 0x0b04)
3102 {
3103 found_stack_adjust = 1;
3104 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3105 found_return = 1;
3106 }
3107 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3108 && (insn2 & 0x0e00) == 0x0a00)
3109 found_stack_adjust = 1;
3110 else
3111 break;
3112 }
3113 else
3114 break;
3115 }
3116
3117 if (!found_return)
3118 return 0;
3119
3120 /* Since any instruction in the epilogue sequence, with the possible
3121 exception of return itself, updates the stack pointer, we need to
3122 scan backwards for at most one instruction. Try either a 16-bit or
3123 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3124 too much about false positives. */
4024ca99
UW
3125
3126 if (!found_stack_adjust)
3127 {
3128 if (pc - 4 < func_start)
3129 return 0;
3130 if (target_read_memory (pc - 4, buf, 4))
3131 return 0;
3132
3133 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3134 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3135
3136 if (insn2 == 0x46bd) /* mov sp, r7 */
3137 found_stack_adjust = 1;
3138 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3139 found_stack_adjust = 1;
3140 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3141 found_stack_adjust = 1;
3142 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3143 found_stack_adjust = 1;
3144 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3145 && (insn2 & 0x0fff) == 0x0b04)
3146 found_stack_adjust = 1;
3147 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3148 && (insn2 & 0x0e00) == 0x0a00)
3149 found_stack_adjust = 1;
3150 }
3151
3152 return found_stack_adjust;
3153}
3154
3155/* Return true if we are in the function's epilogue, i.e. after the
3156 instruction that destroyed the function's stack frame. */
3157
3158static int
3159arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3160{
3161 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3162 unsigned int insn;
3163 int found_return, found_stack_adjust;
3164 CORE_ADDR func_start, func_end;
3165
3166 if (arm_pc_is_thumb (gdbarch, pc))
3167 return thumb_in_function_epilogue_p (gdbarch, pc);
3168
3169 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3170 return 0;
3171
3172 /* We are in the epilogue if the previous instruction was a stack
3173 adjustment and the next instruction is a possible return (bx, mov
3174 pc, or pop). We could have to scan backwards to find the stack
3175 adjustment, or forwards to find the return, but this is a decent
3176 approximation. First scan forwards. */
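 /* Editorial example (illustrative, not part of the original source):
    a matching pair of ARM-mode epilogue instructions would be

        add   sp, sp, #16           @ stack adjustment (previous insn)
        ldmfd sp!, {r4, r5, pc}     @ pop including PC (possible return)
 */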
3177
3178 found_return = 0;
3179 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3180 if (bits (insn, 28, 31) != INST_NV)
3181 {
3182 if ((insn & 0x0ffffff0) == 0x012fff10)
3183 /* BX. */
3184 found_return = 1;
3185 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3186 /* MOV PC. */
3187 found_return = 1;
3188 else if ((insn & 0x0fff0000) == 0x08bd0000
3189 && (insn & 0x0000c000) != 0)
3190 /* POP (LDMIA), including PC or LR. */
3191 found_return = 1;
3192 }
3193
3194 if (!found_return)
3195 return 0;
3196
3197 /* Scan backwards. This is just a heuristic, so do not worry about
3198 false positives from mode changes. */
3199
3200 if (pc < func_start + 4)
3201 return 0;
3202
73c964d6 3203 found_stack_adjust = 0;
4024ca99
UW
3204 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3205 if (bits (insn, 28, 31) != INST_NV)
3206 {
3207 if ((insn & 0x0df0f000) == 0x0080d000)
3208 /* ADD SP (register or immediate). */
3209 found_stack_adjust = 1;
3210 else if ((insn & 0x0df0f000) == 0x0040d000)
3211 /* SUB SP (register or immediate). */
3212 found_stack_adjust = 1;
3213 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3214 /* MOV SP. */
77bc0675 3215 found_stack_adjust = 1;
4024ca99
UW
3216 else if ((insn & 0x0fff0000) == 0x08bd0000)
3217 /* POP (LDMIA). */
3218 found_stack_adjust = 1;
3219 }
3220
3221 if (found_stack_adjust)
3222 return 1;
3223
3224 return 0;
3225}
3226
3227
2dd604e7
RE
3228/* When arguments must be pushed onto the stack, they go on in reverse
3229 order. The code below implements a FILO (stack) to do this. */
3230
3231struct stack_item
3232{
3233 int len;
3234 struct stack_item *prev;
3235 void *data;
3236};
3237
3238static struct stack_item *
8c6363cf 3239push_stack_item (struct stack_item *prev, const void *contents, int len)
2dd604e7
RE
3240{
3241 struct stack_item *si;
3242 si = xmalloc (sizeof (struct stack_item));
226c7fbc 3243 si->data = xmalloc (len);
2dd604e7
RE
3244 si->len = len;
3245 si->prev = prev;
3246 memcpy (si->data, contents, len);
3247 return si;
3248}
3249
3250static struct stack_item *
3251pop_stack_item (struct stack_item *si)
3252{
3253 struct stack_item *dead = si;
3254 si = si->prev;
3255 xfree (dead->data);
3256 xfree (dead);
3257 return si;
3258}
3259
2af48f68
PB
3260
3261/* Return the alignment (in bytes) of the given type. */
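/* Editorial example (illustrative only): for
       struct { char c; double d; int i; }
   the code below returns max (1, 8, 4) == 8, i.e. the alignment of the
   most strictly aligned field.  */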
3262
3263static int
3264arm_type_align (struct type *t)
3265{
3266 int n;
3267 int align;
3268 int falign;
3269
3270 t = check_typedef (t);
3271 switch (TYPE_CODE (t))
3272 {
3273 default:
3274 /* Should never happen. */
3275 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3276 return 4;
3277
3278 case TYPE_CODE_PTR:
3279 case TYPE_CODE_ENUM:
3280 case TYPE_CODE_INT:
3281 case TYPE_CODE_FLT:
3282 case TYPE_CODE_SET:
3283 case TYPE_CODE_RANGE:
3284 case TYPE_CODE_BITSTRING:
3285 case TYPE_CODE_REF:
3286 case TYPE_CODE_CHAR:
3287 case TYPE_CODE_BOOL:
3288 return TYPE_LENGTH (t);
3289
3290 case TYPE_CODE_ARRAY:
3291 case TYPE_CODE_COMPLEX:
3292 /* TODO: What about vector types? */
3293 return arm_type_align (TYPE_TARGET_TYPE (t));
3294
3295 case TYPE_CODE_STRUCT:
3296 case TYPE_CODE_UNION:
3297 align = 1;
3298 for (n = 0; n < TYPE_NFIELDS (t); n++)
3299 {
3300 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3301 if (falign > align)
3302 align = falign;
3303 }
3304 return align;
3305 }
3306}
3307
90445bd3
DJ
3308/* Possible base types for a candidate for passing and returning in
3309 VFP registers. */
3310
3311enum arm_vfp_cprc_base_type
3312{
3313 VFP_CPRC_UNKNOWN,
3314 VFP_CPRC_SINGLE,
3315 VFP_CPRC_DOUBLE,
3316 VFP_CPRC_VEC64,
3317 VFP_CPRC_VEC128
3318};
3319
3320/* The length of one element of base type B. */
3321
3322static unsigned
3323arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3324{
3325 switch (b)
3326 {
3327 case VFP_CPRC_SINGLE:
3328 return 4;
3329 case VFP_CPRC_DOUBLE:
3330 return 8;
3331 case VFP_CPRC_VEC64:
3332 return 8;
3333 case VFP_CPRC_VEC128:
3334 return 16;
3335 default:
3336 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3337 (int) b);
3338 }
3339}
3340
3341/* The character ('s', 'd' or 'q') for the type of VFP register used
3342 for passing base type B. */
3343
3344static int
3345arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3346{
3347 switch (b)
3348 {
3349 case VFP_CPRC_SINGLE:
3350 return 's';
3351 case VFP_CPRC_DOUBLE:
3352 return 'd';
3353 case VFP_CPRC_VEC64:
3354 return 'd';
3355 case VFP_CPRC_VEC128:
3356 return 'q';
3357 default:
3358 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3359 (int) b);
3360 }
3361}
3362
3363/* Determine whether T may be part of a candidate for passing and
3364 returning in VFP registers, ignoring the limit on the total number
3365 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3366 classification of the first valid component found; if it is not
3367 VFP_CPRC_UNKNOWN, all components must have the same classification
3368 as *BASE_TYPE. If it is found that T contains a type not permitted
3369 for passing and returning in VFP registers, a type differently
3370 classified from *BASE_TYPE, or two types differently classified
3371 from each other, return -1, otherwise return the total number of
3372 base-type elements found (possibly 0 in an empty structure or
3373 array). Vectors and complex types are not currently supported,
3374 matching the generic AAPCS support. */
3375
3376static int
3377arm_vfp_cprc_sub_candidate (struct type *t,
3378 enum arm_vfp_cprc_base_type *base_type)
3379{
3380 t = check_typedef (t);
3381 switch (TYPE_CODE (t))
3382 {
3383 case TYPE_CODE_FLT:
3384 switch (TYPE_LENGTH (t))
3385 {
3386 case 4:
3387 if (*base_type == VFP_CPRC_UNKNOWN)
3388 *base_type = VFP_CPRC_SINGLE;
3389 else if (*base_type != VFP_CPRC_SINGLE)
3390 return -1;
3391 return 1;
3392
3393 case 8:
3394 if (*base_type == VFP_CPRC_UNKNOWN)
3395 *base_type = VFP_CPRC_DOUBLE;
3396 else if (*base_type != VFP_CPRC_DOUBLE)
3397 return -1;
3398 return 1;
3399
3400 default:
3401 return -1;
3402 }
3403 break;
3404
3405 case TYPE_CODE_ARRAY:
3406 {
3407 int count;
3408 unsigned unitlen;
3409 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3410 if (count == -1)
3411 return -1;
3412 if (TYPE_LENGTH (t) == 0)
3413 {
3414 gdb_assert (count == 0);
3415 return 0;
3416 }
3417 else if (count == 0)
3418 return -1;
3419 unitlen = arm_vfp_cprc_unit_length (*base_type);
3420 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3421 return TYPE_LENGTH (t) / unitlen;
3422 }
3423 break;
3424
3425 case TYPE_CODE_STRUCT:
3426 {
3427 int count = 0;
3428 unsigned unitlen;
3429 int i;
3430 for (i = 0; i < TYPE_NFIELDS (t); i++)
3431 {
3432 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3433 base_type);
3434 if (sub_count == -1)
3435 return -1;
3436 count += sub_count;
3437 }
3438 if (TYPE_LENGTH (t) == 0)
3439 {
3440 gdb_assert (count == 0);
3441 return 0;
3442 }
3443 else if (count == 0)
3444 return -1;
3445 unitlen = arm_vfp_cprc_unit_length (*base_type);
3446 if (TYPE_LENGTH (t) != unitlen * count)
3447 return -1;
3448 return count;
3449 }
3450
3451 case TYPE_CODE_UNION:
3452 {
3453 int count = 0;
3454 unsigned unitlen;
3455 int i;
3456 for (i = 0; i < TYPE_NFIELDS (t); i++)
3457 {
3458 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3459 base_type);
3460 if (sub_count == -1)
3461 return -1;
3462 count = (count > sub_count ? count : sub_count);
3463 }
3464 if (TYPE_LENGTH (t) == 0)
3465 {
3466 gdb_assert (count == 0);
3467 return 0;
3468 }
3469 else if (count == 0)
3470 return -1;
3471 unitlen = arm_vfp_cprc_unit_length (*base_type);
3472 if (TYPE_LENGTH (t) != unitlen * count)
3473 return -1;
3474 return count;
3475 }
3476
3477 default:
3478 break;
3479 }
3480
3481 return -1;
3482}
3483
3484/* Determine whether T is a VFP co-processor register candidate (CPRC)
3485 if passed to or returned from a non-variadic function with the VFP
3486 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3487 *BASE_TYPE to the base type for T and *COUNT to the number of
3488 elements of that base type before returning. */
3489
3490static int
3491arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3492 int *count)
3493{
3494 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3495 int c = arm_vfp_cprc_sub_candidate (t, &b);
3496 if (c <= 0 || c > 4)
3497 return 0;
3498 *base_type = b;
3499 *count = c;
3500 return 1;
3501}
3502
3503/* Return 1 if the VFP ABI should be used for passing arguments to and
3504 returning values from a function of type FUNC_TYPE, 0
3505 otherwise. */
3506
3507static int
3508arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3509{
3510 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3511 /* Variadic functions always use the base ABI. Assume that functions
3512 without debug info are not variadic. */
3513 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3514 return 0;
3515 /* The VFP ABI is only supported as a variant of AAPCS. */
3516 if (tdep->arm_abi != ARM_ABI_AAPCS)
3517 return 0;
3518 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3519}
3520
3521/* We currently only support passing parameters in integer registers, which
3522 conforms with GCC's default model, and VFP argument passing following
3523 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3524 we should probably support some of them based on the selected ABI. */
3525
3526static CORE_ADDR
7d9b040b 3527arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3528 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3529 struct value **args, CORE_ADDR sp, int struct_return,
3530 CORE_ADDR struct_addr)
2dd604e7 3531{
e17a4113 3532 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3533 int argnum;
3534 int argreg;
3535 int nstack;
3536 struct stack_item *si = NULL;
90445bd3
DJ
3537 int use_vfp_abi;
3538 struct type *ftype;
3539 unsigned vfp_regs_free = (1 << 16) - 1;
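  /* Each bit in VFP_REGS_FREE corresponds to one of the sixteen
     single-precision VFP argument registers (s0-s15); a set bit means
     that register has not yet been used for argument passing.  */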
3540
3541 /* Determine the type of this function and whether the VFP ABI
3542 applies. */
3543 ftype = check_typedef (value_type (function));
3544 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3545 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3546 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3547
6a65450a
AC
3548 /* Set the return address. For the ARM, the return breakpoint is
3549 always at BP_ADDR. */
9779414d 3550 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3551 bp_addr |= 1;
6a65450a 3552 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3553
3554 /* Walk through the list of args and determine how large a temporary
3555 stack is required. Need to take care here as structs may be
7a9dd1b2 3556 passed on the stack, and we have to push them. */
2dd604e7
RE
3557 nstack = 0;
3558
3559 argreg = ARM_A1_REGNUM;
3560 nstack = 0;
3561
2dd604e7
RE
3562 /* The struct_return pointer occupies the first parameter
3563 passing register. */
3564 if (struct_return)
3565 {
3566 if (arm_debug)
5af949e3 3567 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3568 gdbarch_register_name (gdbarch, argreg),
5af949e3 3569 paddress (gdbarch, struct_addr));
2dd604e7
RE
3570 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3571 argreg++;
3572 }
3573
3574 for (argnum = 0; argnum < nargs; argnum++)
3575 {
3576 int len;
3577 struct type *arg_type;
3578 struct type *target_type;
3579 enum type_code typecode;
8c6363cf 3580 const bfd_byte *val;
2af48f68 3581 int align;
90445bd3
DJ
3582 enum arm_vfp_cprc_base_type vfp_base_type;
3583 int vfp_base_count;
3584 int may_use_core_reg = 1;
2dd604e7 3585
df407dfe 3586 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3587 len = TYPE_LENGTH (arg_type);
3588 target_type = TYPE_TARGET_TYPE (arg_type);
3589 typecode = TYPE_CODE (arg_type);
8c6363cf 3590 val = value_contents (args[argnum]);
2dd604e7 3591
2af48f68
PB
3592 align = arm_type_align (arg_type);
3593 /* Round alignment up to a whole number of words. */
3594 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3595 /* Different ABIs have different maximum alignments. */
3596 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3597 {
3598 /* The APCS ABI only requires word alignment. */
3599 align = INT_REGISTER_SIZE;
3600 }
3601 else
3602 {
3603 /* The AAPCS requires at most doubleword alignment. */
3604 if (align > INT_REGISTER_SIZE * 2)
3605 align = INT_REGISTER_SIZE * 2;
3606 }
3607
90445bd3
DJ
3608 if (use_vfp_abi
3609 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3610 &vfp_base_count))
3611 {
3612 int regno;
3613 int unit_length;
3614 int shift;
3615 unsigned mask;
3616
3617 /* Because this is a CPRC it cannot go in a core register or
3618 cause a core register to be skipped for alignment.
3619 Either it goes in VFP registers and the rest of this loop
3620 iteration is skipped for this argument, or it goes on the
3621 stack (and the stack alignment code is correct for this
3622 case). */
3623 may_use_core_reg = 0;
3624
3625 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3626 shift = unit_length / 4;
3627 mask = (1 << (shift * vfp_base_count)) - 1;
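	      /* For instance, a candidate of two doubles has unit_length 8,
		 so shift is 2 and mask is 0xf: the loop below looks for four
		 consecutive free single-precision slots starting at an even
		 S register number, i.e. two consecutive free D registers.  */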
3628 for (regno = 0; regno < 16; regno += shift)
3629 if (((vfp_regs_free >> regno) & mask) == mask)
3630 break;
3631
3632 if (regno < 16)
3633 {
3634 int reg_char;
3635 int reg_scaled;
3636 int i;
3637
3638 vfp_regs_free &= ~(mask << regno);
3639 reg_scaled = regno / shift;
3640 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3641 for (i = 0; i < vfp_base_count; i++)
3642 {
3643 char name_buf[4];
3644 int regnum;
58d6951d
DJ
3645 if (reg_char == 'q')
3646 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3647 val + i * unit_length);
58d6951d
DJ
3648 else
3649 {
3650 sprintf (name_buf, "%c%d", reg_char, reg_scaled + i);
3651 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3652 strlen (name_buf));
3653 regcache_cooked_write (regcache, regnum,
3654 val + i * unit_length);
3655 }
90445bd3
DJ
3656 }
3657 continue;
3658 }
3659 else
3660 {
3661 /* This CPRC could not go in VFP registers, so all VFP
3662 registers are now marked as used. */
3663 vfp_regs_free = 0;
3664 }
3665 }
3666
2af48f68
PB
 3667	  /* Push stack padding for doubleword alignment. */
3668 if (nstack & (align - 1))
3669 {
3670 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3671 nstack += INT_REGISTER_SIZE;
3672 }
3673
3674 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3675 if (may_use_core_reg
3676 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3677 && align > INT_REGISTER_SIZE
3678 && argreg & 1)
3679 argreg++;
3680
2dd604e7
RE
3681 /* If the argument is a pointer to a function, and it is a
3682 Thumb function, create a LOCAL copy of the value and set
3683 the THUMB bit in it. */
3684 if (TYPE_CODE_PTR == typecode
3685 && target_type != NULL
f96b8fa0 3686 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3687 {
e17a4113 3688 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3689 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3690 {
8c6363cf
TT
3691 bfd_byte *copy = alloca (len);
3692 store_unsigned_integer (copy, len, byte_order,
e17a4113 3693 MAKE_THUMB_ADDR (regval));
8c6363cf 3694 val = copy;
2dd604e7
RE
3695 }
3696 }
3697
3698 /* Copy the argument to general registers or the stack in
3699 register-sized pieces. Large arguments are split between
3700 registers and stack. */
3701 while (len > 0)
3702 {
f0c9063c 3703 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
2dd604e7 3704
90445bd3 3705 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3706 {
3707 /* The argument is being passed in a general purpose
3708 register. */
e17a4113
UW
3709 CORE_ADDR regval
3710 = extract_unsigned_integer (val, partial_len, byte_order);
3711 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3712 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3713 if (arm_debug)
3714 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3715 argnum,
3716 gdbarch_register_name
2af46ca0 3717 (gdbarch, argreg),
f0c9063c 3718 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3719 regcache_cooked_write_unsigned (regcache, argreg, regval);
3720 argreg++;
3721 }
3722 else
3723 {
3724 /* Push the arguments onto the stack. */
3725 if (arm_debug)
3726 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3727 argnum, nstack);
f0c9063c
UW
3728 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3729 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3730 }
3731
3732 len -= partial_len;
3733 val += partial_len;
3734 }
3735 }
3736 /* If we have an odd number of words to push, then decrement the stack
 3737     by one word now, so the first stack argument will be dword aligned. */
3738 if (nstack & 4)
3739 sp -= 4;
3740
3741 while (si)
3742 {
3743 sp -= si->len;
3744 write_memory (sp, si->data, si->len);
3745 si = pop_stack_item (si);
3746 }
3747
 3748  /* Finally, update the SP register. */
3749 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3750
3751 return sp;
3752}
3753
f53f0d0b
PB
3754
3755/* Always align the frame to an 8-byte boundary. This is required on
3756 some platforms and harmless on the rest. */
3757
3758static CORE_ADDR
3759arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3760{
3761 /* Align the stack to eight bytes. */
3762 return sp & ~ (CORE_ADDR) 7;
3763}
3764
c906108c 3765static void
ed9a39eb 3766print_fpu_flags (int flags)
c906108c 3767{
c5aa993b
JM
3768 if (flags & (1 << 0))
3769 fputs ("IVO ", stdout);
3770 if (flags & (1 << 1))
3771 fputs ("DVZ ", stdout);
3772 if (flags & (1 << 2))
3773 fputs ("OFL ", stdout);
3774 if (flags & (1 << 3))
3775 fputs ("UFL ", stdout);
3776 if (flags & (1 << 4))
3777 fputs ("INX ", stdout);
3778 putchar ('\n');
c906108c
SS
3779}
3780
5e74b15c
RE
3781/* Print interesting information about the floating point processor
3782 (if present) or emulator. */
34e8f22d 3783static void
d855c300 3784arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3785 struct frame_info *frame, const char *args)
c906108c 3786{
9c9acae0 3787 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3788 int type;
3789
3790 type = (status >> 24) & 127;
edefbb7c
AC
3791 if (status & (1 << 31))
3792 printf (_("Hardware FPU type %d\n"), type);
3793 else
3794 printf (_("Software FPU type %d\n"), type);
3795 /* i18n: [floating point unit] mask */
3796 fputs (_("mask: "), stdout);
c5aa993b 3797 print_fpu_flags (status >> 16);
edefbb7c
AC
3798 /* i18n: [floating point unit] flags */
3799 fputs (_("flags: "), stdout);
c5aa993b 3800 print_fpu_flags (status);
c906108c
SS
3801}
3802
27067745
UW
3803/* Construct the ARM extended floating point type. */
3804static struct type *
3805arm_ext_type (struct gdbarch *gdbarch)
3806{
3807 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3808
3809 if (!tdep->arm_ext_type)
3810 tdep->arm_ext_type
e9bb382b 3811 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3812 floatformats_arm_ext);
3813
3814 return tdep->arm_ext_type;
3815}
3816
58d6951d
DJ
3817static struct type *
3818arm_neon_double_type (struct gdbarch *gdbarch)
3819{
3820 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3821
3822 if (tdep->neon_double_type == NULL)
3823 {
3824 struct type *t, *elem;
3825
3826 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3827 TYPE_CODE_UNION);
3828 elem = builtin_type (gdbarch)->builtin_uint8;
3829 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3830 elem = builtin_type (gdbarch)->builtin_uint16;
3831 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3832 elem = builtin_type (gdbarch)->builtin_uint32;
3833 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3834 elem = builtin_type (gdbarch)->builtin_uint64;
3835 append_composite_type_field (t, "u64", elem);
3836 elem = builtin_type (gdbarch)->builtin_float;
3837 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3838 elem = builtin_type (gdbarch)->builtin_double;
3839 append_composite_type_field (t, "f64", elem);
3840
3841 TYPE_VECTOR (t) = 1;
3842 TYPE_NAME (t) = "neon_d";
3843 tdep->neon_double_type = t;
3844 }
3845
3846 return tdep->neon_double_type;
3847}
3848
3849/* FIXME: The vector types are not correctly ordered on big-endian
3850 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3851 bits of d0 - regardless of what unit size is being held in d0. So
3852 the offset of the first uint8 in d0 is 7, but the offset of the
3853 first float is 4. This code works as-is for little-endian
3854 targets. */
3855
3856static struct type *
3857arm_neon_quad_type (struct gdbarch *gdbarch)
3858{
3859 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3860
3861 if (tdep->neon_quad_type == NULL)
3862 {
3863 struct type *t, *elem;
3864
3865 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3866 TYPE_CODE_UNION);
3867 elem = builtin_type (gdbarch)->builtin_uint8;
3868 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3869 elem = builtin_type (gdbarch)->builtin_uint16;
3870 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3871 elem = builtin_type (gdbarch)->builtin_uint32;
3872 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3873 elem = builtin_type (gdbarch)->builtin_uint64;
3874 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3875 elem = builtin_type (gdbarch)->builtin_float;
3876 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3877 elem = builtin_type (gdbarch)->builtin_double;
3878 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3879
3880 TYPE_VECTOR (t) = 1;
3881 TYPE_NAME (t) = "neon_q";
3882 tdep->neon_quad_type = t;
3883 }
3884
3885 return tdep->neon_quad_type;
3886}
3887
34e8f22d
RE
3888/* Return the GDB type object for the "standard" data type of data in
3889 register N. */
3890
3891static struct type *
7a5ea0d4 3892arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 3893{
58d6951d
DJ
3894 int num_regs = gdbarch_num_regs (gdbarch);
3895
3896 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3897 && regnum >= num_regs && regnum < num_regs + 32)
3898 return builtin_type (gdbarch)->builtin_float;
3899
3900 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3901 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3902 return arm_neon_quad_type (gdbarch);
3903
3904 /* If the target description has register information, we are only
3905 in this function so that we can override the types of
3906 double-precision registers for NEON. */
3907 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3908 {
3909 struct type *t = tdesc_register_type (gdbarch, regnum);
3910
3911 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3912 && TYPE_CODE (t) == TYPE_CODE_FLT
3913 && gdbarch_tdep (gdbarch)->have_neon)
3914 return arm_neon_double_type (gdbarch);
3915 else
3916 return t;
3917 }
3918
34e8f22d 3919 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
3920 {
3921 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3922 return builtin_type (gdbarch)->builtin_void;
3923
3924 return arm_ext_type (gdbarch);
3925 }
e4c16157 3926 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 3927 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 3928 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 3929 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
3930 else if (regnum >= ARRAY_SIZE (arm_register_names))
3931 /* These registers are only supported on targets which supply
3932 an XML description. */
df4df182 3933 return builtin_type (gdbarch)->builtin_int0;
032758dc 3934 else
df4df182 3935 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
3936}
3937
ff6f572f
DJ
3938/* Map a DWARF register REGNUM onto the appropriate GDB register
3939 number. */
3940
3941static int
d3f73121 3942arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
3943{
3944 /* Core integer regs. */
3945 if (reg >= 0 && reg <= 15)
3946 return reg;
3947
3948 /* Legacy FPA encoding. These were once used in a way which
3949 overlapped with VFP register numbering, so their use is
3950 discouraged, but GDB doesn't support the ARM toolchain
3951 which used them for VFP. */
3952 if (reg >= 16 && reg <= 23)
3953 return ARM_F0_REGNUM + reg - 16;
3954
3955 /* New assignments for the FPA registers. */
3956 if (reg >= 96 && reg <= 103)
3957 return ARM_F0_REGNUM + reg - 96;
3958
3959 /* WMMX register assignments. */
3960 if (reg >= 104 && reg <= 111)
3961 return ARM_WCGR0_REGNUM + reg - 104;
3962
3963 if (reg >= 112 && reg <= 127)
3964 return ARM_WR0_REGNUM + reg - 112;
3965
3966 if (reg >= 192 && reg <= 199)
3967 return ARM_WC0_REGNUM + reg - 192;
3968
58d6951d
DJ
3969 /* VFP v2 registers. A double precision value is actually
3970 in d1 rather than s2, but the ABI only defines numbering
3971 for the single precision registers. This will "just work"
3972 in GDB for little endian targets (we'll read eight bytes,
3973 starting in s0 and then progressing to s1), but will be
3974 reversed on big endian targets with VFP. This won't
3975 be a problem for the new Neon quad registers; you're supposed
3976 to use DW_OP_piece for those. */
3977 if (reg >= 64 && reg <= 95)
3978 {
3979 char name_buf[4];
3980
3981 sprintf (name_buf, "s%d", reg - 64);
3982 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3983 strlen (name_buf));
3984 }
3985
3986 /* VFP v3 / Neon registers. This range is also used for VFP v2
3987 registers, except that it now describes d0 instead of s0. */
3988 if (reg >= 256 && reg <= 287)
3989 {
3990 char name_buf[4];
3991
3992 sprintf (name_buf, "d%d", reg - 256);
3993 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3994 strlen (name_buf));
3995 }
3996
ff6f572f
DJ
3997 return -1;
3998}
3999
26216b98
AC
4000/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4001static int
e7faf938 4002arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4003{
4004 int reg = regnum;
e7faf938 4005 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4006
ff6f572f
DJ
4007 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4008 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4009
4010 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4011 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4012
4013 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4014 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4015
26216b98
AC
4016 if (reg < NUM_GREGS)
4017 return SIM_ARM_R0_REGNUM + reg;
4018 reg -= NUM_GREGS;
4019
4020 if (reg < NUM_FREGS)
4021 return SIM_ARM_FP0_REGNUM + reg;
4022 reg -= NUM_FREGS;
4023
4024 if (reg < NUM_SREGS)
4025 return SIM_ARM_FPS_REGNUM + reg;
4026 reg -= NUM_SREGS;
4027
edefbb7c 4028 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4029}
34e8f22d 4030
a37b3cc0
AC
4031/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4032 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
 4033 It is thought that this is the floating-point register format on
4034 little-endian systems. */
c906108c 4035
ed9a39eb 4036static void
b508a996 4037convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4038 void *dbl, int endianess)
c906108c 4039{
a37b3cc0 4040 DOUBLEST d;
be8626e0
MD
4041
4042 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4043 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4044 else
4045 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4046 ptr, &d);
b508a996 4047 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4048}
4049
34e8f22d 4050static void
be8626e0
MD
4051convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4052 int endianess)
c906108c 4053{
a37b3cc0 4054 DOUBLEST d;
be8626e0 4055
b508a996 4056 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4057 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4058 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4059 else
4060 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4061 &d, dbl);
c906108c 4062}
ed9a39eb 4063
c906108c 4064static int
ed9a39eb 4065condition_true (unsigned long cond, unsigned long status_reg)
c906108c
SS
4066{
4067 if (cond == INST_AL || cond == INST_NV)
4068 return 1;
4069
4070 switch (cond)
4071 {
4072 case INST_EQ:
4073 return ((status_reg & FLAG_Z) != 0);
4074 case INST_NE:
4075 return ((status_reg & FLAG_Z) == 0);
4076 case INST_CS:
4077 return ((status_reg & FLAG_C) != 0);
4078 case INST_CC:
4079 return ((status_reg & FLAG_C) == 0);
4080 case INST_MI:
4081 return ((status_reg & FLAG_N) != 0);
4082 case INST_PL:
4083 return ((status_reg & FLAG_N) == 0);
4084 case INST_VS:
4085 return ((status_reg & FLAG_V) != 0);
4086 case INST_VC:
4087 return ((status_reg & FLAG_V) == 0);
4088 case INST_HI:
4089 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4090 case INST_LS:
4091 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4092 case INST_GE:
4093 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4094 case INST_LT:
4095 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4096 case INST_GT:
f8bf5763
PM
4097 return (((status_reg & FLAG_Z) == 0)
4098 && (((status_reg & FLAG_N) == 0)
4099 == ((status_reg & FLAG_V) == 0)));
c906108c 4100 case INST_LE:
f8bf5763
PM
4101 return (((status_reg & FLAG_Z) != 0)
4102 || (((status_reg & FLAG_N) == 0)
4103 != ((status_reg & FLAG_V) == 0)));
c906108c
SS
4104 }
4105 return 1;
4106}
4107
c906108c 4108static unsigned long
0b1b3e42
UW
4109shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4110 unsigned long pc_val, unsigned long status_reg)
c906108c
SS
4111{
4112 unsigned long res, shift;
4113 int rm = bits (inst, 0, 3);
4114 unsigned long shifttype = bits (inst, 5, 6);
c5aa993b
JM
4115
4116 if (bit (inst, 4))
c906108c
SS
4117 {
4118 int rs = bits (inst, 8, 11);
0b1b3e42
UW
4119 shift = (rs == 15 ? pc_val + 8
4120 : get_frame_register_unsigned (frame, rs)) & 0xFF;
c906108c
SS
4121 }
4122 else
4123 shift = bits (inst, 7, 11);
c5aa993b 4124
bf9f652a 4125 res = (rm == ARM_PC_REGNUM
0d39a070 4126 ? (pc_val + (bit (inst, 4) ? 12 : 8))
0b1b3e42 4127 : get_frame_register_unsigned (frame, rm));
c906108c
SS
4128
4129 switch (shifttype)
4130 {
c5aa993b 4131 case 0: /* LSL */
c906108c
SS
4132 res = shift >= 32 ? 0 : res << shift;
4133 break;
c5aa993b
JM
4134
4135 case 1: /* LSR */
c906108c
SS
4136 res = shift >= 32 ? 0 : res >> shift;
4137 break;
4138
c5aa993b
JM
4139 case 2: /* ASR */
4140 if (shift >= 32)
4141 shift = 31;
c906108c
SS
4142 res = ((res & 0x80000000L)
4143 ? ~((~res) >> shift) : res >> shift);
4144 break;
4145
c5aa993b 4146 case 3: /* ROR/RRX */
c906108c
SS
4147 shift &= 31;
4148 if (shift == 0)
4149 res = (res >> 1) | (carry ? 0x80000000L : 0);
4150 else
c5aa993b 4151 res = (res >> shift) | (res << (32 - shift));
c906108c
SS
4152 break;
4153 }
4154
4155 return res & 0xffffffff;
4156}
4157
c906108c
SS
4158/* Return number of 1-bits in VAL. */
4159
4160static int
ed9a39eb 4161bitcount (unsigned long val)
c906108c
SS
4162{
4163 int nbits;
4164 for (nbits = 0; val != 0; nbits++)
0963b4bd 4165 val &= val - 1; /* Delete rightmost 1-bit in val. */
c906108c
SS
4166 return nbits;
4167}
4168
177321bd
DJ
4169/* Return the size in bytes of the complete Thumb instruction whose
4170 first halfword is INST1. */
4171
4172static int
4173thumb_insn_size (unsigned short inst1)
4174{
4175 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4176 return 4;
4177 else
4178 return 2;
4179}
4180
4181static int
4182thumb_advance_itstate (unsigned int itstate)
4183{
4184 /* Preserve IT[7:5], the first three bits of the condition. Shift
4185 the upcoming condition flags left by one bit. */
4186 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4187
4188 /* If we have finished the IT block, clear the state. */
4189 if ((itstate & 0x0f) == 0)
4190 itstate = 0;
4191
4192 return itstate;
4193}
4194
4195/* Find the next PC after the current instruction executes. In some
4196 cases we can not statically determine the answer (see the IT state
4197 handling in this function); in that case, a breakpoint may be
4198 inserted in addition to the returned PC, which will be used to set
4199 another breakpoint by our caller. */
4200
ad527d2e 4201static CORE_ADDR
18819fa6 4202thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
c906108c 4203{
2af46ca0 4204 struct gdbarch *gdbarch = get_frame_arch (frame);
177321bd 4205 struct address_space *aspace = get_frame_address_space (frame);
e17a4113
UW
4206 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4207 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
c5aa993b 4208 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
e17a4113 4209 unsigned short inst1;
0963b4bd 4210 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
c906108c 4211 unsigned long offset;
177321bd 4212 ULONGEST status, itstate;
c906108c 4213
50e98be4
DJ
4214 nextpc = MAKE_THUMB_ADDR (nextpc);
4215 pc_val = MAKE_THUMB_ADDR (pc_val);
4216
e17a4113 4217 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
9d4fde75 4218
9dca5578
DJ
4219 /* Thumb-2 conditional execution support. There are eight bits in
4220 the CPSR which describe conditional execution state. Once
4221 reconstructed (they're in a funny order), the low five bits
4222 describe the low bit of the condition for each instruction and
4223 how many instructions remain. The high three bits describe the
4224 base condition. One of the low four bits will be set if an IT
4225 block is active. These bits read as zero on earlier
4226 processors. */
4227 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
177321bd 4228 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
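  /* CPSR bits 26:25 hold IT[1:0] and bits 15:10 hold IT[7:2]; the
     expression above reassembles them into a single IT[7:0] value.  */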
9dca5578 4229
177321bd
DJ
4230 /* If-Then handling. On GNU/Linux, where this routine is used, we
4231 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4232 can disable execution of the undefined instruction. So we might
4233 miss the breakpoint if we set it on a skipped conditional
4234 instruction. Because conditional instructions can change the
4235 flags, affecting the execution of further instructions, we may
4236 need to set two breakpoints. */
9dca5578 4237
177321bd
DJ
4238 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4239 {
4240 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4241 {
4242 /* An IT instruction. Because this instruction does not
4243 modify the flags, we can accurately predict the next
4244 executed instruction. */
4245 itstate = inst1 & 0x00ff;
4246 pc += thumb_insn_size (inst1);
4247
4248 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4249 {
0963b4bd
MS
4250 inst1 = read_memory_unsigned_integer (pc, 2,
4251 byte_order_for_code);
177321bd
DJ
4252 pc += thumb_insn_size (inst1);
4253 itstate = thumb_advance_itstate (itstate);
4254 }
4255
50e98be4 4256 return MAKE_THUMB_ADDR (pc);
177321bd
DJ
4257 }
4258 else if (itstate != 0)
4259 {
4260 /* We are in a conditional block. Check the condition. */
4261 if (! condition_true (itstate >> 4, status))
4262 {
4263 /* Advance to the next executed instruction. */
4264 pc += thumb_insn_size (inst1);
4265 itstate = thumb_advance_itstate (itstate);
4266
4267 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4268 {
0963b4bd
MS
4269 inst1 = read_memory_unsigned_integer (pc, 2,
4270 byte_order_for_code);
177321bd
DJ
4271 pc += thumb_insn_size (inst1);
4272 itstate = thumb_advance_itstate (itstate);
4273 }
4274
50e98be4 4275 return MAKE_THUMB_ADDR (pc);
177321bd
DJ
4276 }
4277 else if ((itstate & 0x0f) == 0x08)
4278 {
4279 /* This is the last instruction of the conditional
4280 block, and it is executed. We can handle it normally
4281 because the following instruction is not conditional,
4282 and we must handle it normally because it is
4283 permitted to branch. Fall through. */
4284 }
4285 else
4286 {
4287 int cond_negated;
4288
4289 /* There are conditional instructions after this one.
4290 If this instruction modifies the flags, then we can
4291 not predict what the next executed instruction will
4292 be. Fortunately, this instruction is architecturally
4293 forbidden to branch; we know it will fall through.
4294 Start by skipping past it. */
4295 pc += thumb_insn_size (inst1);
4296 itstate = thumb_advance_itstate (itstate);
4297
4298 /* Set a breakpoint on the following instruction. */
4299 gdb_assert ((itstate & 0x0f) != 0);
18819fa6
UW
4300 arm_insert_single_step_breakpoint (gdbarch, aspace,
4301 MAKE_THUMB_ADDR (pc));
177321bd
DJ
4302 cond_negated = (itstate >> 4) & 1;
4303
4304 /* Skip all following instructions with the same
4305 condition. If there is a later instruction in the IT
4306 block with the opposite condition, set the other
4307 breakpoint there. If not, then set a breakpoint on
4308 the instruction after the IT block. */
4309 do
4310 {
0963b4bd
MS
4311 inst1 = read_memory_unsigned_integer (pc, 2,
4312 byte_order_for_code);
177321bd
DJ
4313 pc += thumb_insn_size (inst1);
4314 itstate = thumb_advance_itstate (itstate);
4315 }
4316 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4317
50e98be4 4318 return MAKE_THUMB_ADDR (pc);
177321bd
DJ
4319 }
4320 }
4321 }
4322 else if (itstate & 0x0f)
9dca5578
DJ
4323 {
4324 /* We are in a conditional block. Check the condition. */
177321bd 4325 int cond = itstate >> 4;
9dca5578
DJ
4326
4327 if (! condition_true (cond, status))
db24da6d
YQ
4328 /* Advance to the next instruction. All the 32-bit
4329 instructions share a common prefix. */
4330 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
177321bd
DJ
4331
4332 /* Otherwise, handle the instruction normally. */
9dca5578
DJ
4333 }
4334
c906108c
SS
4335 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4336 {
4337 CORE_ADDR sp;
4338
4339 /* Fetch the saved PC from the stack. It's stored above
4340 all of the other registers. */
f0c9063c 4341 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
0b1b3e42 4342 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
e17a4113 4343 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
c906108c
SS
4344 }
4345 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4346 {
c5aa993b 4347 unsigned long cond = bits (inst1, 8, 11);
25b41d01
YQ
4348 if (cond == 0x0f) /* 0x0f = SWI */
4349 {
4350 struct gdbarch_tdep *tdep;
4351 tdep = gdbarch_tdep (gdbarch);
4352
4353 if (tdep->syscall_next_pc != NULL)
4354 nextpc = tdep->syscall_next_pc (frame);
4355
4356 }
4357 else if (cond != 0x0f && condition_true (cond, status))
c906108c
SS
4358 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4359 }
4360 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4361 {
4362 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4363 }
db24da6d 4364 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
c906108c 4365 {
e17a4113
UW
4366 unsigned short inst2;
4367 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
9dca5578
DJ
4368
4369 /* Default to the next instruction. */
4370 nextpc = pc + 4;
50e98be4 4371 nextpc = MAKE_THUMB_ADDR (nextpc);
9dca5578
DJ
4372
4373 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4374 {
4375 /* Branches and miscellaneous control instructions. */
4376
4377 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4378 {
4379 /* B, BL, BLX. */
4380 int j1, j2, imm1, imm2;
4381
4382 imm1 = sbits (inst1, 0, 10);
4383 imm2 = bits (inst2, 0, 10);
4384 j1 = bit (inst2, 13);
4385 j2 = bit (inst2, 11);
4386
4387 offset = ((imm1 << 12) + (imm2 << 1));
4388 offset ^= ((!j2) << 22) | ((!j1) << 23);
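	      /* After the XOR, bit 23 of the sign-extended offset holds
		 I1 = NOT(J1 XOR S) and bit 22 holds I2 = NOT(J2 XOR S),
		 as the Thumb-2 B/BL/BLX encodings specify.  */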
4389
4390 nextpc = pc_val + offset;
4391 /* For BLX make sure to clear the low bits. */
4392 if (bit (inst2, 12) == 0)
4393 nextpc = nextpc & 0xfffffffc;
4394 }
4395 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4396 {
4397 /* SUBS PC, LR, #imm8. */
4398 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4399 nextpc -= inst2 & 0x00ff;
4400 }
4069ebbe 4401 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
9dca5578
DJ
4402 {
4403 /* Conditional branch. */
4404 if (condition_true (bits (inst1, 6, 9), status))
4405 {
4406 int sign, j1, j2, imm1, imm2;
4407
4408 sign = sbits (inst1, 10, 10);
4409 imm1 = bits (inst1, 0, 5);
4410 imm2 = bits (inst2, 0, 10);
4411 j1 = bit (inst2, 13);
4412 j2 = bit (inst2, 11);
4413
4414 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4415 offset += (imm1 << 12) + (imm2 << 1);
4416
4417 nextpc = pc_val + offset;
4418 }
4419 }
4420 }
4421 else if ((inst1 & 0xfe50) == 0xe810)
4422 {
4423 /* Load multiple or RFE. */
4424 int rn, offset, load_pc = 1;
4425
4426 rn = bits (inst1, 0, 3);
4427 if (bit (inst1, 7) && !bit (inst1, 8))
4428 {
4429 /* LDMIA or POP */
4430 if (!bit (inst2, 15))
4431 load_pc = 0;
4432 offset = bitcount (inst2) * 4 - 4;
4433 }
4434 else if (!bit (inst1, 7) && bit (inst1, 8))
4435 {
4436 /* LDMDB */
4437 if (!bit (inst2, 15))
4438 load_pc = 0;
4439 offset = -4;
4440 }
4441 else if (bit (inst1, 7) && bit (inst1, 8))
4442 {
4443 /* RFEIA */
4444 offset = 0;
4445 }
4446 else if (!bit (inst1, 7) && !bit (inst1, 8))
4447 {
4448 /* RFEDB */
4449 offset = -8;
4450 }
4451 else
4452 load_pc = 0;
4453
4454 if (load_pc)
4455 {
4456 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4457 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4458 }
4459 }
4460 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4461 {
4462 /* MOV PC or MOVS PC. */
4463 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
50e98be4 4464 nextpc = MAKE_THUMB_ADDR (nextpc);
9dca5578
DJ
4465 }
4466 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4467 {
4468 /* LDR PC. */
4469 CORE_ADDR base;
4470 int rn, load_pc = 1;
4471
4472 rn = bits (inst1, 0, 3);
4473 base = get_frame_register_unsigned (frame, rn);
bf9f652a 4474 if (rn == ARM_PC_REGNUM)
9dca5578
DJ
4475 {
4476 base = (base + 4) & ~(CORE_ADDR) 0x3;
4477 if (bit (inst1, 7))
4478 base += bits (inst2, 0, 11);
4479 else
4480 base -= bits (inst2, 0, 11);
4481 }
4482 else if (bit (inst1, 7))
4483 base += bits (inst2, 0, 11);
4484 else if (bit (inst2, 11))
4485 {
4486 if (bit (inst2, 10))
4487 {
4488 if (bit (inst2, 9))
4489 base += bits (inst2, 0, 7);
4490 else
4491 base -= bits (inst2, 0, 7);
4492 }
4493 }
4494 else if ((inst2 & 0x0fc0) == 0x0000)
4495 {
4496 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4497 base += get_frame_register_unsigned (frame, rm) << shift;
4498 }
4499 else
4500 /* Reserved. */
4501 load_pc = 0;
4502
4503 if (load_pc)
4504 nextpc = get_frame_memory_unsigned (frame, base, 4);
4505 }
4506 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4507 {
4508 /* TBB. */
d476da0e
RE
4509 CORE_ADDR tbl_reg, table, offset, length;
4510
4511 tbl_reg = bits (inst1, 0, 3);
4512 if (tbl_reg == 0x0f)
4513 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4514 else
4515 table = get_frame_register_unsigned (frame, tbl_reg);
9dca5578 4516
9dca5578
DJ
4517 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4518 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4519 nextpc = pc_val + length;
4520 }
d476da0e 4521 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
9dca5578
DJ
4522 {
4523 /* TBH. */
d476da0e
RE
4524 CORE_ADDR tbl_reg, table, offset, length;
4525
4526 tbl_reg = bits (inst1, 0, 3);
4527 if (tbl_reg == 0x0f)
4528 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4529 else
4530 table = get_frame_register_unsigned (frame, tbl_reg);
9dca5578 4531
9dca5578
DJ
4532 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4533 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4534 nextpc = pc_val + length;
4535 }
c906108c 4536 }
aa17d93e 4537 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
9498281f
DJ
4538 {
4539 if (bits (inst1, 3, 6) == 0x0f)
4540 nextpc = pc_val;
4541 else
0b1b3e42 4542 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
9498281f 4543 }
ad8b5167
UW
4544 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4545 {
4546 if (bits (inst1, 3, 6) == 0x0f)
4547 nextpc = pc_val;
4548 else
4549 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4550
4551 nextpc = MAKE_THUMB_ADDR (nextpc);
4552 }
9dca5578
DJ
4553 else if ((inst1 & 0xf500) == 0xb100)
4554 {
4555 /* CBNZ or CBZ. */
4556 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4557 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4558
4559 if (bit (inst1, 11) && reg != 0)
4560 nextpc = pc_val + imm;
4561 else if (!bit (inst1, 11) && reg == 0)
4562 nextpc = pc_val + imm;
4563 }
c906108c
SS
4564 return nextpc;
4565}
4566
50e98be4 4567/* Get the raw next address. PC is the current program counter, in
18819fa6 4568 FRAME, which is assumed to be executing in ARM mode.
50e98be4
DJ
4569
4570 The value returned has the execution state of the next instruction
4571 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4572 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
0963b4bd
MS
4573 address. */
4574
50e98be4 4575static CORE_ADDR
18819fa6 4576arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
c906108c 4577{
2af46ca0 4578 struct gdbarch *gdbarch = get_frame_arch (frame);
e17a4113
UW
4579 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4580 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
c906108c
SS
4581 unsigned long pc_val;
4582 unsigned long this_instr;
4583 unsigned long status;
4584 CORE_ADDR nextpc;
4585
c906108c 4586 pc_val = (unsigned long) pc;
e17a4113 4587 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
9d4fde75 4588
0b1b3e42 4589 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
c5aa993b 4590 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
c906108c 4591
daddc3c1
DJ
4592 if (bits (this_instr, 28, 31) == INST_NV)
4593 switch (bits (this_instr, 24, 27))
4594 {
4595 case 0xa:
4596 case 0xb:
4597 {
4598 /* Branch with Link and change to Thumb. */
4599 nextpc = BranchDest (pc, this_instr);
4600 nextpc |= bit (this_instr, 24) << 1;
50e98be4 4601 nextpc = MAKE_THUMB_ADDR (nextpc);
daddc3c1
DJ
4602 break;
4603 }
4604 case 0xc:
4605 case 0xd:
4606 case 0xe:
4607 /* Coprocessor register transfer. */
4608 if (bits (this_instr, 12, 15) == 15)
4609 error (_("Invalid update to pc in instruction"));
4610 break;
4611 }
4612 else if (condition_true (bits (this_instr, 28, 31), status))
c906108c
SS
4613 {
4614 switch (bits (this_instr, 24, 27))
4615 {
c5aa993b 4616 case 0x0:
94c30b78 4617 case 0x1: /* data processing */
c5aa993b
JM
4618 case 0x2:
4619 case 0x3:
c906108c
SS
4620 {
4621 unsigned long operand1, operand2, result = 0;
4622 unsigned long rn;
4623 int c;
c5aa993b 4624
c906108c
SS
4625 if (bits (this_instr, 12, 15) != 15)
4626 break;
4627
4628 if (bits (this_instr, 22, 25) == 0
c5aa993b 4629 && bits (this_instr, 4, 7) == 9) /* multiply */
edefbb7c 4630 error (_("Invalid update to pc in instruction"));
c906108c 4631
9498281f 4632 /* BX <reg>, BLX <reg> */
e150acc7
PB
4633 if (bits (this_instr, 4, 27) == 0x12fff1
4634 || bits (this_instr, 4, 27) == 0x12fff3)
9498281f
DJ
4635 {
4636 rn = bits (this_instr, 0, 3);
bf9f652a
YQ
4637 nextpc = ((rn == ARM_PC_REGNUM)
4638 ? (pc_val + 8)
4639 : get_frame_register_unsigned (frame, rn));
4640
9498281f
DJ
4641 return nextpc;
4642 }
4643
0963b4bd 4644 /* Multiply into PC. */
c906108c
SS
4645 c = (status & FLAG_C) ? 1 : 0;
4646 rn = bits (this_instr, 16, 19);
bf9f652a
YQ
4647 operand1 = ((rn == ARM_PC_REGNUM)
4648 ? (pc_val + 8)
4649 : get_frame_register_unsigned (frame, rn));
c5aa993b 4650
c906108c
SS
4651 if (bit (this_instr, 25))
4652 {
4653 unsigned long immval = bits (this_instr, 0, 7);
4654 unsigned long rotate = 2 * bits (this_instr, 8, 11);
c5aa993b
JM
4655 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4656 & 0xffffffff;
c906108c 4657 }
0963b4bd
MS
4658 else /* operand 2 is a shifted register. */
4659 operand2 = shifted_reg_val (frame, this_instr, c,
4660 pc_val, status);
c5aa993b 4661
c906108c
SS
4662 switch (bits (this_instr, 21, 24))
4663 {
c5aa993b 4664 case 0x0: /*and */
c906108c
SS
4665 result = operand1 & operand2;
4666 break;
4667
c5aa993b 4668 case 0x1: /*eor */
c906108c
SS
4669 result = operand1 ^ operand2;
4670 break;
4671
c5aa993b 4672 case 0x2: /*sub */
c906108c
SS
4673 result = operand1 - operand2;
4674 break;
4675
c5aa993b 4676 case 0x3: /*rsb */
c906108c
SS
4677 result = operand2 - operand1;
4678 break;
4679
c5aa993b 4680 case 0x4: /*add */
c906108c
SS
4681 result = operand1 + operand2;
4682 break;
4683
c5aa993b 4684 case 0x5: /*adc */
c906108c
SS
4685 result = operand1 + operand2 + c;
4686 break;
4687
c5aa993b 4688 case 0x6: /*sbc */
c906108c
SS
4689 result = operand1 - operand2 + c;
4690 break;
4691
c5aa993b 4692 case 0x7: /*rsc */
c906108c
SS
4693 result = operand2 - operand1 + c;
4694 break;
4695
c5aa993b
JM
4696 case 0x8:
4697 case 0x9:
4698 case 0xa:
4699 case 0xb: /* tst, teq, cmp, cmn */
c906108c
SS
4700 result = (unsigned long) nextpc;
4701 break;
4702
c5aa993b 4703 case 0xc: /*orr */
c906108c
SS
4704 result = operand1 | operand2;
4705 break;
4706
c5aa993b 4707 case 0xd: /*mov */
c906108c
SS
4708 /* Always step into a function. */
4709 result = operand2;
c5aa993b 4710 break;
c906108c 4711
c5aa993b 4712 case 0xe: /*bic */
c906108c
SS
4713 result = operand1 & ~operand2;
4714 break;
4715
c5aa993b 4716 case 0xf: /*mvn */
c906108c
SS
4717 result = ~operand2;
4718 break;
4719 }
c906108c 4720
50e98be4
DJ
4721 /* In 26-bit APCS the bottom two bits of the result are
4722 ignored, and we always end up in ARM state. */
4723 if (!arm_apcs_32)
4724 nextpc = arm_addr_bits_remove (gdbarch, result);
4725 else
4726 nextpc = result;
4727
c906108c
SS
4728 break;
4729 }
c5aa993b
JM
4730
4731 case 0x4:
4732 case 0x5: /* data transfer */
4733 case 0x6:
4734 case 0x7:
c906108c
SS
4735 if (bit (this_instr, 20))
4736 {
4737 /* load */
4738 if (bits (this_instr, 12, 15) == 15)
4739 {
4740 /* rd == pc */
c5aa993b 4741 unsigned long rn;
c906108c 4742 unsigned long base;
c5aa993b 4743
c906108c 4744 if (bit (this_instr, 22))
edefbb7c 4745 error (_("Invalid update to pc in instruction"));
c906108c
SS
4746
4747 /* byte write to PC */
4748 rn = bits (this_instr, 16, 19);
bf9f652a
YQ
4749 base = ((rn == ARM_PC_REGNUM)
4750 ? (pc_val + 8)
4751 : get_frame_register_unsigned (frame, rn));
4752
c906108c
SS
4753 if (bit (this_instr, 24))
4754 {
4755 /* pre-indexed */
4756 int c = (status & FLAG_C) ? 1 : 0;
4757 unsigned long offset =
c5aa993b 4758 (bit (this_instr, 25)
0b1b3e42 4759 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
c5aa993b 4760 : bits (this_instr, 0, 11));
c906108c
SS
4761
4762 if (bit (this_instr, 23))
4763 base += offset;
4764 else
4765 base -= offset;
4766 }
51370a33
YQ
4767 nextpc =
4768 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4769 4, byte_order);
c906108c
SS
4770 }
4771 }
4772 break;
c5aa993b
JM
4773
4774 case 0x8:
4775 case 0x9: /* block transfer */
c906108c
SS
4776 if (bit (this_instr, 20))
4777 {
4778 /* LDM */
4779 if (bit (this_instr, 15))
4780 {
4781 /* loading pc */
4782 int offset = 0;
51370a33
YQ
4783 unsigned long rn_val
4784 = get_frame_register_unsigned (frame,
4785 bits (this_instr, 16, 19));
c906108c
SS
4786
4787 if (bit (this_instr, 23))
4788 {
4789 /* up */
4790 unsigned long reglist = bits (this_instr, 0, 14);
4791 offset = bitcount (reglist) * 4;
c5aa993b 4792 if (bit (this_instr, 24)) /* pre */
c906108c
SS
4793 offset += 4;
4794 }
4795 else if (bit (this_instr, 24))
4796 offset = -4;
c5aa993b 4797
51370a33
YQ
4798 nextpc =
4799 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4800 (rn_val + offset),
4801 4, byte_order);
c906108c
SS
4802 }
4803 }
4804 break;
c5aa993b
JM
4805
4806 case 0xb: /* branch & link */
4807 case 0xa: /* branch */
c906108c
SS
4808 {
4809 nextpc = BranchDest (pc, this_instr);
c906108c
SS
4810 break;
4811 }
c5aa993b
JM
4812
4813 case 0xc:
4814 case 0xd:
4815 case 0xe: /* coproc ops */
25b41d01 4816 break;
c5aa993b 4817 case 0xf: /* SWI */
25b41d01
YQ
4818 {
4819 struct gdbarch_tdep *tdep;
4820 tdep = gdbarch_tdep (gdbarch);
4821
4822 if (tdep->syscall_next_pc != NULL)
4823 nextpc = tdep->syscall_next_pc (frame);
4824
4825 }
c906108c
SS
4826 break;
4827
4828 default:
edefbb7c 4829 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
c906108c
SS
4830 return (pc);
4831 }
4832 }
4833
4834 return nextpc;
4835}
4836
18819fa6
UW
4837/* Determine next PC after current instruction executes. Will call either
4838 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
4839 loop is detected. */
4840
50e98be4
DJ
4841CORE_ADDR
4842arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
4843{
18819fa6
UW
4844 CORE_ADDR nextpc;
4845
4846 if (arm_frame_is_thumb (frame))
4847 {
4848 nextpc = thumb_get_next_pc_raw (frame, pc);
4849 if (nextpc == MAKE_THUMB_ADDR (pc))
4850 error (_("Infinite loop detected"));
4851 }
4852 else
4853 {
4854 nextpc = arm_get_next_pc_raw (frame, pc);
4855 if (nextpc == pc)
4856 error (_("Infinite loop detected"));
4857 }
4858
50e98be4
DJ
4859 return nextpc;
4860}
4861
18819fa6
UW
4862/* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4863 of the appropriate mode (as encoded in the PC value), even if this
4864 differs from what would be expected according to the symbol tables. */
4865
4866void
4867arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4868 struct address_space *aspace,
4869 CORE_ADDR pc)
4870{
4871 struct cleanup *old_chain
4872 = make_cleanup_restore_integer (&arm_override_mode);
4873
4874 arm_override_mode = IS_THUMB_ADDR (pc);
4875 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4876
4877 insert_single_step_breakpoint (gdbarch, aspace, pc);
4878
4879 do_cleanups (old_chain);
4880}
4881
35f73cfc
UW
4882/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
4883 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
4884 is found, attempt to step through it. A breakpoint is placed at the end of
4885 the sequence. */
4886
4887static int
4888thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
4889{
4890 struct gdbarch *gdbarch = get_frame_arch (frame);
4891 struct address_space *aspace = get_frame_address_space (frame);
4892 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4893 CORE_ADDR pc = get_frame_pc (frame);
4894 CORE_ADDR breaks[2] = {-1, -1};
4895 CORE_ADDR loc = pc;
4896 unsigned short insn1, insn2;
4897 int insn_count;
4898 int index;
4899 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
4900 const int atomic_sequence_length = 16; /* Instruction sequence length. */
4901 ULONGEST status, itstate;
4902
4903 /* We currently do not support atomic sequences within an IT block. */
4904 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4905 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4906 if (itstate & 0x0f)
4907 return 0;
4908
4909 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
4910 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4911 loc += 2;
4912 if (thumb_insn_size (insn1) != 4)
4913 return 0;
4914
4915 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4916 loc += 2;
4917 if (!((insn1 & 0xfff0) == 0xe850
4918 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
4919 return 0;
4920
4921 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
4922 instructions. */
4923 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
4924 {
4925 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4926 loc += 2;
4927
4928 if (thumb_insn_size (insn1) != 4)
4929 {
4930 /* Assume that there is at most one conditional branch in the
4931 atomic sequence. If a conditional branch is found, put a
4932 breakpoint in its destination address. */
4933 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
4934 {
4935 if (last_breakpoint > 0)
4936 return 0; /* More than one conditional branch found,
4937 fallback to the standard code. */
4938
4939 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
4940 last_breakpoint++;
4941 }
4942
4943 /* We do not support atomic sequences that use any *other*
4944 instructions but conditional branches to change the PC.
4945 Fall back to standard code to avoid losing control of
4946 execution. */
4947 else if (thumb_instruction_changes_pc (insn1))
4948 return 0;
4949 }
4950 else
4951 {
4952 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4953 loc += 2;
4954
4955 /* Assume that there is at most one conditional branch in the
4956 atomic sequence. If a conditional branch is found, put a
4957 breakpoint in its destination address. */
4958 if ((insn1 & 0xf800) == 0xf000
4959 && (insn2 & 0xd000) == 0x8000
4960 && (insn1 & 0x0380) != 0x0380)
4961 {
4962 int sign, j1, j2, imm1, imm2;
4963 unsigned int offset;
4964
4965 sign = sbits (insn1, 10, 10);
4966 imm1 = bits (insn1, 0, 5);
4967 imm2 = bits (insn2, 0, 10);
4968 j1 = bit (insn2, 13);
4969 j2 = bit (insn2, 11);
4970
4971 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4972 offset += (imm1 << 12) + (imm2 << 1);
4973
4974 if (last_breakpoint > 0)
4975 return 0; /* More than one conditional branch found,
4976 fallback to the standard code. */
4977
4978 breaks[1] = loc + offset;
4979 last_breakpoint++;
4980 }
4981
4982 /* We do not support atomic sequences that use any *other*
4983 instructions but conditional branches to change the PC.
4984 Fall back to standard code to avoid losing control of
4985 execution. */
4986 else if (thumb2_instruction_changes_pc (insn1, insn2))
4987 return 0;
4988
4989 /* If we find a strex{,b,h,d}, we're done. */
4990 if ((insn1 & 0xfff0) == 0xe840
4991 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
4992 break;
4993 }
4994 }
4995
4996 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
4997 if (insn_count == atomic_sequence_length)
4998 return 0;
4999
5000 /* Insert a breakpoint right after the end of the atomic sequence. */
5001 breaks[0] = loc;
5002
5003 /* Check for duplicated breakpoints. Check also for a breakpoint
5004 placed (branch instruction's destination) anywhere in sequence. */
5005 if (last_breakpoint
5006 && (breaks[1] == breaks[0]
5007 || (breaks[1] >= pc && breaks[1] < loc)))
5008 last_breakpoint = 0;
5009
5010 /* Effectively inserts the breakpoints. */
5011 for (index = 0; index <= last_breakpoint; index++)
5012 arm_insert_single_step_breakpoint (gdbarch, aspace,
5013 MAKE_THUMB_ADDR (breaks[index]));
5014
5015 return 1;
5016}
5017
5018static int
5019arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5020{
5021 struct gdbarch *gdbarch = get_frame_arch (frame);
5022 struct address_space *aspace = get_frame_address_space (frame);
5023 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5024 CORE_ADDR pc = get_frame_pc (frame);
5025 CORE_ADDR breaks[2] = {-1, -1};
5026 CORE_ADDR loc = pc;
5027 unsigned int insn;
5028 int insn_count;
5029 int index;
5030 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5031 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5032
5033 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5034 Note that we do not currently support conditionally executed atomic
5035 instructions. */
5036 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5037 loc += 4;
5038 if ((insn & 0xff9000f0) != 0xe1900090)
5039 return 0;
5040
5041 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5042 instructions. */
5043 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5044 {
5045 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5046 loc += 4;
5047
5048 /* Assume that there is at most one conditional branch in the atomic
5049 sequence. If a conditional branch is found, put a breakpoint in
5050 its destination address. */
5051 if (bits (insn, 24, 27) == 0xa)
5052 {
5053 if (last_breakpoint > 0)
5054 return 0; /* More than one conditional branch found, fallback
5055 to the standard single-step code. */
5056
5057 breaks[1] = BranchDest (loc - 4, insn);
5058 last_breakpoint++;
5059 }
5060
5061 /* We do not support atomic sequences that use any *other* instructions
5062 but conditional branches to change the PC. Fall back to standard
5063 code to avoid losing control of execution. */
5064 else if (arm_instruction_changes_pc (insn))
5065 return 0;
5066
5067 /* If we find a strex{,b,h,d}, we're done. */
5068 if ((insn & 0xff9000f0) == 0xe1800090)
5069 break;
5070 }
5071
5072 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5073 if (insn_count == atomic_sequence_length)
5074 return 0;
5075
5076 /* Insert a breakpoint right after the end of the atomic sequence. */
5077 breaks[0] = loc;
5078
5079 /* Check for duplicated breakpoints. Check also for a breakpoint
5080 placed (branch instruction's destination) anywhere in sequence. */
5081 if (last_breakpoint
5082 && (breaks[1] == breaks[0]
5083 || (breaks[1] >= pc && breaks[1] < loc)))
5084 last_breakpoint = 0;
5085
5086 /* Effectively inserts the breakpoints. */
5087 for (index = 0; index <= last_breakpoint; index++)
5088 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5089
5090 return 1;
5091}
5092
5093int
5094arm_deal_with_atomic_sequence (struct frame_info *frame)
5095{
5096 if (arm_frame_is_thumb (frame))
5097 return thumb_deal_with_atomic_sequence_raw (frame);
5098 else
5099 return arm_deal_with_atomic_sequence_raw (frame);
5100}
5101
9512d7fd
FN
5102/* single_step() is called just before we want to resume the inferior,
5103 if we want to single-step it but there is no hardware or kernel
5104 single-step support. We find the target of the coming instruction
e0cd558a 5105 and breakpoint it. */
9512d7fd 5106
190dce09 5107int
0b1b3e42 5108arm_software_single_step (struct frame_info *frame)
9512d7fd 5109{
a6d9a66e 5110 struct gdbarch *gdbarch = get_frame_arch (frame);
6c95b8df 5111 struct address_space *aspace = get_frame_address_space (frame);
35f73cfc
UW
5112 CORE_ADDR next_pc;
5113
5114 if (arm_deal_with_atomic_sequence (frame))
5115 return 1;
18819fa6 5116
35f73cfc 5117 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
18819fa6 5118 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
e6590a1b
UW
5119
5120 return 1;
9512d7fd 5121}
9512d7fd 5122
f9d67f43
DJ
5123/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5124 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5125 NULL if an error occurs. BUF is freed. */
5126
5127static gdb_byte *
5128extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5129 int old_len, int new_len)
5130{
5131 gdb_byte *new_buf, *middle;
5132 int bytes_to_read = new_len - old_len;
5133
5134 new_buf = xmalloc (new_len);
5135 memcpy (new_buf + bytes_to_read, buf, old_len);
5136 xfree (buf);
5137 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5138 {
5139 xfree (new_buf);
5140 return NULL;
5141 }
5142 return new_buf;
5143}
5144
5145/* An IT block is at most the 2-byte IT instruction followed by
5146 four 4-byte instructions. The furthest back we must search to
5147 find an IT block that affects the current instruction is thus
5148 2 + 3 * 4 == 14 bytes. */
5149#define MAX_IT_BLOCK_PREFIX 14
5150
5151/* Use a quick scan if there are more than this many bytes of
5152 code. */
5153#define IT_SCAN_THRESHOLD 32
5154
5155/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5156 A breakpoint in an IT block may not be hit, depending on the
5157 condition flags. */
5158static CORE_ADDR
5159arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5160{
5161 gdb_byte *buf;
5162 char map_type;
5163 CORE_ADDR boundary, func_start;
5164 int buf_len, buf2_len;
5165 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5166 int i, any, last_it, last_it_count;
5167
5168 /* If we are using BKPT breakpoints, none of this is necessary. */
5169 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5170 return bpaddr;
5171
5172 /* ARM mode does not have this problem. */
9779414d 5173 if (!arm_pc_is_thumb (gdbarch, bpaddr))
f9d67f43
DJ
5174 return bpaddr;
5175
5176 /* We are setting a breakpoint in Thumb code that could potentially
5177 contain an IT block. The first step is to find how much Thumb
5178 code there is; we do not need to read outside of known Thumb
5179 sequences. */
5180 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5181 if (map_type == 0)
5182 /* Thumb-2 code must have mapping symbols to have a chance. */
5183 return bpaddr;
5184
5185 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5186
5187 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5188 && func_start > boundary)
5189 boundary = func_start;
5190
5191 /* Search for a candidate IT instruction. We have to do some fancy
5192 footwork to distinguish a real IT instruction from the second
5193 half of a 32-bit instruction, but there is no need for that if
5194 there's no candidate. */
5195 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5196 if (buf_len == 0)
5197 /* No room for an IT instruction. */
5198 return bpaddr;
5199
5200 buf = xmalloc (buf_len);
5201 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5202 return bpaddr;
5203 any = 0;
5204 for (i = 0; i < buf_len; i += 2)
5205 {
5206 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5207 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5208 {
5209 any = 1;
5210 break;
5211 }
5212 }
5213 if (any == 0)
5214 {
5215 xfree (buf);
5216 return bpaddr;
5217 }
5218
5219 /* OK, the code bytes before this instruction contain at least one
5220 halfword which resembles an IT instruction. We know that it's
5221 Thumb code, but there are still two possibilities. Either the
5222 halfword really is an IT instruction, or it is the second half of
5223 a 32-bit Thumb instruction. The only way we can tell is to
5224 scan forwards from a known instruction boundary. */
5225 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5226 {
5227 int definite;
5228
5229 /* There's a lot of code before this instruction. Start with an
5230 optimistic search; it's easy to recognize halfwords that can
5231 not be the start of a 32-bit instruction, and use that to
5232 lock on to the instruction boundaries. */
5233 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5234 if (buf == NULL)
5235 return bpaddr;
5236 buf_len = IT_SCAN_THRESHOLD;
5237
5238 definite = 0;
5239 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5240 {
5241 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5242 if (thumb_insn_size (inst1) == 2)
5243 {
5244 definite = 1;
5245 break;
5246 }
5247 }
5248
5249 /* At this point, if DEFINITE, BUF[I] is the first place we
5250 are sure that we know the instruction boundaries, and it is far
5251 enough from BPADDR that we could not miss an IT instruction
5252 affecting BPADDR. If ! DEFINITE, give up - start from a
5253 known boundary. */
5254 if (! definite)
5255 {
0963b4bd
MS
5256 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5257 bpaddr - boundary);
f9d67f43
DJ
5258 if (buf == NULL)
5259 return bpaddr;
5260 buf_len = bpaddr - boundary;
5261 i = 0;
5262 }
5263 }
5264 else
5265 {
5266 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5267 if (buf == NULL)
5268 return bpaddr;
5269 buf_len = bpaddr - boundary;
5270 i = 0;
5271 }
5272
5273 /* Scan forwards. Find the last IT instruction before BPADDR. */
5274 last_it = -1;
5275 last_it_count = 0;
5276 while (i < buf_len)
5277 {
5278 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5279 last_it_count--;
5280 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5281 {
5282 last_it = i;
5283 if (inst1 & 0x0001)
5284 last_it_count = 4;
5285 else if (inst1 & 0x0002)
5286 last_it_count = 3;
5287 else if (inst1 & 0x0004)
5288 last_it_count = 2;
5289 else
5290 last_it_count = 1;
5291 }
5292 i += thumb_insn_size (inst1);
5293 }
5294
5295 xfree (buf);
5296
5297 if (last_it == -1)
5298 /* There wasn't really an IT instruction after all. */
5299 return bpaddr;
5300
5301 if (last_it_count < 1)
5302 /* It was too far away. */
5303 return bpaddr;
5304
5305 /* This really is a trouble spot. Move the breakpoint to the IT
5306 instruction. */
5307 return bpaddr - buf_len + last_it;
5308}
5309
cca44b1b 5310/* ARM displaced stepping support.
c906108c 5311
cca44b1b 5312 Generally ARM displaced stepping works as follows:
c906108c 5313
cca44b1b
JB
5314 1. When an instruction is to be single-stepped, it is first decoded by
5315 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5316 Depending on the type of instruction, it is then copied to a scratch
5317 location, possibly in a modified form. The copy_* set of functions
0963b4bd 5318 performs such modification, as necessary. A breakpoint is placed after
cca44b1b
JB
5319 the modified instruction in the scratch space to return control to GDB.
5320 Note in particular that instructions which modify the PC will no longer
5321 do so after modification.
c5aa993b 5322
cca44b1b
JB
5323 2. The instruction is single-stepped, by setting the PC to the scratch
5324 location address, and resuming. Control returns to GDB when the
5325 breakpoint is hit.
c5aa993b 5326
cca44b1b
JB
5327 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5328 function used for the current instruction. This function's job is to
5329 put the CPU/memory state back to what it would have been if the
5330 instruction had been executed unmodified in its original location. */
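/* Editorial illustration (not part of the original source): suppose the
   inferior is stopped at 0x8000 on the ARM instruction "add r2, pc, #4".
   arm_copy_alu_imm saves r0 and r1, loads r1 with the value the original
   instruction would have read from the PC (0x8008, i.e. 0x8000 plus the
   ARM pipeline offset of 8), and copies the instruction into the scratch
   area as "add r0, r1, #4".  After the single step, cleanup_alu_imm
   writes the scratch r0 back into r2 and restores r0 and r1, so the
   visible effect is r2 = 0x800c, exactly as if the instruction had
   executed in place.  */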
c5aa993b 5331
cca44b1b
JB
5332/* NOP instruction (mov r0, r0). */
5333#define ARM_NOP 0xe1a00000
34518530 5334#define THUMB_NOP 0x4600
cca44b1b
JB
5335
5336/* Helper for register reads for displaced stepping. In particular, this
5337 returns the PC as it would be seen by the instruction at its original
5338 location. */
5339
5340ULONGEST
36073a92
YQ
5341displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5342 int regno)
cca44b1b
JB
5343{
5344 ULONGEST ret;
36073a92 5345 CORE_ADDR from = dsc->insn_addr;
cca44b1b 5346
bf9f652a 5347 if (regno == ARM_PC_REGNUM)
cca44b1b 5348 {
4db71c0b
YQ
5349 /* Compute pipeline offset:
5350 - When executing an ARM instruction, PC reads as the address of the
5351 current instruction plus 8.
5352 - When executing a Thumb instruction, PC reads as the address of the
5353 current instruction plus 4. */
5354
36073a92 5355 if (!dsc->is_thumb)
4db71c0b
YQ
5356 from += 8;
5357 else
5358 from += 4;
5359
cca44b1b
JB
5360 if (debug_displaced)
5361 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
5362 (unsigned long) from);
5363 return (ULONGEST) from;
cca44b1b 5364 }
c906108c 5365 else
cca44b1b
JB
5366 {
5367 regcache_cooked_read_unsigned (regs, regno, &ret);
5368 if (debug_displaced)
5369 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5370 regno, (unsigned long) ret);
5371 return ret;
5372 }
c906108c
SS
5373}
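/* Editorial illustration (not part of the original source): if the
   original instruction sits at 0x8000, displaced_read_reg (regs, dsc,
   ARM_PC_REGNUM) returns 0x8008 when dsc->is_thumb is clear and 0x8004
   when it is set; reads of any other register simply come from the
   regcache.  */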
5374
cca44b1b
JB
5375static int
5376displaced_in_arm_mode (struct regcache *regs)
5377{
5378 ULONGEST ps;
9779414d 5379 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 5380
cca44b1b 5381 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 5382
9779414d 5383 return (ps & t_bit) == 0;
cca44b1b 5384}
66e810cd 5385
cca44b1b 5386/* Write to the PC as from a branch instruction. */
c906108c 5387
cca44b1b 5388static void
36073a92
YQ
5389branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5390 ULONGEST val)
c906108c 5391{
36073a92 5392 if (!dsc->is_thumb)
cca44b1b
JB
5393 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5394 architecture versions < 6. */
0963b4bd
MS
5395 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5396 val & ~(ULONGEST) 0x3);
cca44b1b 5397 else
0963b4bd
MS
5398 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5399 val & ~(ULONGEST) 0x1);
cca44b1b 5400}
66e810cd 5401
cca44b1b
JB
5402/* Write to the PC as from a branch-exchange instruction. */
5403
5404static void
5405bx_write_pc (struct regcache *regs, ULONGEST val)
5406{
5407 ULONGEST ps;
9779414d 5408 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
5409
5410 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5411
5412 if ((val & 1) == 1)
c906108c 5413 {
9779414d 5414 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
5415 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5416 }
5417 else if ((val & 2) == 0)
5418 {
9779414d 5419 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 5420 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
5421 }
5422 else
5423 {
cca44b1b
JB
5424 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5425 mode, align dest to 4 bytes). */
5426 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 5427 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 5428 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
5429 }
5430}
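/* Editorial illustration (not part of the original source), showing how
   bx_write_pc treats the low bits of its argument:
     val = 0x8001  ->  Thumb bit set in CPSR,   PC = 0x8000
     val = 0x8004  ->  Thumb bit cleared,       PC = 0x8004
     val = 0x8002  ->  unpredictable encoding:  warn, clear the Thumb bit
                       and write PC = 0x8000.  */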
ed9a39eb 5431
cca44b1b 5432/* Write to the PC as if from a load instruction. */
ed9a39eb 5433
34e8f22d 5434static void
36073a92
YQ
5435load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5436 ULONGEST val)
ed9a39eb 5437{
cca44b1b
JB
5438 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5439 bx_write_pc (regs, val);
5440 else
36073a92 5441 branch_write_pc (regs, dsc, val);
cca44b1b 5442}
be8626e0 5443
cca44b1b
JB
5444/* Write to the PC as if from an ALU instruction. */
5445
5446static void
36073a92
YQ
5447alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5448 ULONGEST val)
cca44b1b 5449{
36073a92 5450 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
5451 bx_write_pc (regs, val);
5452 else
36073a92 5453 branch_write_pc (regs, dsc, val);
cca44b1b
JB
5454}
5455
5456/* Helper for writing to registers for displaced stepping. Writing to the PC
 5457 has varying effects depending on the instruction which does the write:
5458 this is controlled by the WRITE_PC argument. */
5459
5460void
5461displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5462 int regno, ULONGEST val, enum pc_write_style write_pc)
5463{
bf9f652a 5464 if (regno == ARM_PC_REGNUM)
08216dd7 5465 {
cca44b1b
JB
5466 if (debug_displaced)
5467 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5468 (unsigned long) val);
5469 switch (write_pc)
08216dd7 5470 {
cca44b1b 5471 case BRANCH_WRITE_PC:
36073a92 5472 branch_write_pc (regs, dsc, val);
08216dd7
RE
5473 break;
5474
cca44b1b
JB
5475 case BX_WRITE_PC:
5476 bx_write_pc (regs, val);
5477 break;
5478
5479 case LOAD_WRITE_PC:
36073a92 5480 load_write_pc (regs, dsc, val);
cca44b1b
JB
5481 break;
5482
5483 case ALU_WRITE_PC:
36073a92 5484 alu_write_pc (regs, dsc, val);
cca44b1b
JB
5485 break;
5486
5487 case CANNOT_WRITE_PC:
5488 warning (_("Instruction wrote to PC in an unexpected way when "
5489 "single-stepping"));
08216dd7
RE
5490 break;
5491
5492 default:
97b9747c
JB
5493 internal_error (__FILE__, __LINE__,
5494 _("Invalid argument to displaced_write_reg"));
08216dd7 5495 }
b508a996 5496
cca44b1b 5497 dsc->wrote_to_pc = 1;
b508a996 5498 }
ed9a39eb 5499 else
b508a996 5500 {
cca44b1b
JB
5501 if (debug_displaced)
5502 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5503 regno, (unsigned long) val);
5504 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 5505 }
34e8f22d
RE
5506}
5507
cca44b1b
JB
5508/* This function is used to concisely determine if an instruction INSN
5509 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
5510 corresponding fields of BITMASK set to 0b1111. The function
 5511 returns 1 if any of these fields in INSN reference the PC
5512 (also 0b1111, r15), else it returns 0. */
67255d04
RE
5513
5514static int
cca44b1b 5515insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 5516{
cca44b1b 5517 uint32_t lowbit = 1;
67255d04 5518
cca44b1b
JB
5519 while (bitmask != 0)
5520 {
5521 uint32_t mask;
44e1a9eb 5522
cca44b1b
JB
5523 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5524 ;
67255d04 5525
cca44b1b
JB
5526 if (!lowbit)
5527 break;
67255d04 5528
cca44b1b 5529 mask = lowbit * 0xf;
67255d04 5530
cca44b1b
JB
5531 if ((insn & mask) == mask)
5532 return 1;
5533
5534 bitmask &= ~mask;
67255d04
RE
5535 }
5536
cca44b1b
JB
5537 return 0;
5538}
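/* Editorial example (not part of the original source): for the insn
   0xe59ff004, "ldr pc, [pc, #4]", and the bitmask 0x000ff00ful used by
   arm_copy_ldr_str_ldrb_strb, the field at bits 0-3 holds 0x4 and is
   skipped, but the Rd field at bits 12-15 holds 0xf, so
   insn_references_pc returns 1 and the instruction is rewritten rather
   than copied unmodified.  */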
2af48f68 5539
cca44b1b
JB
5540/* The simplest copy function. Many instructions have the same effect no
5541 matter what address they are executed at: in those cases, use this. */
67255d04 5542
cca44b1b 5543static int
7ff120b4
YQ
5544arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5545 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
5546{
5547 if (debug_displaced)
5548 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5549 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5550 iname);
67255d04 5551
cca44b1b 5552 dsc->modinsn[0] = insn;
67255d04 5553
cca44b1b
JB
5554 return 0;
5555}
5556
34518530
YQ
5557static int
5558thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5559 uint16_t insn2, const char *iname,
5560 struct displaced_step_closure *dsc)
5561{
5562 if (debug_displaced)
5563 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5564 "opcode/class '%s' unmodified\n", insn1, insn2,
5565 iname);
5566
5567 dsc->modinsn[0] = insn1;
5568 dsc->modinsn[1] = insn2;
5569 dsc->numinsns = 2;
5570
5571 return 0;
5572}
5573
 5574/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
 5575 modification. */
5576static int
5577thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5578 const char *iname,
5579 struct displaced_step_closure *dsc)
5580{
5581 if (debug_displaced)
5582 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5583 "opcode/class '%s' unmodified\n", insn,
5584 iname);
5585
5586 dsc->modinsn[0] = insn;
5587
5588 return 0;
5589}
5590
cca44b1b
JB
5591/* Preload instructions with immediate offset. */
5592
5593static void
6e39997a 5594cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
5595 struct regcache *regs, struct displaced_step_closure *dsc)
5596{
5597 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5598 if (!dsc->u.preload.immed)
5599 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5600}
5601
7ff120b4
YQ
5602static void
5603install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5604 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 5605{
cca44b1b 5606 ULONGEST rn_val;
cca44b1b
JB
5607 /* Preload instructions:
5608
5609 {pli/pld} [rn, #+/-imm]
5610 ->
5611 {pli/pld} [r0, #+/-imm]. */
5612
36073a92
YQ
5613 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5614 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5615 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
5616 dsc->u.preload.immed = 1;
5617
cca44b1b 5618 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
5619}
5620
cca44b1b 5621static int
7ff120b4 5622arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
5623 struct displaced_step_closure *dsc)
5624{
5625 unsigned int rn = bits (insn, 16, 19);
cca44b1b 5626
7ff120b4
YQ
5627 if (!insn_references_pc (insn, 0x000f0000ul))
5628 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
5629
5630 if (debug_displaced)
5631 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5632 (unsigned long) insn);
5633
7ff120b4
YQ
5634 dsc->modinsn[0] = insn & 0xfff0ffff;
5635
5636 install_preload (gdbarch, regs, dsc, rn);
5637
5638 return 0;
5639}
5640
34518530
YQ
5641static int
5642thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5643 struct regcache *regs, struct displaced_step_closure *dsc)
5644{
5645 unsigned int rn = bits (insn1, 0, 3);
5646 unsigned int u_bit = bit (insn1, 7);
5647 int imm12 = bits (insn2, 0, 11);
5648 ULONGEST pc_val;
5649
5650 if (rn != ARM_PC_REGNUM)
5651 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5652
 5653 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3,
 5654 and PLD (literal) Encoding T1. */
5655 if (debug_displaced)
5656 fprintf_unfiltered (gdb_stdlog,
5657 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5658 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5659 imm12);
5660
5661 if (!u_bit)
5662 imm12 = -1 * imm12;
5663
5664 /* Rewrite instruction {pli/pld} PC imm12 into:
5665 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5666
5667 {pli/pld} [r0, r1]
5668
5669 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5670
5671 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5672 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5673
5674 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5675
5676 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5677 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5678 dsc->u.preload.immed = 0;
5679
5680 /* {pli/pld} [r0, r1] */
5681 dsc->modinsn[0] = insn1 & 0xfff0;
5682 dsc->modinsn[1] = 0xf001;
5683 dsc->numinsns = 2;
5684
5685 dsc->cleanup = &cleanup_preload;
5686 return 0;
5687}
5688
7ff120b4
YQ
5689/* Preload instructions with register offset. */
5690
5691static void
 5692install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
5693 struct displaced_step_closure *dsc, unsigned int rn,
5694 unsigned int rm)
5695{
5696 ULONGEST rn_val, rm_val;
5697
cca44b1b
JB
5698 /* Preload register-offset instructions:
5699
5700 {pli/pld} [rn, rm {, shift}]
5701 ->
5702 {pli/pld} [r0, r1 {, shift}]. */
5703
36073a92
YQ
5704 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5705 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5706 rn_val = displaced_read_reg (regs, dsc, rn);
5707 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5708 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5709 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
5710 dsc->u.preload.immed = 0;
5711
cca44b1b 5712 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
5713}
5714
5715static int
5716arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5717 struct regcache *regs,
5718 struct displaced_step_closure *dsc)
5719{
5720 unsigned int rn = bits (insn, 16, 19);
5721 unsigned int rm = bits (insn, 0, 3);
5722
5723
5724 if (!insn_references_pc (insn, 0x000f000ful))
5725 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5726
5727 if (debug_displaced)
5728 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5729 (unsigned long) insn);
5730
5731 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 5732
7ff120b4 5733 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
5734 return 0;
5735}
5736
5737/* Copy/cleanup coprocessor load and store instructions. */
5738
5739static void
6e39997a 5740cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
5741 struct regcache *regs,
5742 struct displaced_step_closure *dsc)
5743{
36073a92 5744 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5745
5746 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5747
5748 if (dsc->u.ldst.writeback)
5749 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5750}
5751
7ff120b4
YQ
5752static void
5753install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5754 struct displaced_step_closure *dsc,
5755 int writeback, unsigned int rn)
cca44b1b 5756{
cca44b1b 5757 ULONGEST rn_val;
cca44b1b 5758
cca44b1b
JB
5759 /* Coprocessor load/store instructions:
5760
5761 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5762 ->
5763 {stc/stc2} [r0, #+/-imm].
5764
5765 ldc/ldc2 are handled identically. */
5766
36073a92
YQ
5767 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5768 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
5769 /* PC should be 4-byte aligned. */
5770 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
5771 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5772
7ff120b4 5773 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5774 dsc->u.ldst.rn = rn;
5775
7ff120b4
YQ
5776 dsc->cleanup = &cleanup_copro_load_store;
5777}
5778
5779static int
5780arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5781 struct regcache *regs,
5782 struct displaced_step_closure *dsc)
5783{
5784 unsigned int rn = bits (insn, 16, 19);
5785
5786 if (!insn_references_pc (insn, 0x000f0000ul))
5787 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5788
5789 if (debug_displaced)
5790 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5791 "load/store insn %.8lx\n", (unsigned long) insn);
5792
cca44b1b
JB
5793 dsc->modinsn[0] = insn & 0xfff0ffff;
5794
7ff120b4 5795 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
5796
5797 return 0;
5798}
5799
34518530
YQ
5800static int
5801thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5802 uint16_t insn2, struct regcache *regs,
5803 struct displaced_step_closure *dsc)
5804{
5805 unsigned int rn = bits (insn1, 0, 3);
5806
5807 if (rn != ARM_PC_REGNUM)
5808 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5809 "copro load/store", dsc);
5810
5811 if (debug_displaced)
5812 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5813 "load/store insn %.4x%.4x\n", insn1, insn2);
5814
5815 dsc->modinsn[0] = insn1 & 0xfff0;
5816 dsc->modinsn[1] = insn2;
5817 dsc->numinsns = 2;
5818
5819 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5820 doesn't support writeback, so pass 0. */
5821 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5822
5823 return 0;
5824}
5825
cca44b1b
JB
5826/* Clean up branch instructions (actually perform the branch, by setting
5827 PC). */
5828
5829static void
6e39997a 5830cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5831 struct displaced_step_closure *dsc)
5832{
36073a92 5833 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5834 int branch_taken = condition_true (dsc->u.branch.cond, status);
5835 enum pc_write_style write_pc = dsc->u.branch.exchange
5836 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5837
5838 if (!branch_taken)
5839 return;
5840
5841 if (dsc->u.branch.link)
5842 {
8c8dba6d
YQ
 5843 /* The value of LR should be the next insn after the current one. In
 5844 order not to confuse the logic handling a later `bx lr' insn, if the
 5845 current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
5846 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5847
5848 if (dsc->is_thumb)
5849 next_insn_addr |= 0x1;
5850
5851 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5852 CANNOT_WRITE_PC);
cca44b1b
JB
5853 }
5854
bf9f652a 5855 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
5856}
5857
5858/* Copy B/BL/BLX instructions with immediate destinations. */
5859
7ff120b4
YQ
5860static void
5861install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5862 struct displaced_step_closure *dsc,
5863 unsigned int cond, int exchange, int link, long offset)
5864{
5865 /* Implement "BL<cond> <label>" as:
5866
5867 Preparation: cond <- instruction condition
5868 Insn: mov r0, r0 (nop)
5869 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5870
5871 B<cond> similar, but don't set r14 in cleanup. */
5872
5873 dsc->u.branch.cond = cond;
5874 dsc->u.branch.link = link;
5875 dsc->u.branch.exchange = exchange;
5876
2b16b2e3
YQ
5877 dsc->u.branch.dest = dsc->insn_addr;
5878 if (link && exchange)
5879 /* For BLX, offset is computed from the Align (PC, 4). */
5880 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5881
7ff120b4 5882 if (dsc->is_thumb)
2b16b2e3 5883 dsc->u.branch.dest += 4 + offset;
7ff120b4 5884 else
2b16b2e3 5885 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
5886
5887 dsc->cleanup = &cleanup_branch;
5888}
cca44b1b 5889static int
7ff120b4
YQ
5890arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5891 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5892{
5893 unsigned int cond = bits (insn, 28, 31);
5894 int exchange = (cond == 0xf);
5895 int link = exchange || bit (insn, 24);
cca44b1b
JB
5896 long offset;
5897
5898 if (debug_displaced)
5899 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5900 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5901 (unsigned long) insn);
cca44b1b
JB
5902 if (exchange)
5903 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5904 then arrange the switch into Thumb mode. */
5905 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5906 else
5907 offset = bits (insn, 0, 23) << 2;
5908
5909 if (bit (offset, 25))
5910 offset = offset | ~0x3ffffff;
5911
cca44b1b
JB
5912 dsc->modinsn[0] = ARM_NOP;
5913
7ff120b4 5914 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5915 return 0;
5916}
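/* Editorial illustration (not part of the original source): for the
   instruction "bl 0x8010" located at 0x8000 (encoding 0xeb000002), cond
   is 0xe, link is 1, exchange is 0 and the decoded offset is 8.  The
   copied instruction is a plain NOP; cleanup_branch then sets r14 to
   0x8004 (the address of the following instruction) and writes the PC
   as 0x8000 + 8 + 8 = 0x8010 via BRANCH_WRITE_PC.  */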
5917
34518530
YQ
5918static int
5919thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5920 uint16_t insn2, struct regcache *regs,
5921 struct displaced_step_closure *dsc)
5922{
5923 int link = bit (insn2, 14);
5924 int exchange = link && !bit (insn2, 12);
5925 int cond = INST_AL;
5926 long offset = 0;
5927 int j1 = bit (insn2, 13);
5928 int j2 = bit (insn2, 11);
5929 int s = sbits (insn1, 10, 10);
5930 int i1 = !(j1 ^ bit (insn1, 10));
5931 int i2 = !(j2 ^ bit (insn1, 10));
5932
5933 if (!link && !exchange) /* B */
5934 {
5935 offset = (bits (insn2, 0, 10) << 1);
5936 if (bit (insn2, 12)) /* Encoding T4 */
5937 {
5938 offset |= (bits (insn1, 0, 9) << 12)
5939 | (i2 << 22)
5940 | (i1 << 23)
5941 | (s << 24);
5942 cond = INST_AL;
5943 }
5944 else /* Encoding T3 */
5945 {
5946 offset |= (bits (insn1, 0, 5) << 12)
5947 | (j1 << 18)
5948 | (j2 << 19)
5949 | (s << 20);
5950 cond = bits (insn1, 6, 9);
5951 }
5952 }
5953 else
5954 {
5955 offset = (bits (insn1, 0, 9) << 12);
5956 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5957 offset |= exchange ?
5958 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5959 }
5960
5961 if (debug_displaced)
5962 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5963 "%.4x %.4x with offset %.8lx\n",
5964 link ? (exchange) ? "blx" : "bl" : "b",
5965 insn1, insn2, offset);
5966
5967 dsc->modinsn[0] = THUMB_NOP;
5968
5969 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5970 return 0;
5971}
5972
5973/* Copy B Thumb instructions. */
5974static int
5975thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
5976 struct displaced_step_closure *dsc)
5977{
5978 unsigned int cond = 0;
5979 int offset = 0;
5980 unsigned short bit_12_15 = bits (insn, 12, 15);
5981 CORE_ADDR from = dsc->insn_addr;
5982
5983 if (bit_12_15 == 0xd)
5984 {
5985 /* offset = SignExtend (imm8:0, 32) */
5986 offset = sbits ((insn << 1), 0, 8);
5987 cond = bits (insn, 8, 11);
5988 }
5989 else if (bit_12_15 == 0xe) /* Encoding T2 */
5990 {
5991 offset = sbits ((insn << 1), 0, 11);
5992 cond = INST_AL;
5993 }
5994
5995 if (debug_displaced)
5996 fprintf_unfiltered (gdb_stdlog,
5997 "displaced: copying b immediate insn %.4x "
5998 "with offset %d\n", insn, offset);
5999
6000 dsc->u.branch.cond = cond;
6001 dsc->u.branch.link = 0;
6002 dsc->u.branch.exchange = 0;
6003 dsc->u.branch.dest = from + 4 + offset;
6004
6005 dsc->modinsn[0] = THUMB_NOP;
6006
6007 dsc->cleanup = &cleanup_branch;
6008
6009 return 0;
6010}
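/* Editorial illustration (not part of the original source): for the
   Thumb conditional branch "beq .+8" at 0x9000 (encoding 0xd002,
   imm8 = 2), the decoded offset is 4 and cond is EQ, so
   dsc->u.branch.dest becomes 0x9000 + 4 + 4 = 0x9008; cleanup_branch
   performs the branch only if the Z flag is set after the copied NOP
   has been stepped.  */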
6011
cca44b1b
JB
6012/* Copy BX/BLX with register-specified destinations. */
6013
7ff120b4
YQ
6014static void
6015install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6016 struct displaced_step_closure *dsc, int link,
6017 unsigned int cond, unsigned int rm)
cca44b1b 6018{
cca44b1b
JB
6019 /* Implement {BX,BLX}<cond> <reg>" as:
6020
6021 Preparation: cond <- instruction condition
6022 Insn: mov r0, r0 (nop)
6023 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6024
6025 Don't set r14 in cleanup for BX. */
6026
36073a92 6027 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
6028
6029 dsc->u.branch.cond = cond;
6030 dsc->u.branch.link = link;
cca44b1b 6031
7ff120b4 6032 dsc->u.branch.exchange = 1;
cca44b1b
JB
6033
6034 dsc->cleanup = &cleanup_branch;
7ff120b4 6035}
cca44b1b 6036
7ff120b4
YQ
6037static int
6038arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6039 struct regcache *regs, struct displaced_step_closure *dsc)
6040{
6041 unsigned int cond = bits (insn, 28, 31);
6042 /* BX: x12xxx1x
6043 BLX: x12xxx3x. */
6044 int link = bit (insn, 5);
6045 unsigned int rm = bits (insn, 0, 3);
6046
6047 if (debug_displaced)
6048 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6049 (unsigned long) insn);
6050
6051 dsc->modinsn[0] = ARM_NOP;
6052
6053 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
6054 return 0;
6055}
6056
34518530
YQ
6057static int
6058thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6059 struct regcache *regs,
6060 struct displaced_step_closure *dsc)
6061{
6062 int link = bit (insn, 7);
6063 unsigned int rm = bits (insn, 3, 6);
6064
6065 if (debug_displaced)
6066 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6067 (unsigned short) insn);
6068
6069 dsc->modinsn[0] = THUMB_NOP;
6070
6071 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6072
6073 return 0;
6074}
6075
6076
0963b4bd 6077/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
6078
6079static void
6e39997a 6080cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
6081 struct regcache *regs, struct displaced_step_closure *dsc)
6082{
36073a92 6083 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
6084 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6085 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6086 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6087}
6088
6089static int
7ff120b4
YQ
6090arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6091 struct displaced_step_closure *dsc)
cca44b1b
JB
6092{
6093 unsigned int rn = bits (insn, 16, 19);
6094 unsigned int rd = bits (insn, 12, 15);
6095 unsigned int op = bits (insn, 21, 24);
6096 int is_mov = (op == 0xd);
6097 ULONGEST rd_val, rn_val;
cca44b1b
JB
6098
6099 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 6100 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
6101
6102 if (debug_displaced)
6103 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6104 "%.8lx\n", is_mov ? "move" : "ALU",
6105 (unsigned long) insn);
6106
6107 /* Instruction is of form:
6108
6109 <op><cond> rd, [rn,] #imm
6110
6111 Rewrite as:
6112
6113 Preparation: tmp1, tmp2 <- r0, r1;
6114 r0, r1 <- rd, rn
6115 Insn: <op><cond> r0, r1, #imm
6116 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6117 */
6118
36073a92
YQ
6119 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6120 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6121 rn_val = displaced_read_reg (regs, dsc, rn);
6122 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
6123 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6124 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6125 dsc->rd = rd;
6126
6127 if (is_mov)
6128 dsc->modinsn[0] = insn & 0xfff00fff;
6129 else
6130 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6131
6132 dsc->cleanup = &cleanup_alu_imm;
6133
6134 return 0;
6135}
6136
34518530
YQ
6137static int
6138thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6139 uint16_t insn2, struct regcache *regs,
6140 struct displaced_step_closure *dsc)
6141{
6142 unsigned int op = bits (insn1, 5, 8);
6143 unsigned int rn, rm, rd;
6144 ULONGEST rd_val, rn_val;
6145
6146 rn = bits (insn1, 0, 3); /* Rn */
6147 rm = bits (insn2, 0, 3); /* Rm */
6148 rd = bits (insn2, 8, 11); /* Rd */
6149
6150 /* This routine is only called for instruction MOV. */
6151 gdb_assert (op == 0x2 && rn == 0xf);
6152
6153 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6154 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6155
6156 if (debug_displaced)
6157 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6158 "ALU", insn1, insn2);
6159
6160 /* Instruction is of form:
6161
6162 <op><cond> rd, [rn,] #imm
6163
6164 Rewrite as:
6165
6166 Preparation: tmp1, tmp2 <- r0, r1;
6167 r0, r1 <- rd, rn
6168 Insn: <op><cond> r0, r1, #imm
6169 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6170 */
6171
6172 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6173 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6174 rn_val = displaced_read_reg (regs, dsc, rn);
6175 rd_val = displaced_read_reg (regs, dsc, rd);
6176 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6177 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6178 dsc->rd = rd;
6179
6180 dsc->modinsn[0] = insn1;
6181 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6182 dsc->numinsns = 2;
6183
6184 dsc->cleanup = &cleanup_alu_imm;
6185
6186 return 0;
6187}
6188
cca44b1b
JB
6189/* Copy/cleanup arithmetic/logic insns with register RHS. */
6190
6191static void
6e39997a 6192cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
6193 struct regcache *regs, struct displaced_step_closure *dsc)
6194{
6195 ULONGEST rd_val;
6196 int i;
6197
36073a92 6198 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
6199
6200 for (i = 0; i < 3; i++)
6201 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6202
6203 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6204}
6205
7ff120b4
YQ
6206static void
6207install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6208 struct displaced_step_closure *dsc,
6209 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 6210{
cca44b1b 6211 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 6212
cca44b1b
JB
6213 /* Instruction is of form:
6214
6215 <op><cond> rd, [rn,] rm [, <shift>]
6216
6217 Rewrite as:
6218
6219 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6220 r0, r1, r2 <- rd, rn, rm
6221 Insn: <op><cond> r0, r1, r2 [, <shift>]
6222 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6223 */
6224
36073a92
YQ
6225 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6226 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6227 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6228 rd_val = displaced_read_reg (regs, dsc, rd);
6229 rn_val = displaced_read_reg (regs, dsc, rn);
6230 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
6231 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6232 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6233 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6234 dsc->rd = rd;
6235
7ff120b4
YQ
6236 dsc->cleanup = &cleanup_alu_reg;
6237}
6238
6239static int
6240arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6241 struct displaced_step_closure *dsc)
6242{
6243 unsigned int op = bits (insn, 21, 24);
6244 int is_mov = (op == 0xd);
6245
6246 if (!insn_references_pc (insn, 0x000ff00ful))
6247 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6248
6249 if (debug_displaced)
6250 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6251 is_mov ? "move" : "ALU", (unsigned long) insn);
6252
cca44b1b
JB
6253 if (is_mov)
6254 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6255 else
6256 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6257
7ff120b4
YQ
6258 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6259 bits (insn, 0, 3));
cca44b1b
JB
6260 return 0;
6261}
6262
34518530
YQ
6263static int
6264thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6265 struct regcache *regs,
6266 struct displaced_step_closure *dsc)
6267{
6268 unsigned rn, rm, rd;
6269
6270 rd = bits (insn, 3, 6);
6271 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6272 rm = 2;
6273
6274 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6275 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6276
6277 if (debug_displaced)
6278 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6279 "ALU", (unsigned short) insn);
6280
6281 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6282
6283 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6284
6285 return 0;
6286}
6287
cca44b1b
JB
6288/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6289
6290static void
6e39997a 6291cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
6292 struct regcache *regs,
6293 struct displaced_step_closure *dsc)
6294{
36073a92 6295 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
6296 int i;
6297
6298 for (i = 0; i < 4; i++)
6299 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6300
6301 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6302}
6303
7ff120b4
YQ
6304static void
6305install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6306 struct displaced_step_closure *dsc,
6307 unsigned int rd, unsigned int rn, unsigned int rm,
6308 unsigned rs)
cca44b1b 6309{
7ff120b4 6310 int i;
cca44b1b 6311 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 6312
cca44b1b
JB
6313 /* Instruction is of form:
6314
6315 <op><cond> rd, [rn,] rm, <shift> rs
6316
6317 Rewrite as:
6318
6319 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6320 r0, r1, r2, r3 <- rd, rn, rm, rs
6321 Insn: <op><cond> r0, r1, r2, <shift> r3
6322 Cleanup: tmp5 <- r0
6323 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6324 rd <- tmp5
6325 */
6326
6327 for (i = 0; i < 4; i++)
36073a92 6328 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 6329
36073a92
YQ
6330 rd_val = displaced_read_reg (regs, dsc, rd);
6331 rn_val = displaced_read_reg (regs, dsc, rn);
6332 rm_val = displaced_read_reg (regs, dsc, rm);
6333 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
6334 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6335 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6336 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6337 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6338 dsc->rd = rd;
7ff120b4
YQ
6339 dsc->cleanup = &cleanup_alu_shifted_reg;
6340}
6341
6342static int
6343arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6344 struct regcache *regs,
6345 struct displaced_step_closure *dsc)
6346{
6347 unsigned int op = bits (insn, 21, 24);
6348 int is_mov = (op == 0xd);
6349 unsigned int rd, rn, rm, rs;
6350
6351 if (!insn_references_pc (insn, 0x000fff0ful))
6352 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6353
6354 if (debug_displaced)
6355 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6356 "%.8lx\n", is_mov ? "move" : "ALU",
6357 (unsigned long) insn);
6358
6359 rn = bits (insn, 16, 19);
6360 rm = bits (insn, 0, 3);
6361 rs = bits (insn, 8, 11);
6362 rd = bits (insn, 12, 15);
cca44b1b
JB
6363
6364 if (is_mov)
6365 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6366 else
6367 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6368
7ff120b4 6369 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
6370
6371 return 0;
6372}
6373
6374/* Clean up load instructions. */
6375
6376static void
6e39997a 6377cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
6378 struct displaced_step_closure *dsc)
6379{
6380 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 6381
36073a92 6382 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 6383 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
6384 rt_val2 = displaced_read_reg (regs, dsc, 1);
6385 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
6386
6387 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6388 if (dsc->u.ldst.xfersize > 4)
6389 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6390 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6391 if (!dsc->u.ldst.immed)
6392 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6393
6394 /* Handle register writeback. */
6395 if (dsc->u.ldst.writeback)
6396 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6397 /* Put result in right place. */
6398 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6399 if (dsc->u.ldst.xfersize == 8)
6400 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6401}
6402
6403/* Clean up store instructions. */
6404
6405static void
6e39997a 6406cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
6407 struct displaced_step_closure *dsc)
6408{
36073a92 6409 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
6410
6411 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6412 if (dsc->u.ldst.xfersize > 4)
6413 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6414 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6415 if (!dsc->u.ldst.immed)
6416 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6417 if (!dsc->u.ldst.restore_r4)
6418 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6419
6420 /* Writeback. */
6421 if (dsc->u.ldst.writeback)
6422 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6423}
6424
6425/* Copy "extra" load/store instructions. These are halfword/doubleword
6426 transfers, which have a different encoding to byte/word transfers. */
6427
6428static int
7ff120b4
YQ
 6429arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6430 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6431{
6432 unsigned int op1 = bits (insn, 20, 24);
6433 unsigned int op2 = bits (insn, 5, 6);
6434 unsigned int rt = bits (insn, 12, 15);
6435 unsigned int rn = bits (insn, 16, 19);
6436 unsigned int rm = bits (insn, 0, 3);
6437 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6438 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6439 int immed = (op1 & 0x4) != 0;
6440 int opcode;
6441 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
6442
6443 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 6444 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
6445
6446 if (debug_displaced)
6447 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6448 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6449 (unsigned long) insn);
6450
6451 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6452
6453 if (opcode < 0)
6454 internal_error (__FILE__, __LINE__,
6455 _("copy_extra_ld_st: instruction decode error"));
6456
36073a92
YQ
6457 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6458 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6459 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 6460 if (!immed)
36073a92 6461 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 6462
36073a92 6463 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 6464 if (bytesize[opcode] == 8)
36073a92
YQ
6465 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6466 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 6467 if (!immed)
36073a92 6468 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
6469
6470 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6471 if (bytesize[opcode] == 8)
6472 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6473 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6474 if (!immed)
6475 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6476
6477 dsc->rd = rt;
6478 dsc->u.ldst.xfersize = bytesize[opcode];
6479 dsc->u.ldst.rn = rn;
6480 dsc->u.ldst.immed = immed;
6481 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6482 dsc->u.ldst.restore_r4 = 0;
6483
6484 if (immed)
6485 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6486 ->
6487 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6488 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6489 else
6490 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6491 ->
6492 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6493 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6494
6495 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6496
6497 return 0;
6498}
6499
0f6f04ba 6500/* Copy byte/half word/word loads and stores. */
cca44b1b 6501
7ff120b4 6502static void
0f6f04ba
YQ
6503install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6504 struct displaced_step_closure *dsc, int load,
6505 int immed, int writeback, int size, int usermode,
6506 int rt, int rm, int rn)
cca44b1b 6507{
cca44b1b 6508 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 6509
36073a92
YQ
6510 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6511 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 6512 if (!immed)
36073a92 6513 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 6514 if (!load)
36073a92 6515 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 6516
36073a92
YQ
6517 rt_val = displaced_read_reg (regs, dsc, rt);
6518 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 6519 if (!immed)
36073a92 6520 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
6521
6522 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6523 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6524 if (!immed)
6525 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 6526 dsc->rd = rt;
0f6f04ba 6527 dsc->u.ldst.xfersize = size;
cca44b1b
JB
6528 dsc->u.ldst.rn = rn;
6529 dsc->u.ldst.immed = immed;
7ff120b4 6530 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
6531
6532 /* To write PC we can do:
6533
494e194e
YQ
6534 Before this sequence of instructions:
6535 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
 6536 r2 is the Rn value got from displaced_read_reg.
6537
6538 Insn1: push {pc} Write address of STR instruction + offset on stack
6539 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6540 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6541 = addr(Insn1) + offset - addr(Insn3) - 8
6542 = offset - 16
6543 Insn4: add r4, r4, #8 r4 = offset - 8
6544 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6545 = from + offset
6546 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
6547
6548 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
6549 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6550 of this can be found in Section "Saving from r15" in
6551 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 6552
7ff120b4
YQ
6553 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6554}
6555
34518530
YQ
6556
6557static int
6558thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6559 uint16_t insn2, struct regcache *regs,
6560 struct displaced_step_closure *dsc, int size)
6561{
6562 unsigned int u_bit = bit (insn1, 7);
6563 unsigned int rt = bits (insn2, 12, 15);
6564 int imm12 = bits (insn2, 0, 11);
6565 ULONGEST pc_val;
6566
6567 if (debug_displaced)
6568 fprintf_unfiltered (gdb_stdlog,
6569 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6570 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6571 imm12);
6572
6573 if (!u_bit)
6574 imm12 = -1 * imm12;
6575
6576 /* Rewrite instruction LDR Rt imm12 into:
6577
6578 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6579
6580 LDR R0, R2, R3,
6581
6582 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6583
6584
6585 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6586 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6587 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6588
6589 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6590
6591 pc_val = pc_val & 0xfffffffc;
6592
6593 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6594 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6595
6596 dsc->rd = rt;
6597
6598 dsc->u.ldst.xfersize = size;
6599 dsc->u.ldst.immed = 0;
6600 dsc->u.ldst.writeback = 0;
6601 dsc->u.ldst.restore_r4 = 0;
6602
6603 /* LDR R0, R2, R3 */
6604 dsc->modinsn[0] = 0xf852;
6605 dsc->modinsn[1] = 0x3;
6606 dsc->numinsns = 2;
6607
6608 dsc->cleanup = &cleanup_load;
6609
6610 return 0;
6611}
6612
6613static int
6614thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6615 uint16_t insn2, struct regcache *regs,
6616 struct displaced_step_closure *dsc,
6617 int writeback, int immed)
6618{
6619 unsigned int rt = bits (insn2, 12, 15);
6620 unsigned int rn = bits (insn1, 0, 3);
6621 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6622 /* In LDR (register), there is also a register Rm, which is not allowed to
6623 be PC, so we don't have to check it. */
6624
6625 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6626 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6627 dsc);
6628
6629 if (debug_displaced)
6630 fprintf_unfiltered (gdb_stdlog,
6631 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6632 rt, rn, insn1, insn2);
6633
6634 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6635 0, rt, rm, rn);
6636
6637 dsc->u.ldst.restore_r4 = 0;
6638
6639 if (immed)
6640 /* ldr[b]<cond> rt, [rn, #imm], etc.
6641 ->
6642 ldr[b]<cond> r0, [r2, #imm]. */
6643 {
6644 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6645 dsc->modinsn[1] = insn2 & 0x0fff;
6646 }
6647 else
6648 /* ldr[b]<cond> rt, [rn, rm], etc.
6649 ->
6650 ldr[b]<cond> r0, [r2, r3]. */
6651 {
6652 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6653 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6654 }
6655
6656 dsc->numinsns = 2;
6657
6658 return 0;
6659}
6660
6661
7ff120b4
YQ
6662static int
6663arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6664 struct regcache *regs,
6665 struct displaced_step_closure *dsc,
0f6f04ba 6666 int load, int size, int usermode)
7ff120b4
YQ
6667{
6668 int immed = !bit (insn, 25);
6669 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6670 unsigned int rt = bits (insn, 12, 15);
6671 unsigned int rn = bits (insn, 16, 19);
6672 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6673
6674 if (!insn_references_pc (insn, 0x000ff00ful))
6675 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6676
6677 if (debug_displaced)
6678 fprintf_unfiltered (gdb_stdlog,
6679 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
6680 load ? (size == 1 ? "ldrb" : "ldr")
6681 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
6682 rt, rn,
6683 (unsigned long) insn);
6684
0f6f04ba
YQ
6685 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6686 usermode, rt, rm, rn);
7ff120b4 6687
bf9f652a 6688 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
6689 {
6690 dsc->u.ldst.restore_r4 = 0;
6691
6692 if (immed)
6693 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6694 ->
6695 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6696 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6697 else
6698 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6699 ->
6700 {ldr,str}[b]<cond> r0, [r2, r3]. */
6701 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6702 }
6703 else
6704 {
6705 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6706 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
6707 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6708 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
6709 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6710 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6711 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6712
6713 /* As above. */
6714 if (immed)
6715 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6716 else
6717 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6718
cca44b1b
JB
6719 dsc->numinsns = 6;
6720 }
6721
6722 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6723
6724 return 0;
6725}
6726
6727/* Cleanup LDM instructions with fully-populated register list. This is an
6728 unfortunate corner case: it's impossible to implement correctly by modifying
6729 the instruction. The issue is as follows: we have an instruction,
6730
6731 ldm rN, {r0-r15}
6732
6733 which we must rewrite to avoid loading PC. A possible solution would be to
6734 do the load in two halves, something like (with suitable cleanup
6735 afterwards):
6736
6737 mov r8, rN
6738 ldm[id][ab] r8!, {r0-r7}
6739 str r7, <temp>
6740 ldm[id][ab] r8, {r7-r14}
6741 <bkpt>
6742
6743 but at present there's no suitable place for <temp>, since the scratch space
6744 is overwritten before the cleanup routine is called. For now, we simply
6745 emulate the instruction. */
6746
6747static void
6748cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6749 struct displaced_step_closure *dsc)
6750{
cca44b1b
JB
6751 int inc = dsc->u.block.increment;
6752 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6753 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6754 uint32_t regmask = dsc->u.block.regmask;
6755 int regno = inc ? 0 : 15;
6756 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6757 int exception_return = dsc->u.block.load && dsc->u.block.user
6758 && (regmask & 0x8000) != 0;
36073a92 6759 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
6760 int do_transfer = condition_true (dsc->u.block.cond, status);
6761 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6762
6763 if (!do_transfer)
6764 return;
6765
6766 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6767 sensible we can do here. Complain loudly. */
6768 if (exception_return)
6769 error (_("Cannot single-step exception return"));
6770
6771 /* We don't handle any stores here for now. */
6772 gdb_assert (dsc->u.block.load != 0);
6773
6774 if (debug_displaced)
6775 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6776 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6777 dsc->u.block.increment ? "inc" : "dec",
6778 dsc->u.block.before ? "before" : "after");
6779
6780 while (regmask)
6781 {
6782 uint32_t memword;
6783
6784 if (inc)
bf9f652a 6785 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
6786 regno++;
6787 else
6788 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6789 regno--;
6790
6791 xfer_addr += bump_before;
6792
6793 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6794 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6795
6796 xfer_addr += bump_after;
6797
6798 regmask &= ~(1 << regno);
6799 }
6800
6801 if (dsc->u.block.writeback)
6802 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6803 CANNOT_WRITE_PC);
6804}
6805
6806/* Clean up an STM which included the PC in the register list. */
6807
6808static void
6809cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6810 struct displaced_step_closure *dsc)
6811{
36073a92 6812 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
6813 int store_executed = condition_true (dsc->u.block.cond, status);
6814 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6815 CORE_ADDR stm_insn_addr;
6816 uint32_t pc_val;
6817 long offset;
6818 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6819
6820 /* If condition code fails, there's nothing else to do. */
6821 if (!store_executed)
6822 return;
6823
6824 if (dsc->u.block.increment)
6825 {
6826 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6827
6828 if (dsc->u.block.before)
6829 pc_stored_at += 4;
6830 }
6831 else
6832 {
6833 pc_stored_at = dsc->u.block.xfer_addr;
6834
6835 if (dsc->u.block.before)
6836 pc_stored_at -= 4;
6837 }
6838
6839 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6840 stm_insn_addr = dsc->scratch_base;
6841 offset = pc_val - stm_insn_addr;
6842
6843 if (debug_displaced)
6844 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
6845 "STM instruction\n", offset);
6846
6847 /* Rewrite the stored PC to the proper value for the non-displaced original
6848 instruction. */
6849 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6850 dsc->insn_addr + offset);
6851}
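
/* Worked example of the offset auto-detection above (illustrative only).
   Suppose the original "stmia sp!, {r0, pc}" sits at 0x8100 and its copy
   runs at scratch address 0x20000.  A core that stores PC+8 writes 0x20008
   into the PC slot, so offset = 0x20008 - 0x20000 = 8, and the cleanup
   rewrites that slot with 0x8100 + 8 = 0x8108 -- the value the instruction
   would have stored had it not been displaced.  A core that stores PC+12
   yields offset 12 and 0x810c instead.  */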
6852
6853/* Clean up an LDM which includes the PC in the register list. We clumped all
6854 the registers in the transferred list into a contiguous range r0...rX (to
6855 avoid loading PC directly and losing control of the debugged program), so we
6856 must undo that here. */
6857
6858static void
6e39997a 6859cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
6860 struct regcache *regs,
6861 struct displaced_step_closure *dsc)
6862{
36073a92 6863 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b 6864 int load_executed = condition_true (dsc->u.block.cond, status), i;
bf9f652a 6865 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
6866 unsigned int regs_loaded = bitcount (mask);
6867 unsigned int num_to_shuffle = regs_loaded, clobbered;
6868
6869 /* The method employed here will fail if the register list is fully populated
6870 (we need to avoid loading PC directly). */
6871 gdb_assert (num_to_shuffle < 16);
6872
6873 if (!load_executed)
6874 return;
6875
6876 clobbered = (1 << num_to_shuffle) - 1;
6877
6878 while (num_to_shuffle > 0)
6879 {
6880 if ((mask & (1 << write_reg)) != 0)
6881 {
6882 unsigned int read_reg = num_to_shuffle - 1;
6883
6884 if (read_reg != write_reg)
6885 {
36073a92 6886 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
6887 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6888 if (debug_displaced)
6889 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6890 "loaded register r%d to r%d\n"), read_reg,
6891 write_reg);
6892 }
6893 else if (debug_displaced)
6894 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6895 "r%d already in the right place\n"),
6896 write_reg);
6897
6898 clobbered &= ~(1 << write_reg);
6899
6900 num_to_shuffle--;
6901 }
6902
6903 write_reg--;
6904 }
6905
6906 /* Restore any registers we scribbled over. */
6907 for (write_reg = 0; clobbered != 0; write_reg++)
6908 {
6909 if ((clobbered & (1 << write_reg)) != 0)
6910 {
6911 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6912 CANNOT_WRITE_PC);
6913 if (debug_displaced)
6914 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6915 "clobbered register r%d\n"), write_reg);
6916 clobbered &= ~(1 << write_reg);
6917 }
6918 }
6919
6920 /* Perform register writeback manually. */
6921 if (dsc->u.block.writeback)
6922 {
6923 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6924
6925 if (dsc->u.block.increment)
6926 new_rn_val += regs_loaded * 4;
6927 else
6928 new_rn_val -= regs_loaded * 4;
6929
6930 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6931 CANNOT_WRITE_PC);
6932 }
6933}
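
/* Worked example of the shuffle above (illustrative only).  Suppose the
   original instruction was "ldmia r6, {r1, r5, pc}".  The executed copy
   loaded the contiguous range r0-r2 instead, so r0 holds the value destined
   for r1, r1 the value for r5, and r2 the value for the PC.  The loop walks
   write_reg downwards and performs r2 -> pc, r1 -> r5, r0 -> r1; afterwards
   r0 and r2 (clobbered but not in the original list) are restored from
   dsc->tmp[], and any writeback of r6 is applied manually.  */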
6934
6935/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6936 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6937
6938static int
7ff120b4
YQ
6939arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6940 struct regcache *regs,
6941 struct displaced_step_closure *dsc)
cca44b1b
JB
6942{
6943 int load = bit (insn, 20);
6944 int user = bit (insn, 22);
6945 int increment = bit (insn, 23);
6946 int before = bit (insn, 24);
6947 int writeback = bit (insn, 21);
6948 int rn = bits (insn, 16, 19);
cca44b1b 6949
0963b4bd
MS
6950 /* Block transfers which don't mention PC can be run directly
6951 out-of-line. */
bf9f652a 6952 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6953 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6954
bf9f652a 6955 if (rn == ARM_PC_REGNUM)
cca44b1b 6956 {
0963b4bd
MS
6957 warning (_("displaced: Unpredictable LDM or STM with "
6958 "base register r15"));
7ff120b4 6959 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6960 }
6961
6962 if (debug_displaced)
6963 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6964 "%.8lx\n", (unsigned long) insn);
6965
36073a92 6966 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6967 dsc->u.block.rn = rn;
6968
6969 dsc->u.block.load = load;
6970 dsc->u.block.user = user;
6971 dsc->u.block.increment = increment;
6972 dsc->u.block.before = before;
6973 dsc->u.block.writeback = writeback;
6974 dsc->u.block.cond = bits (insn, 28, 31);
6975
6976 dsc->u.block.regmask = insn & 0xffff;
6977
6978 if (load)
6979 {
6980 if ((insn & 0xffff) == 0xffff)
6981 {
6982 /* LDM with a fully-populated register list. This case is
6983 particularly tricky. Implement for now by fully emulating the
6984 instruction (which might not behave perfectly in all cases, but
6985 these instructions should be rare enough for that not to matter
6986 too much). */
6987 dsc->modinsn[0] = ARM_NOP;
6988
6989 dsc->cleanup = &cleanup_block_load_all;
6990 }
6991 else
6992 {
6993 /* LDM of a list of registers which includes PC. Implement by
6994 rewriting the list of registers to be transferred into a
6995 contiguous chunk r0...rX before doing the transfer, then shuffling
6996 registers into the correct places in the cleanup routine. */
6997 unsigned int regmask = insn & 0xffff;
6998 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
6999 unsigned int to = 0, from = 0, i, new_rn;
7000
7001 for (i = 0; i < num_in_list; i++)
36073a92 7002 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
7003
7004 /* Writeback makes things complicated. We need to avoid clobbering
7005 the base register with one of the registers in our modified
7006 register list, but just using a different register can't work in
7007 all cases, e.g.:
7008
7009 ldm r14!, {r0-r13,pc}
7010
7011 which would need to be rewritten as:
7012
7013 ldm rN!, {r0-r14}
7014
7015 but that can't work, because there's no free register for N.
7016
7017 Solve this by turning off the writeback bit, and emulating
7018 writeback manually in the cleanup routine. */
7019
7020 if (writeback)
7021 insn &= ~(1 << 21);
7022
7023 new_regmask = (1 << num_in_list) - 1;
7024
7025 if (debug_displaced)
7026 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7027 "{..., pc}: original reg list %.4x, modified "
7028 "list %.4x\n"), rn, writeback ? "!" : "",
7029 (int) insn & 0xffff, new_regmask);
7030
7031 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7032
7033 dsc->cleanup = &cleanup_block_load_pc;
7034 }
7035 }
7036 else
7037 {
7038 /* STM of a list of registers which includes PC. Run the instruction
7039 as-is, but out of line: this will store the wrong value for the PC,
7040 so we must manually fix up the memory in the cleanup routine.
7041 Doing things this way has the advantage that we can auto-detect
7042 the offset of the PC write (which is architecture-dependent) in
7043 the cleanup routine. */
7044 dsc->modinsn[0] = insn;
7045
7046 dsc->cleanup = &cleanup_block_store_pc;
7047 }
7048
7049 return 0;
7050}
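
/* Illustrative sketch, not part of the original file: the rewrite performed
   above for a concrete LDM.  The helper name is hypothetical; it reuses the
   file's bitcount ().  For "ldmia r2!, {r3, r5, pc}" (0xe8b28028) the
   three-bit mask becomes r0-r2 and writeback is dropped, giving 0xe8920007,
   i.e. "ldmia r2, {r0, r1, r2}".  */

static uint32_t
example_rewrite_ldm_pc (uint32_t insn)
{
  unsigned int num_in_list = bitcount (insn & 0xffff);
  uint32_t new_regmask = (1 << num_in_list) - 1;

  insn &= ~(1u << 21);			/* Clear the writeback bit.  */
  return (insn & ~0xffffu) | new_regmask;
}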
7051
34518530
YQ
7052static int
7053thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7054 struct regcache *regs,
7055 struct displaced_step_closure *dsc)
cca44b1b 7056{
34518530
YQ
7057 int rn = bits (insn1, 0, 3);
7058 int load = bit (insn1, 4);
7059 int writeback = bit (insn1, 5);
cca44b1b 7060
34518530
YQ
7061 /* Block transfers which don't mention PC can be run directly
7062 out-of-line. */
7063 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7064 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 7065
34518530
YQ
7066 if (rn == ARM_PC_REGNUM)
7067 {
7068 warning (_("displaced: Unpredictable LDM or STM with "
7069 "base register r15"));
7070 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7071 "unpredictable ldm/stm", dsc);
7072 }
cca44b1b
JB
7073
7074 if (debug_displaced)
34518530
YQ
7075 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7076 "%.4x%.4x\n", insn1, insn2);
cca44b1b 7077
34518530
YQ
7079   /* Clear bit 13, since it should always be zero.  */
7079 dsc->u.block.regmask = (insn2 & 0xdfff);
7080 dsc->u.block.rn = rn;
cca44b1b 7081
34518530
YQ
7082 dsc->u.block.load = load;
7083 dsc->u.block.user = 0;
7084 dsc->u.block.increment = bit (insn1, 7);
7085 dsc->u.block.before = bit (insn1, 8);
7086 dsc->u.block.writeback = writeback;
7087 dsc->u.block.cond = INST_AL;
7088 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 7089
34518530
YQ
7090 if (load)
7091 {
7092 if (dsc->u.block.regmask == 0xffff)
7093 {
7094 	  /* This case cannot happen: bit 13 was cleared above, so the
	     register mask is never 0xffff.  */
7095 gdb_assert (0);
7096 }
7097 else
7098 {
7099 unsigned int regmask = dsc->u.block.regmask;
7100 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7101 unsigned int to = 0, from = 0, i, new_rn;
7102
7103 for (i = 0; i < num_in_list; i++)
7104 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7105
7106 if (writeback)
7107 insn1 &= ~(1 << 5);
7108
7109 new_regmask = (1 << num_in_list) - 1;
7110
7111 if (debug_displaced)
7112 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7113 "{..., pc}: original reg list %.4x, modified "
7114 "list %.4x\n"), rn, writeback ? "!" : "",
7115 (int) dsc->u.block.regmask, new_regmask);
7116
7117 dsc->modinsn[0] = insn1;
7118 dsc->modinsn[1] = (new_regmask & 0xffff);
7119 dsc->numinsns = 2;
7120
7121 dsc->cleanup = &cleanup_block_load_pc;
7122 }
7123 }
7124 else
7125 {
7126 dsc->modinsn[0] = insn1;
7127 dsc->modinsn[1] = insn2;
7128 dsc->numinsns = 2;
7129 dsc->cleanup = &cleanup_block_store_pc;
7130 }
7131 return 0;
7132}
7133
7134/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7135 for Linux, where some SVC instructions must be treated specially. */
7136
7137static void
7138cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7139 struct displaced_step_closure *dsc)
7140{
7141 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7142
7143 if (debug_displaced)
7144 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7145 "%.8lx\n", (unsigned long) resume_addr);
7146
7147 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7148}
7149
7150
7151/* Common copy routine for svc instruction.  */
7152
7153static int
7154install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7155 struct displaced_step_closure *dsc)
7156{
7157 /* Preparation: none.
7158 Insn: unmodified svc.
7159 Cleanup: pc <- insn_addr + insn_size. */
7160
7161 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7162 instruction. */
7163 dsc->wrote_to_pc = 1;
7164
7165 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
7166 if (dsc->u.svc.copy_svc_os)
7167 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7168 else
7169 {
7170 dsc->cleanup = &cleanup_svc;
7171 return 0;
7172 }
34518530
YQ
7173}
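
/* Worked example (illustrative only): for an ARM "svc #0" copied from
   0x10504, insn_size is 4, so unless OS-specific code overrides the
   handling, cleanup_svc resumes at 0x10508; for a Thumb "svc #0" the
   instruction is 2 bytes and the resume address is 0x10506.  */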
7174
7175static int
7176arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7177 struct regcache *regs, struct displaced_step_closure *dsc)
7178{
7179
7180 if (debug_displaced)
7181 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7182 (unsigned long) insn);
7183
7184 dsc->modinsn[0] = insn;
7185
7186 return install_svc (gdbarch, regs, dsc);
7187}
7188
7189static int
7190thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7191 struct regcache *regs, struct displaced_step_closure *dsc)
7192{
7193
7194 if (debug_displaced)
7195 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7196 insn);
bd18283a 7197
34518530
YQ
7198 dsc->modinsn[0] = insn;
7199
7200 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
7201}
7202
7203/* Copy undefined instructions. */
7204
7205static int
7ff120b4
YQ
7206arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7207 struct displaced_step_closure *dsc)
cca44b1b
JB
7208{
7209 if (debug_displaced)
0963b4bd
MS
7210 fprintf_unfiltered (gdb_stdlog,
7211 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
7212 (unsigned long) insn);
7213
7214 dsc->modinsn[0] = insn;
7215
7216 return 0;
7217}
7218
34518530
YQ
7219static int
7220thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7221 struct displaced_step_closure *dsc)
7222{
7223
7224 if (debug_displaced)
7225 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7226 "%.4x %.4x\n", (unsigned short) insn1,
7227 (unsigned short) insn2);
7228
7229 dsc->modinsn[0] = insn1;
7230 dsc->modinsn[1] = insn2;
7231 dsc->numinsns = 2;
7232
7233 return 0;
7234}
7235
cca44b1b
JB
7236/* Copy unpredictable instructions. */
7237
7238static int
7ff120b4
YQ
7239arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7240 struct displaced_step_closure *dsc)
cca44b1b
JB
7241{
7242 if (debug_displaced)
7243 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7244 "%.8lx\n", (unsigned long) insn);
7245
7246 dsc->modinsn[0] = insn;
7247
7248 return 0;
7249}
7250
7251/* The decode_* functions are instruction decoding helpers. They mostly follow
7252 the presentation in the ARM ARM. */
7253
7254static int
7ff120b4
YQ
7255arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7256 struct regcache *regs,
7257 struct displaced_step_closure *dsc)
cca44b1b
JB
7258{
7259 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7260 unsigned int rn = bits (insn, 16, 19);
7261
7262 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 7263 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 7264 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 7265 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 7266 else if ((op1 & 0x60) == 0x20)
7ff120b4 7267 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 7268 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
7269 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7270 dsc);
cca44b1b 7271 else if ((op1 & 0x77) == 0x41)
7ff120b4 7272 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 7273 else if ((op1 & 0x77) == 0x45)
7ff120b4 7274 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
7275 else if ((op1 & 0x77) == 0x51)
7276 {
7277 if (rn != 0xf)
7ff120b4 7278 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 7279 else
7ff120b4 7280 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
7281 }
7282 else if ((op1 & 0x77) == 0x55)
7ff120b4 7283 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
7284 else if (op1 == 0x57)
7285 switch (op2)
7286 {
7ff120b4
YQ
7287 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7288 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7289 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7290 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7291 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
7292 }
7293 else if ((op1 & 0x63) == 0x43)
7ff120b4 7294 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
7295 else if ((op2 & 0x1) == 0x0)
7296 switch (op1 & ~0x80)
7297 {
7298 case 0x61:
7ff120b4 7299 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 7300 case 0x65:
7ff120b4 7301 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
7302 case 0x71: case 0x75:
7303 /* pld/pldw reg. */
7ff120b4 7304 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 7305 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 7306 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 7307 default:
7ff120b4 7308 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7309 }
7310 else
7ff120b4 7311 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
7312}
7313
7314static int
7ff120b4
YQ
7315arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7316 struct regcache *regs,
7317 struct displaced_step_closure *dsc)
cca44b1b
JB
7318{
7319 if (bit (insn, 27) == 0)
7ff120b4 7320 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
7321 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7322 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7323 {
7324 case 0x0: case 0x2:
7ff120b4 7325 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
7326
7327 case 0x1: case 0x3:
7ff120b4 7328 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
7329
7330 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 7331 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
7332
7333 case 0x8:
7334 switch ((insn & 0xe00000) >> 21)
7335 {
7336 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7337 /* stc/stc2. */
7ff120b4 7338 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
7339
7340 case 0x2:
7ff120b4 7341 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
7342
7343 default:
7ff120b4 7344 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7345 }
7346
7347 case 0x9:
7348 {
7349 int rn_f = (bits (insn, 16, 19) == 0xf);
7350 switch ((insn & 0xe00000) >> 21)
7351 {
7352 case 0x1: case 0x3:
7353 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
7354 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7355 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
7356
7357 case 0x2:
7ff120b4 7358 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
7359
7360 case 0x4: case 0x5: case 0x6: case 0x7:
7361 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
7362 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7363 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7364
7365 default:
7ff120b4 7366 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7367 }
7368 }
7369
7370 case 0xa:
7ff120b4 7371 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
7372
7373 case 0xb:
7374 if (bits (insn, 16, 19) == 0xf)
7375 /* ldc/ldc2 lit. */
7ff120b4 7376 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 7377 else
7ff120b4 7378 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7379
7380 case 0xc:
7381 if (bit (insn, 4))
7ff120b4 7382 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 7383 else
7ff120b4 7384 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
7385
7386 case 0xd:
7387 if (bit (insn, 4))
7ff120b4 7388 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 7389 else
7ff120b4 7390 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
7391
7392 default:
7ff120b4 7393 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7394 }
7395}
7396
7397/* Decode miscellaneous instructions in dp/misc encoding space. */
7398
7399static int
7ff120b4
YQ
7400arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7401 struct regcache *regs,
7402 struct displaced_step_closure *dsc)
cca44b1b
JB
7403{
7404 unsigned int op2 = bits (insn, 4, 6);
7405 unsigned int op = bits (insn, 21, 22);
7406 unsigned int op1 = bits (insn, 16, 19);
7407
7408 switch (op2)
7409 {
7410 case 0x0:
7ff120b4 7411 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
7412
7413 case 0x1:
7414 if (op == 0x1) /* bx. */
7ff120b4 7415 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 7416 else if (op == 0x3)
7ff120b4 7417 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 7418 else
7ff120b4 7419 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7420
7421 case 0x2:
7422 if (op == 0x1)
7423 /* Not really supported. */
7ff120b4 7424 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 7425 else
7ff120b4 7426 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7427
7428 case 0x3:
7429 if (op == 0x1)
7ff120b4 7430 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 7431 regs, dsc); /* blx register. */
cca44b1b 7432 else
7ff120b4 7433 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7434
7435 case 0x5:
7ff120b4 7436 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
7437
7438 case 0x7:
7439 if (op == 0x1)
7ff120b4 7440 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
7441 else if (op == 0x3)
7442 /* Not really supported. */
7ff120b4 7443 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
7444
7445 default:
7ff120b4 7446 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7447 }
7448}
7449
7450static int
7ff120b4
YQ
7451arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7452 struct regcache *regs,
7453 struct displaced_step_closure *dsc)
cca44b1b
JB
7454{
7455 if (bit (insn, 25))
7456 switch (bits (insn, 20, 24))
7457 {
7458 case 0x10:
7ff120b4 7459 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
7460
7461 case 0x14:
7ff120b4 7462 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
7463
7464 case 0x12: case 0x16:
7ff120b4 7465 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
7466
7467 default:
7ff120b4 7468 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7469 }
7470 else
7471 {
7472 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7473
7474 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 7475 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 7476 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 7477 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 7478 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 7479 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 7480 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 7481 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 7482 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 7483 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 7484 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 7485 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b
JB
7486 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7487 	/* 2nd arg means "unprivileged".  */
7ff120b4
YQ
7488 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7489 dsc);
cca44b1b
JB
7490 }
7491
7492 /* Should be unreachable. */
7493 return 1;
7494}
7495
7496static int
7ff120b4
YQ
7497arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7498 struct regcache *regs,
7499 struct displaced_step_closure *dsc)
cca44b1b
JB
7500{
7501 int a = bit (insn, 25), b = bit (insn, 4);
7502 uint32_t op1 = bits (insn, 20, 24);
7503 int rn_f = bits (insn, 16, 19) == 0xf;
7504
7505 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7506 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 7507 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
7508 else if ((!a && (op1 & 0x17) == 0x02)
7509 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 7510 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
7511 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7512 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 7513 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
7514 else if ((!a && (op1 & 0x17) == 0x03)
7515 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 7516 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
7517 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7518 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 7519 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
7520 else if ((!a && (op1 & 0x17) == 0x06)
7521 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 7522 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
7523 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7524 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 7525 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
7526 else if ((!a && (op1 & 0x17) == 0x07)
7527 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 7528 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
7529
7530 /* Should be unreachable. */
7531 return 1;
7532}
7533
7534static int
7ff120b4
YQ
7535arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7536 struct displaced_step_closure *dsc)
cca44b1b
JB
7537{
7538 switch (bits (insn, 20, 24))
7539 {
7540 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 7541 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
7542
7543 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 7544 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
7545
7546 case 0x08: case 0x09: case 0x0a: case 0x0b:
7547 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 7548 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
7549 "decode/pack/unpack/saturate/reverse", dsc);
7550
7551 case 0x18:
7552 if (bits (insn, 5, 7) == 0) /* op2. */
7553 {
7554 if (bits (insn, 12, 15) == 0xf)
7ff120b4 7555 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 7556 else
7ff120b4 7557 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
7558 }
7559 else
7ff120b4 7560 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7561
7562 case 0x1a: case 0x1b:
7563 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 7564 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 7565 else
7ff120b4 7566 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7567
7568 case 0x1c: case 0x1d:
7569 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7570 {
7571 if (bits (insn, 0, 3) == 0xf)
7ff120b4 7572 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 7573 else
7ff120b4 7574 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
7575 }
7576 else
7ff120b4 7577 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7578
7579 case 0x1e: case 0x1f:
7580 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 7581 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 7582 else
7ff120b4 7583 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7584 }
7585
7586 /* Should be unreachable. */
7587 return 1;
7588}
7589
7590static int
7ff120b4
YQ
7591arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7592 struct regcache *regs,
7593 struct displaced_step_closure *dsc)
cca44b1b
JB
7594{
7595 if (bit (insn, 25))
7ff120b4 7596 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 7597 else
7ff120b4 7598 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
7599}
7600
7601static int
7ff120b4
YQ
7602arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7603 struct regcache *regs,
7604 struct displaced_step_closure *dsc)
cca44b1b
JB
7605{
7606 unsigned int opcode = bits (insn, 20, 24);
7607
7608 switch (opcode)
7609 {
7610 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 7611 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
7612
7613 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7614 case 0x12: case 0x16:
7ff120b4 7615 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
7616
7617 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7618 case 0x13: case 0x17:
7ff120b4 7619 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
7620
7621 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7622 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7623 /* Note: no writeback for these instructions. Bit 25 will always be
7624 zero though (via caller), so the following works OK. */
7ff120b4 7625 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
7626 }
7627
7628 /* Should be unreachable. */
7629 return 1;
7630}
7631
34518530
YQ
7632/* Decode shifted register instructions. */
7633
7634static int
7635thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7636 uint16_t insn2, struct regcache *regs,
7637 struct displaced_step_closure *dsc)
7638{
7639   /* PC is only allowed to be used in the MOV instruction.  */
7640
7641 unsigned int op = bits (insn1, 5, 8);
7642 unsigned int rn = bits (insn1, 0, 3);
7643
7644 if (op == 0x2 && rn == 0xf) /* MOV */
7645 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7646 else
7647 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7648 "dp (shift reg)", dsc);
7649}
7650
7651
7652/* Decode extension register load/store. Exactly the same as
7653 arm_decode_ext_reg_ld_st. */
7654
7655static int
7656thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7657 uint16_t insn2, struct regcache *regs,
7658 struct displaced_step_closure *dsc)
7659{
7660 unsigned int opcode = bits (insn1, 4, 8);
7661
7662 switch (opcode)
7663 {
7664 case 0x04: case 0x05:
7665 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7666 "vfp/neon vmov", dsc);
7667
7668 case 0x08: case 0x0c: /* 01x00 */
7669 case 0x0a: case 0x0e: /* 01x10 */
7670 case 0x12: case 0x16: /* 10x10 */
7671 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7672 "vfp/neon vstm/vpush", dsc);
7673
7674 case 0x09: case 0x0d: /* 01x01 */
7675 case 0x0b: case 0x0f: /* 01x11 */
7676 case 0x13: case 0x17: /* 10x11 */
7677 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7678 "vfp/neon vldm/vpop", dsc);
7679
7680 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7681 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7682 "vstr", dsc);
7683 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7684 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7685 }
7686
7687 /* Should be unreachable. */
7688 return 1;
7689}
7690
cca44b1b 7691static int
7ff120b4
YQ
7692arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7693 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
7694{
7695 unsigned int op1 = bits (insn, 20, 25);
7696 int op = bit (insn, 4);
7697 unsigned int coproc = bits (insn, 8, 11);
7698 unsigned int rn = bits (insn, 16, 19);
7699
7700 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 7701 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
7702 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7703 && (coproc & 0xe) != 0xa)
7704 /* stc/stc2. */
7ff120b4 7705 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
7706 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7707 && (coproc & 0xe) != 0xa)
7708 /* ldc/ldc2 imm/lit. */
7ff120b4 7709 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 7710 else if ((op1 & 0x3e) == 0x00)
7ff120b4 7711 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 7712 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 7713 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 7714 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 7715 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 7716 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 7717 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
7718 else if ((op1 & 0x30) == 0x20 && !op)
7719 {
7720 if ((coproc & 0xe) == 0xa)
7ff120b4 7721 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 7722 else
7ff120b4 7723 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
7724 }
7725 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 7726 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 7727 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 7728 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 7729 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 7730 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 7731 else if ((op1 & 0x30) == 0x30)
7ff120b4 7732 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 7733 else
7ff120b4 7734 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
7735}
7736
34518530
YQ
7737static int
7738thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7739 uint16_t insn2, struct regcache *regs,
7740 struct displaced_step_closure *dsc)
7741{
7742 unsigned int coproc = bits (insn2, 8, 11);
7743 unsigned int op1 = bits (insn1, 4, 9);
7744 unsigned int bit_5_8 = bits (insn1, 5, 8);
7745 unsigned int bit_9 = bit (insn1, 9);
7746 unsigned int bit_4 = bit (insn1, 4);
7747 unsigned int rn = bits (insn1, 0, 3);
7748
7749 if (bit_9 == 0)
7750 {
7751 if (bit_5_8 == 2)
7752 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7753 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7754 dsc);
7755 else if (bit_5_8 == 0) /* UNDEFINED. */
7756 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7757 else
7758 {
7759 	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
7760 if ((coproc & 0xe) == 0xa)
7761 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7762 dsc);
7763 else /* coproc is not 101x. */
7764 {
7765 if (bit_4 == 0) /* STC/STC2. */
7766 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7767 "stc/stc2", dsc);
7768 	      else /* LDC/LDC2 {literal, immediate}.  */
7769 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7770 regs, dsc);
7771 }
7772 }
7773 }
7774 else
7775 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7776
7777 return 0;
7778}
7779
7780static void
7781install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7782 struct displaced_step_closure *dsc, int rd)
7783{
7784 /* ADR Rd, #imm
7785
7786 Rewrite as:
7787
7788 Preparation: Rd <- PC
7789 Insn: ADD Rd, #imm
7790 Cleanup: Null.
7791 */
7792
7793 /* Rd <- PC */
7794 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7795 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7796}
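
/* Worked example (illustrative only): for "adr r3, label" at address A,
   displaced_read_reg returns the PC value the instruction would have seen
   in place (A + 8 for ARM, A + 4 for Thumb), r3 is seeded with it, and the
   ADD/SUB immediate then executes out of line on that value, so the result
   is correct even though the copy runs in the scratch buffer.  */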
7797
7798static int
7799thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7800 struct displaced_step_closure *dsc,
7801 int rd, unsigned int imm)
7802{
7803
7804 /* Encoding T2: ADDS Rd, #imm */
7805 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7806
7807 install_pc_relative (gdbarch, regs, dsc, rd);
7808
7809 return 0;
7810}
7811
7812static int
7813thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7814 struct regcache *regs,
7815 struct displaced_step_closure *dsc)
7816{
7817 unsigned int rd = bits (insn, 8, 10);
7818 unsigned int imm8 = bits (insn, 0, 7);
7819
7820 if (debug_displaced)
7821 fprintf_unfiltered (gdb_stdlog,
7822 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7823 rd, imm8, insn);
7824
7825 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7826}
7827
7828static int
7829thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7830 uint16_t insn2, struct regcache *regs,
7831 struct displaced_step_closure *dsc)
7832{
7833 unsigned int rd = bits (insn2, 8, 11);
7834   /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
7835      extract the raw immediate encoding rather than computing the immediate
7836      value.  When generating the ADD or SUB instruction, we can then OR the
7837      immediate directly into the encoding.  */
7838 unsigned int imm_3_8 = insn2 & 0x70ff;
7839 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7840
7841 if (debug_displaced)
7842 fprintf_unfiltered (gdb_stdlog,
7843 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7844 rd, imm_i, imm_3_8, insn1, insn2);
7845
7846 if (bit (insn1, 7)) /* Encoding T2 */
7847 {
7848       /* Encoding T2: SUB Rd, Rd, #imm */
7849 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7850 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7851 }
7852 else /* Encoding T3 */
7853 {
7854 /* Encoding T3: ADD Rd, Rd, #imm */
7855 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7856 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7857 }
7858 dsc->numinsns = 2;
7859
7860 install_pc_relative (gdbarch, regs, dsc, rd);
7861
7862 return 0;
7863}
7864
7865static int
7866thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
7867 struct regcache *regs,
7868 struct displaced_step_closure *dsc)
7869{
7870 unsigned int rt = bits (insn1, 8, 10);
7871 unsigned int pc;
7872 int imm8 = (bits (insn1, 0, 7) << 2);
7873 CORE_ADDR from = dsc->insn_addr;
7874
7875 /* LDR Rd, #imm8
7876
7877      Rewrite as:
7878
7879 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7880
7881 Insn: LDR R0, [R2, R3];
7882 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7883
7884 if (debug_displaced)
7885 fprintf_unfiltered (gdb_stdlog,
7886 "displaced: copying thumb ldr r%d [pc #%d]\n"
7887 , rt, imm8);
7888
7889 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7890 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7891 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7892 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7893 /* The assembler calculates the required value of the offset from the
7894 Align(PC,4) value of this instruction to the label. */
7895 pc = pc & 0xfffffffc;
7896
7897 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7898 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7899
7900 dsc->rd = rt;
7901 dsc->u.ldst.xfersize = 4;
7902 dsc->u.ldst.rn = 0;
7903 dsc->u.ldst.immed = 0;
7904 dsc->u.ldst.writeback = 0;
7905 dsc->u.ldst.restore_r4 = 0;
7906
7907 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7908
7909 dsc->cleanup = &cleanup_load;
7910
7911 return 0;
7912}
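
/* Worked example (illustrative only): "ldr r5, [pc, #48]" at 0x80e2 has
   imm8 = 12, so imm8 << 2 = 48.  The preparation sets r2 to
   Align (0x80e2 + 4, 4) = 0x80e4 and r3 to 48; the executed copy
   "ldr r0, [r2, r3]" then loads from 0x8114, and the cleanup moves r0 into
   r5 and restores r0, r2 and r3 from dsc->tmp[].  */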
7913
7914/* Copy Thumb cbnz/cbz instruction.  */
7915
7916static int
7917thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7918 struct regcache *regs,
7919 struct displaced_step_closure *dsc)
7920{
7921 int non_zero = bit (insn1, 11);
7922 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7923 CORE_ADDR from = dsc->insn_addr;
7924 int rn = bits (insn1, 0, 2);
7925 int rn_val = displaced_read_reg (regs, dsc, rn);
7926
7927 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7928   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
7929      true, set it to INST_AL so cleanup_branch knows the branch is taken;
7930      otherwise leave it as-is and cleanup_branch will do nothing.  */
7931 if (dsc->u.branch.cond)
7932 {
7933 dsc->u.branch.cond = INST_AL;
7934 dsc->u.branch.dest = from + 4 + imm5;
7935 }
7936 else
7937 dsc->u.branch.dest = from + 2;
7938
7939 dsc->u.branch.link = 0;
7940 dsc->u.branch.exchange = 0;
7941
7942 if (debug_displaced)
7943 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7944 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7945 rn, rn_val, insn1, dsc->u.branch.dest);
7946
7947 dsc->modinsn[0] = THUMB_NOP;
7948
7949 dsc->cleanup = &cleanup_branch;
7950 return 0;
7951}
7952
7953/* Copy Table Branch Byte/Halfword */
7954static int
7955thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7956 uint16_t insn2, struct regcache *regs,
7957 struct displaced_step_closure *dsc)
7958{
7959 ULONGEST rn_val, rm_val;
7960 int is_tbh = bit (insn2, 4);
7961 CORE_ADDR halfwords = 0;
7962 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7963
7964 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7965 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7966
7967 if (is_tbh)
7968 {
7969 gdb_byte buf[2];
7970
7971 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7972 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7973 }
7974 else
7975 {
7976 gdb_byte buf[1];
7977
7978 target_read_memory (rn_val + rm_val, buf, 1);
7979 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7980 }
7981
7982 if (debug_displaced)
7983     fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7984 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7985 (unsigned int) rn_val, (unsigned int) rm_val,
7986 (unsigned int) halfwords);
7987
7988 dsc->u.branch.cond = INST_AL;
7989 dsc->u.branch.link = 0;
7990 dsc->u.branch.exchange = 0;
7991 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7992
7993 dsc->cleanup = &cleanup_branch;
7994
7995 return 0;
7996}
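
/* Worked example (illustrative only): for "tbh [r0, r1, lsl #1]" with
   r0 = 0x9000 (table base) and r1 = 3 (index), the halfword at
   0x9000 + 2 * 3 = 0x9006 is read; if it contains 0x0021, the branch
   destination is insn_addr + 4 + 2 * 0x0021 = insn_addr + 0x46, and the
   cleanup writes that address to the PC.  */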
7997
7998static void
7999cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8000 struct displaced_step_closure *dsc)
8001{
8002 /* PC <- r7 */
8003 int val = displaced_read_reg (regs, dsc, 7);
8004 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8005
8006 /* r7 <- r8 */
8007 val = displaced_read_reg (regs, dsc, 8);
8008 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8009
8010 /* r8 <- tmp[0] */
8011 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8012
8013}
8014
8015static int
8016thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8017 struct regcache *regs,
8018 struct displaced_step_closure *dsc)
8019{
8020 dsc->u.block.regmask = insn1 & 0x00ff;
8021
8022 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8023 to :
8024
8025 (1) register list is full, that is, r0-r7 are used.
8026 Prepare: tmp[0] <- r8
8027
8028 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8029 MOV r8, r7; Move value of r7 to r8;
8030 POP {r7}; Store PC value into r7.
8031
8032 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8033
8034 (2) register list is not full, supposing there are N registers in
8035 register list (except PC, 0 <= N <= 7).
8036 Prepare: for each i, 0 - N, tmp[i] <- ri.
8037
8038 POP {r0, r1, ...., rN};
8039
8040 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8041 from tmp[] properly.
8042 */
8043 if (debug_displaced)
8044 fprintf_unfiltered (gdb_stdlog,
8045 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8046 dsc->u.block.regmask, insn1);
8047
8048 if (dsc->u.block.regmask == 0xff)
8049 {
8050 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8051
8052 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8053 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8054 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8055
8056 dsc->numinsns = 3;
8057 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8058 }
8059 else
8060 {
8061 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8062 unsigned int new_regmask, bit = 1;
8063 unsigned int to = 0, from = 0, i, new_rn;
8064
8065 for (i = 0; i < num_in_list + 1; i++)
8066 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8067
8068 new_regmask = (1 << (num_in_list + 1)) - 1;
8069
8070 if (debug_displaced)
8071 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8072 "{..., pc}: original reg list %.4x,"
8073 " modified list %.4x\n"),
8074 (int) dsc->u.block.regmask, new_regmask);
8075
8076 dsc->u.block.regmask |= 0x8000;
8077 dsc->u.block.writeback = 0;
8078 dsc->u.block.cond = INST_AL;
8079
8080 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8081
8082 dsc->cleanup = &cleanup_block_load_pc;
8083 }
8084
8085 return 0;
8086}
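
/* Worked example of case (2) above (illustrative only): for
   "pop {r0, r2, pc}" (0xbd05) the mask is 0x05, so num_in_list is 2 and the
   executed copy becomes "pop {r0, r1, r2}" (0xbc07).  r0-r2 are saved in
   dsc->tmp[] beforehand; cleanup_block_load_pc then moves r2 -> pc and
   r1 -> r2, leaves r0 in place, and restores r1 from the saved copy.  */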
8087
8088static void
8089thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8090 struct regcache *regs,
8091 struct displaced_step_closure *dsc)
8092{
8093 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8094 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8095 int err = 0;
8096
8097 /* 16-bit thumb instructions. */
8098 switch (op_bit_12_15)
8099 {
8100     /* Shift (immediate), add, subtract, move and compare.  */
8101 case 0: case 1: case 2: case 3:
8102 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8103 "shift/add/sub/mov/cmp",
8104 dsc);
8105 break;
8106 case 4:
8107 switch (op_bit_10_11)
8108 {
8109 case 0: /* Data-processing */
8110 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8111 "data-processing",
8112 dsc);
8113 break;
8114 case 1: /* Special data instructions and branch and exchange. */
8115 {
8116 unsigned short op = bits (insn1, 7, 9);
8117 if (op == 6 || op == 7) /* BX or BLX */
8118 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8119 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8120 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8121 else
8122 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8123 dsc);
8124 }
8125 break;
8126 default: /* LDR (literal) */
8127 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8128 }
8129 break;
8130 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8131 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8132 break;
8133 case 10:
8134 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8135 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8136 else /* Generate SP-relative address */
8137 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8138 break;
8139 case 11: /* Misc 16-bit instructions */
8140 {
8141 switch (bits (insn1, 8, 11))
8142 {
8143 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8144 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8145 break;
8146 case 12: case 13: /* POP */
8147 if (bit (insn1, 8)) /* PC is in register list. */
8148 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8149 else
8150 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8151 break;
8152 case 15: /* If-Then, and hints */
8153 if (bits (insn1, 0, 3))
8154 	    /* If-Then makes up to four following instructions conditional.
8155 	       The IT instruction itself is not conditional, so handle it as
8156 	       an ordinary unmodified instruction.  */
8157 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8158 dsc);
8159 else
8160 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8161 break;
8162 default:
8163 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8164 }
8165 }
8166 break;
8167 case 12:
8168 if (op_bit_10_11 < 2) /* Store multiple registers */
8169 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8170 else /* Load multiple registers */
8171 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8172 break;
8173 case 13: /* Conditional branch and supervisor call */
8174 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8175 err = thumb_copy_b (gdbarch, insn1, dsc);
8176 else
8177 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8178 break;
8179 case 14: /* Unconditional branch */
8180 err = thumb_copy_b (gdbarch, insn1, dsc);
8181 break;
8182 default:
8183 err = 1;
8184 }
8185
8186 if (err)
8187 internal_error (__FILE__, __LINE__,
8188 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8189}
8190
8191static int
8192decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8193 uint16_t insn1, uint16_t insn2,
8194 struct regcache *regs,
8195 struct displaced_step_closure *dsc)
8196{
8197 int rt = bits (insn2, 12, 15);
8198 int rn = bits (insn1, 0, 3);
8199 int op1 = bits (insn1, 7, 8);
8200 int err = 0;
8201
8202 switch (bits (insn1, 5, 6))
8203 {
8204 case 0: /* Load byte and memory hints */
8205 if (rt == 0xf) /* PLD/PLI */
8206 {
8207 if (rn == 0xf)
8208 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8209 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8210 else
8211 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8212 "pli/pld", dsc);
8213 }
8214 else
8215 {
8216 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8217 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8218 1);
8219 else
8220 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8221 "ldrb{reg, immediate}/ldrbt",
8222 dsc);
8223 }
8224
8225 break;
8226 case 1: /* Load halfword and memory hints. */
8227 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8228 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8229 "pld/unalloc memhint", dsc);
8230 else
8231 {
8232 if (rn == 0xf)
8233 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8234 2);
8235 else
8236 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8237 "ldrh/ldrht", dsc);
8238 }
8239 break;
8240 case 2: /* Load word */
8241 {
8242 int insn2_bit_8_11 = bits (insn2, 8, 11);
8243
8244 if (rn == 0xf)
8245 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8246 else if (op1 == 0x1) /* Encoding T3 */
8247 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8248 0, 1);
8249 else /* op1 == 0x0 */
8250 {
8251 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8252 /* LDR (immediate) */
8253 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8254 dsc, bit (insn2, 8), 1);
8255 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8256 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8257 "ldrt", dsc);
8258 else
8259 /* LDR (register) */
8260 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8261 dsc, 0, 0);
8262 }
8263 break;
8264 }
8265 default:
8266 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8267 break;
8268 }
8269 return 0;
8270}
8271
8272static void
8273thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8274 uint16_t insn2, struct regcache *regs,
8275 struct displaced_step_closure *dsc)
8276{
8277 int err = 0;
8278 unsigned short op = bit (insn2, 15);
8279 unsigned int op1 = bits (insn1, 11, 12);
8280
8281 switch (op1)
8282 {
8283 case 1:
8284 {
8285 switch (bits (insn1, 9, 10))
8286 {
8287 case 0:
8288 if (bit (insn1, 6))
8289 {
8290 	      /* Load/store {dual, exclusive}, table branch.  */
8291 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8292 && bits (insn2, 5, 7) == 0)
8293 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8294 dsc);
8295 else
8296 	      /* PC is not allowed to be used in load/store {dual, exclusive}
8297 		 instructions.  */
8298 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8299 "load/store dual/ex", dsc);
8300 }
8301 else /* load/store multiple */
8302 {
8303 switch (bits (insn1, 7, 8))
8304 {
8305 case 0: case 3: /* SRS, RFE */
8306 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8307 "srs/rfe", dsc);
8308 break;
8309 case 1: case 2: /* LDM/STM/PUSH/POP */
8310 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8311 break;
8312 }
8313 }
8314 break;
8315
8316 case 1:
8317 /* Data-processing (shift register). */
8318 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8319 dsc);
8320 break;
8321 default: /* Coprocessor instructions. */
8322 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8323 break;
8324 }
8325 break;
8326 }
8327 case 2: /* op1 = 2 */
8328 if (op) /* Branch and misc control. */
8329 {
8330 if (bit (insn2, 14) /* BLX/BL */
8331 || bit (insn2, 12) /* Unconditional branch */
8332 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8333 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8334 else
8335 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8336 "misc ctrl", dsc);
8337 }
8338 else
8339 {
8340 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8341 {
8342 int op = bits (insn1, 4, 8);
8343 int rn = bits (insn1, 0, 3);
8344 if ((op == 0 || op == 0xa) && rn == 0xf)
8345 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8346 regs, dsc);
8347 else
8348 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8349 "dp/pb", dsc);
8350 }
8351 	  else /* Data processing (modified immediate) */
8352 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8353 "dp/mi", dsc);
8354 }
8355 break;
8356 case 3: /* op1 = 3 */
8357 switch (bits (insn1, 9, 10))
8358 {
8359 case 0:
8360 if (bit (insn1, 4))
8361 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8362 regs, dsc);
8363 else /* NEON Load/Store and Store single data item */
8364 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8365 "neon elt/struct load/store",
8366 dsc);
8367 break;
8368 case 1: /* op1 = 3, bits (9, 10) == 1 */
8369 switch (bits (insn1, 7, 8))
8370 {
8371 case 0: case 1: /* Data processing (register) */
8372 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8373 "dp(reg)", dsc);
8374 break;
8375 case 2: /* Multiply and absolute difference */
8376 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8377 "mul/mua/diff", dsc);
8378 break;
8379 case 3: /* Long multiply and divide */
8380 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8381 "lmul/lmua", dsc);
8382 break;
8383 }
8384 break;
8385 default: /* Coprocessor instructions */
8386 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8387 break;
8388 }
8389 break;
8390 default:
8391 err = 1;
8392 }
8393
8394 if (err)
8395 internal_error (__FILE__, __LINE__,
8396 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8397
8398}
8399
b434a28f
YQ
8400static void
8401thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8402 CORE_ADDR to, struct regcache *regs,
8403 struct displaced_step_closure *dsc)
8404{
34518530
YQ
8405 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8406 uint16_t insn1
8407 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8408
8409 if (debug_displaced)
8410 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8411 "at %.8lx\n", insn1, (unsigned long) from);
8412
8413 dsc->is_thumb = 1;
8414 dsc->insn_size = thumb_insn_size (insn1);
8415 if (thumb_insn_size (insn1) == 4)
8416 {
8417 uint16_t insn2
8418 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8419 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8420 }
8421 else
8422 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
8423}
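
/* Illustrative sketch, not part of the original file: the size test that
   thumb_insn_size is expected to perform.  A first halfword whose top five
   bits are 0b11101, 0b11110 or 0b11111 begins a 32-bit Thumb-2 instruction;
   anything else is a 16-bit instruction.  */

static int
example_thumb_insn_size (uint16_t insn1)
{
  if ((insn1 & 0xe000) == 0xe000 && (insn1 & 0x1800) != 0)
    return 4;
  return 2;
}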
8424
cca44b1b 8425void
b434a28f
YQ
8426arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8427 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
8428 struct displaced_step_closure *dsc)
8429{
8430 int err = 0;
b434a28f
YQ
8431 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8432 uint32_t insn;
cca44b1b
JB
8433
8434 /* Most displaced instructions use a 1-instruction scratch space, so set this
8435 here and override below if/when necessary. */
8436 dsc->numinsns = 1;
8437 dsc->insn_addr = from;
8438 dsc->scratch_base = to;
8439 dsc->cleanup = NULL;
8440 dsc->wrote_to_pc = 0;
8441
b434a28f
YQ
8442 if (!displaced_in_arm_mode (regs))
8443 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8444
4db71c0b
YQ
8445 dsc->is_thumb = 0;
8446 dsc->insn_size = 4;
b434a28f
YQ
8447 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8448 if (debug_displaced)
8449 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8450 "at %.8lx\n", (unsigned long) insn,
8451 (unsigned long) from);
8452
cca44b1b 8453 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 8454 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
8455 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8456 {
8457 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 8458 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
8459 break;
8460
8461 case 0x4: case 0x5: case 0x6:
7ff120b4 8462 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
8463 break;
8464
8465 case 0x7:
7ff120b4 8466 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
8467 break;
8468
8469 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 8470 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
8471 break;
8472
8473 case 0xc: case 0xd: case 0xe: case 0xf:
7ff120b4 8474 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
cca44b1b
JB
8475 break;
8476 }
8477
8478 if (err)
8479 internal_error (__FILE__, __LINE__,
8480 _("arm_process_displaced_insn: Instruction decode error"));
8481}
8482
8483/* Actually set up the scratch space for a displaced instruction. */
8484
8485void
8486arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8487 CORE_ADDR to, struct displaced_step_closure *dsc)
8488{
8489 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 8490 unsigned int i, len, offset;
cca44b1b 8491 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b
YQ
8492 int size = dsc->is_thumb? 2 : 4;
8493 const unsigned char *bkp_insn;
cca44b1b 8494
4db71c0b 8495 offset = 0;
cca44b1b
JB
8496 /* Poke modified instruction(s). */
8497 for (i = 0; i < dsc->numinsns; i++)
8498 {
8499 if (debug_displaced)
4db71c0b
YQ
8500 {
8501 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8502 if (size == 4)
8503 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8504 dsc->modinsn[i]);
8505 else if (size == 2)
8506 fprintf_unfiltered (gdb_stdlog, "%.4x",
8507 (unsigned short)dsc->modinsn[i]);
8508
8509 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8510 (unsigned long) to + offset);
8511
8512 }
8513 write_memory_unsigned_integer (to + offset, size,
8514 byte_order_for_code,
cca44b1b 8515 dsc->modinsn[i]);
4db71c0b
YQ
8516 offset += size;
8517 }
8518
8519 /* Choose the correct breakpoint instruction. */
8520 if (dsc->is_thumb)
8521 {
8522 bkp_insn = tdep->thumb_breakpoint;
8523 len = tdep->thumb_breakpoint_size;
8524 }
8525 else
8526 {
8527 bkp_insn = tdep->arm_breakpoint;
8528 len = tdep->arm_breakpoint_size;
cca44b1b
JB
8529 }
8530
8531 /* Put breakpoint afterwards. */
4db71c0b 8532 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
8533
8534 if (debug_displaced)
8535 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8536 paddress (gdbarch, from), paddress (gdbarch, to));
8537}
8538
8539/* Entry point for copying an instruction into scratch space for displaced
8540 stepping. */
8541
8542struct displaced_step_closure *
8543arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8544 CORE_ADDR from, CORE_ADDR to,
8545 struct regcache *regs)
8546{
8547 struct displaced_step_closure *dsc
8548 = xmalloc (sizeof (struct displaced_step_closure));
b434a28f 8549 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
cca44b1b
JB
8550 arm_displaced_init_closure (gdbarch, from, to, dsc);
8551
8552 return dsc;
8553}
8554
8555/* Entry point for cleaning things up after a displaced instruction has been
8556 single-stepped. */
8557
8558void
8559arm_displaced_step_fixup (struct gdbarch *gdbarch,
8560 struct displaced_step_closure *dsc,
8561 CORE_ADDR from, CORE_ADDR to,
8562 struct regcache *regs)
8563{
8564 if (dsc->cleanup)
8565 dsc->cleanup (gdbarch, regs, dsc);
8566
8567 if (!dsc->wrote_to_pc)
4db71c0b
YQ
8568 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8569 dsc->insn_addr + dsc->insn_size);
8570
cca44b1b
JB
8571}
8572
8573#include "bfd-in2.h"
8574#include "libcoff.h"
8575
8576static int
8577gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8578{
9779414d
DJ
8579 struct gdbarch *gdbarch = info->application_data;
8580
8581 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
8582 {
8583 static asymbol *asym;
8584 static combined_entry_type ce;
8585 static struct coff_symbol_struct csym;
8586 static struct bfd fake_bfd;
8587 static bfd_target fake_target;
8588
8589 if (csym.native == NULL)
8590 {
8591 /* Create a fake symbol vector containing a Thumb symbol.
8592 This is solely so that the code in print_insn_little_arm()
8593 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8594 the presence of a Thumb symbol and switch to decoding
8595 Thumb instructions. */
8596
8597 fake_target.flavour = bfd_target_coff_flavour;
8598 fake_bfd.xvec = &fake_target;
8599 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8600 csym.native = &ce;
8601 csym.symbol.the_bfd = &fake_bfd;
8602 csym.symbol.name = "fake";
8603 asym = (asymbol *) & csym;
8604 }
8605
8606 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8607 info->symbols = &asym;
8608 }
8609 else
8610 info->symbols = NULL;
8611
8612 if (info->endian == BFD_ENDIAN_BIG)
8613 return print_insn_big_arm (memaddr, info);
8614 else
8615 return print_insn_little_arm (memaddr, info);
8616}
8617
8618/* The following define instruction sequences that will cause ARM
8619 CPUs to take an undefined instruction trap. These are used to
8620 signal a breakpoint to GDB.
8621
8622 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8623 modes. A different instruction is required for each mode. The ARM
8624 CPUs can also be big or little endian. Thus four different
8625 instructions are needed to support all cases.
8626
8627 Note: ARMv4 defines several new instructions that will take the
8628 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8629 not in fact add the new instructions. The new undefined
8630 instructions in ARMv4 are all instructions that had no defined
8631 behaviour in earlier chips. There is no guarantee that they will
8632 raise an exception, but they may be treated as NOPs. In practice, it
8633 may only be safe to rely on instructions matching:
8634
8635 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8636 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8637 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8638
0963b4bd 8639 Even this may only be true if the condition predicate is true. The
cca44b1b
JB
8640 following use a condition predicate of ALWAYS so it is always TRUE.
8641
8642 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8643 and NetBSD all use a software interrupt rather than an undefined
8644 instruction to force a trap. This can be handled by the
8645 ABI-specific code during establishment of the gdbarch vector. */
8646
8647#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8648#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8649#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8650#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8651
8652static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8653static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8654static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8655static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8656
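/* Illustrative note (added, not part of the original sources): read back
   as words, both ARM byte sequences above encode the instruction
   0xe7ffdefe, which falls in the "cccc 011x ... 1 xxxx" undefined space
   described earlier, with the condition field set to ALWAYS.  The Thumb
   sequences encode 0xbebe.  These are only defaults; OS-specific code may
   install different breakpoint instructions in the tdep vector.  */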
8657/* Determine the type and size of breakpoint to insert at PCPTR. Uses
8658 the program counter value to determine whether a 16-bit or 32-bit
8659 breakpoint should be used. It returns a pointer to a string of
8660 bytes that encode a breakpoint instruction, stores the length of
8661 the string to *lenptr, and adjusts the program counter (if
8662 necessary) to point to the actual memory location where the
8663 breakpoint should be inserted. */
8664
8665static const unsigned char *
8666arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8667{
8668 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 8669 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 8670
9779414d 8671 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
8672 {
8673 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
8674
8675 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8676 check whether we are replacing a 32-bit instruction. */
8677 if (tdep->thumb2_breakpoint != NULL)
8678 {
8679 gdb_byte buf[2];
8680 if (target_read_memory (*pcptr, buf, 2) == 0)
8681 {
8682 unsigned short inst1;
8683 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 8684 if (thumb_insn_size (inst1) == 4)
177321bd
DJ
8685 {
8686 *lenptr = tdep->thumb2_breakpoint_size;
8687 return tdep->thumb2_breakpoint;
8688 }
8689 }
8690 }
8691
cca44b1b
JB
8692 *lenptr = tdep->thumb_breakpoint_size;
8693 return tdep->thumb_breakpoint;
8694 }
8695 else
8696 {
8697 *lenptr = tdep->arm_breakpoint_size;
8698 return tdep->arm_breakpoint;
8699 }
8700}
8701
177321bd
DJ
8702static void
8703arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8704 int *kindptr)
8705{
8706 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8707
8708 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8709
9779414d 8710 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
177321bd
DJ
8711 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8712 that this is not confused with a 32-bit ARM breakpoint. */
8713 *kindptr = 3;
8714}
8715
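/* Illustrative note (added, not part of the original sources, and assuming
   the usual remote-protocol interpretation of breakpoint "kinds"): a
   32-bit Thumb-2 breakpoint is reported to the stub as kind 3 rather than
   length 4, while ARM (4) and 16-bit Thumb (2) breakpoints keep the
   length filled in by arm_breakpoint_from_pc.  */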
cca44b1b
JB
8716/* Extract from an array REGBUF containing the (raw) register state a
8717 function return value of type TYPE, and copy that, in virtual
8718 format, into VALBUF. */
8719
8720static void
8721arm_extract_return_value (struct type *type, struct regcache *regs,
8722 gdb_byte *valbuf)
8723{
8724 struct gdbarch *gdbarch = get_regcache_arch (regs);
8725 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8726
8727 if (TYPE_CODE_FLT == TYPE_CODE (type))
8728 {
8729 switch (gdbarch_tdep (gdbarch)->fp_model)
8730 {
8731 case ARM_FLOAT_FPA:
8732 {
8733 /* The value is in register F0 in internal format. We need to
8734 extract the raw value and then convert it to the desired
8735 internal type. */
8736 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8737
8738 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8739 convert_from_extended (floatformat_from_type (type), tmpbuf,
8740 valbuf, gdbarch_byte_order (gdbarch));
8741 }
8742 break;
8743
8744 case ARM_FLOAT_SOFT_FPA:
8745 case ARM_FLOAT_SOFT_VFP:
8746 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8747 not using the VFP ABI code. */
8748 case ARM_FLOAT_VFP:
8749 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8750 if (TYPE_LENGTH (type) > 4)
8751 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8752 valbuf + INT_REGISTER_SIZE);
8753 break;
8754
8755 default:
0963b4bd
MS
8756 internal_error (__FILE__, __LINE__,
8757 _("arm_extract_return_value: "
8758 "Floating point model not supported"));
cca44b1b
JB
8759 break;
8760 }
8761 }
8762 else if (TYPE_CODE (type) == TYPE_CODE_INT
8763 || TYPE_CODE (type) == TYPE_CODE_CHAR
8764 || TYPE_CODE (type) == TYPE_CODE_BOOL
8765 || TYPE_CODE (type) == TYPE_CODE_PTR
8766 || TYPE_CODE (type) == TYPE_CODE_REF
8767 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8768 {
b021a221
MS
8769 /* If the type is a plain integer, then the access is
8770 straightforward. Otherwise we have to play around a bit
8771 more. */
cca44b1b
JB
8772 int len = TYPE_LENGTH (type);
8773 int regno = ARM_A1_REGNUM;
8774 ULONGEST tmp;
8775
8776 while (len > 0)
8777 {
8778 /* By using store_unsigned_integer we avoid having to do
8779 anything special for small big-endian values. */
8780 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8781 store_unsigned_integer (valbuf,
8782 (len > INT_REGISTER_SIZE
8783 ? INT_REGISTER_SIZE : len),
8784 byte_order, tmp);
8785 len -= INT_REGISTER_SIZE;
8786 valbuf += INT_REGISTER_SIZE;
8787 }
8788 }
8789 else
8790 {
8791 /* For a structure or union the behaviour is as if the value had
8792 been stored to word-aligned memory and then loaded into
8793 registers with 32-bit load instruction(s). */
8794 int len = TYPE_LENGTH (type);
8795 int regno = ARM_A1_REGNUM;
8796 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8797
8798 while (len > 0)
8799 {
8800 regcache_cooked_read (regs, regno++, tmpbuf);
8801 memcpy (valbuf, tmpbuf,
8802 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8803 len -= INT_REGISTER_SIZE;
8804 valbuf += INT_REGISTER_SIZE;
8805 }
8806 }
8807}
8808
8809
8810/* Will a function return an aggregate type in memory or in a
8811 register? Return 0 if an aggregate type can be returned in a
8812 register, 1 if it must be returned in memory. */
8813
8814static int
8815arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8816{
8817 int nRc;
8818 enum type_code code;
8819
8820 CHECK_TYPEDEF (type);
8821
8822 /* In the ARM ABI, "integer" like aggregate types are returned in
8823 registers. For an aggregate type to be integer like, its size
8824 must be less than or equal to INT_REGISTER_SIZE and the
8825 offset of each addressable subfield must be zero. Note that bit
8826 fields are not addressable, and all addressable subfields of
8827 unions always start at offset zero.
8828
8829 This function is based on the behaviour of GCC 2.95.1.
8830 See: gcc/arm.c: arm_return_in_memory() for details.
8831
8832 Note: All versions of GCC before GCC 2.95.2 do not set up the
8833 parameters correctly for a function returning the following
8834 structure: struct { float f;}; This should be returned in memory,
8835 not a register. Richard Earnshaw sent me a patch, but I do not
8836 know of any way to detect if a function like the above has been
8837 compiled with the correct calling convention. */
8838
8839 /* All aggregate types that won't fit in a register must be returned
8840 in memory. */
8841 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8842 {
8843 return 1;
8844 }
8845
8846 /* The AAPCS says all aggregates not larger than a word are returned
8847 in a register. */
8848 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8849 return 0;
8850
8851 /* The only aggregate types that can be returned in a register are
8852 structs and unions. Arrays must be returned in memory. */
8853 code = TYPE_CODE (type);
8854 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
8855 {
8856 return 1;
8857 }
8858
8859 /* Assume all other aggregate types can be returned in a register.
8860 Run a check for structures, unions and arrays. */
8861 nRc = 0;
8862
8863 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8864 {
8865 int i;
8866 /* Need to check if this struct/union is "integer" like. For
8867 this to be true, its size must be less than or equal to
8868 INT_REGISTER_SIZE and the offset of each addressable
8869 subfield must be zero. Note that bit fields are not
8870 addressable, and unions always start at offset zero. If any
8871 of the subfields is a floating point type, the struct/union
8872 cannot be an integer type. */
8873
8874 /* For each field in the object, check:
8875 1) Is it FP? --> yes, nRc = 1;
67255d04
RE
8876 2) Is it addressable (bitpos != 0) and
8877 not packed (bitsize == 0)?
8878 --> yes, nRc = 1
8879 */
8880
8881 for (i = 0; i < TYPE_NFIELDS (type); i++)
8882 {
8883 enum type_code field_type_code;
0963b4bd
MS
8884 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8885 i)));
67255d04
RE
8886
8887 /* Is it a floating point type field? */
8888 if (field_type_code == TYPE_CODE_FLT)
8889 {
8890 nRc = 1;
8891 break;
8892 }
8893
8894 /* If bitpos != 0, then we have to care about it. */
8895 if (TYPE_FIELD_BITPOS (type, i) != 0)
8896 {
8897 /* Bitfields are not addressable. If the field bitsize is
8898 zero, then the field is not packed. Hence it cannot be
8899 a bitfield or any other packed type. */
8900 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8901 {
8902 nRc = 1;
8903 break;
8904 }
8905 }
8906 }
8907 }
8908
8909 return nRc;
8910}
8911
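/* Illustrative examples of the rule above (added, not part of the original
   sources), assuming the APCS ABI: "union { int i; char c; }" has every
   addressable subfield at offset zero, so it is returned in a register;
   "struct { char a; short b; }" (a subfield at a non-zero offset) and
   "struct { float f; }" (a floating-point subfield) are flagged for return
   in memory.  Under AAPCS any aggregate of one word or less is returned in
   a register regardless.  */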
34e8f22d
RE
8912/* Write into appropriate registers a function return value of type
8913 TYPE, given in virtual format. */
8914
8915static void
b508a996 8916arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8917 const gdb_byte *valbuf)
34e8f22d 8918{
be8626e0 8919 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8920 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8921
34e8f22d
RE
8922 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8923 {
7a5ea0d4 8924 char buf[MAX_REGISTER_SIZE];
34e8f22d 8925
be8626e0 8926 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8927 {
8928 case ARM_FLOAT_FPA:
8929
be8626e0
MD
8930 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8931 gdbarch_byte_order (gdbarch));
b508a996 8932 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8933 break;
8934
fd50bc42 8935 case ARM_FLOAT_SOFT_FPA:
08216dd7 8936 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8937 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8938 not using the VFP ABI code. */
8939 case ARM_FLOAT_VFP:
b508a996
RE
8940 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8941 if (TYPE_LENGTH (type) > 4)
8942 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8943 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8944 break;
8945
8946 default:
9b20d036
MS
8947 internal_error (__FILE__, __LINE__,
8948 _("arm_store_return_value: Floating "
8949 "point model not supported"));
08216dd7
RE
8950 break;
8951 }
34e8f22d 8952 }
b508a996
RE
8953 else if (TYPE_CODE (type) == TYPE_CODE_INT
8954 || TYPE_CODE (type) == TYPE_CODE_CHAR
8955 || TYPE_CODE (type) == TYPE_CODE_BOOL
8956 || TYPE_CODE (type) == TYPE_CODE_PTR
8957 || TYPE_CODE (type) == TYPE_CODE_REF
8958 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8959 {
8960 if (TYPE_LENGTH (type) <= 4)
8961 {
8962 /* Values of one word or less are zero/sign-extended and
8963 returned in r0. */
7a5ea0d4 8964 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8965 LONGEST val = unpack_long (type, valbuf);
8966
e17a4113 8967 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8968 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8969 }
8970 else
8971 {
8972 /* Integral values greater than one word are stored in consecutive
8973 registers starting with r0. This will always be a multiple of
8974 the regiser size. */
8975 int len = TYPE_LENGTH (type);
8976 int regno = ARM_A1_REGNUM;
8977
8978 while (len > 0)
8979 {
8980 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8981 len -= INT_REGISTER_SIZE;
8982 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8983 }
8984 }
8985 }
34e8f22d 8986 else
b508a996
RE
8987 {
8988 /* For a structure or union the behaviour is as if the value had
8989 been stored to word-aligned memory and then loaded into
8990 registers with 32-bit load instruction(s). */
8991 int len = TYPE_LENGTH (type);
8992 int regno = ARM_A1_REGNUM;
7a5ea0d4 8993 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8994
8995 while (len > 0)
8996 {
8997 memcpy (tmpbuf, valbuf,
7a5ea0d4 8998 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8999 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
9000 len -= INT_REGISTER_SIZE;
9001 valbuf += INT_REGISTER_SIZE;
b508a996
RE
9002 }
9003 }
34e8f22d
RE
9004}
9005
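/* Illustrative note (added, not part of the original sources): with the
   conventions above, a "long long" result occupies r0 and r1 (r0 holding
   the low-order word on a little-endian target), while a 6-byte structure
   is written as if copied from word-aligned memory, filling r0 and the
   first half of r1.  */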
2af48f68
PB
9006
9007/* Handle function return values. */
9008
9009static enum return_value_convention
c055b101
CV
9010arm_return_value (struct gdbarch *gdbarch, struct type *func_type,
9011 struct type *valtype, struct regcache *regcache,
9012 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 9013{
7c00367c 9014 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
90445bd3
DJ
9015 enum arm_vfp_cprc_base_type vfp_base_type;
9016 int vfp_base_count;
9017
9018 if (arm_vfp_abi_for_function (gdbarch, func_type)
9019 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9020 {
9021 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9022 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9023 int i;
9024 for (i = 0; i < vfp_base_count; i++)
9025 {
58d6951d
DJ
9026 if (reg_char == 'q')
9027 {
9028 if (writebuf)
9029 arm_neon_quad_write (gdbarch, regcache, i,
9030 writebuf + i * unit_length);
9031
9032 if (readbuf)
9033 arm_neon_quad_read (gdbarch, regcache, i,
9034 readbuf + i * unit_length);
9035 }
9036 else
9037 {
9038 char name_buf[4];
9039 int regnum;
9040
9041 sprintf (name_buf, "%c%d", reg_char, i);
9042 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9043 strlen (name_buf));
9044 if (writebuf)
9045 regcache_cooked_write (regcache, regnum,
9046 writebuf + i * unit_length);
9047 if (readbuf)
9048 regcache_cooked_read (regcache, regnum,
9049 readbuf + i * unit_length);
9050 }
90445bd3
DJ
9051 }
9052 return RETURN_VALUE_REGISTER_CONVENTION;
9053 }
7c00367c 9054
2af48f68
PB
9055 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9056 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9057 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9058 {
7c00367c
MK
9059 if (tdep->struct_return == pcc_struct_return
9060 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
9061 return RETURN_VALUE_STRUCT_CONVENTION;
9062 }
9063
9064 if (writebuf)
9065 arm_store_return_value (valtype, regcache, writebuf);
9066
9067 if (readbuf)
9068 arm_extract_return_value (valtype, regcache, readbuf);
9069
9070 return RETURN_VALUE_REGISTER_CONVENTION;
9071}
9072
9073
9df628e0 9074static int
60ade65d 9075arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 9076{
e17a4113
UW
9077 struct gdbarch *gdbarch = get_frame_arch (frame);
9078 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9079 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 9080 CORE_ADDR jb_addr;
7a5ea0d4 9081 char buf[INT_REGISTER_SIZE];
9df628e0 9082
60ade65d 9083 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
9084
9085 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 9086 INT_REGISTER_SIZE))
9df628e0
RE
9087 return 0;
9088
e17a4113 9089 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
9090 return 1;
9091}
9092
faa95490
DJ
9093/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9094 return the target PC. Otherwise return 0. */
c906108c
SS
9095
9096CORE_ADDR
52f729a7 9097arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 9098{
c5aa993b 9099 char *name;
faa95490 9100 int namelen;
c906108c
SS
9101 CORE_ADDR start_addr;
9102
9103 /* Find the starting address and name of the function containing the PC. */
9104 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9105 return 0;
9106
faa95490
DJ
9107 /* If PC is in a Thumb call or return stub, return the address of the
9108 target PC, which is in a register. The thunk functions are called
9109 _call_via_xx, where xx is the register name. The possible names
3d8d5e79
DJ
9110 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9111 functions, named __ARM_call_via_r[0-7]. */
9112 if (strncmp (name, "_call_via_", 10) == 0
9113 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
c906108c 9114 {
ed9a39eb
JM
9115 /* Use the name suffix to determine which register contains the
9116 target PC. */
c5aa993b
JM
9117 static char *table[15] =
9118 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9119 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9120 };
c906108c 9121 int regno;
faa95490 9122 int offset = strlen (name) - 2;
c906108c
SS
9123
9124 for (regno = 0; regno <= 14; regno++)
faa95490 9125 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 9126 return get_frame_register_unsigned (frame, regno);
c906108c 9127 }
ed9a39eb 9128
faa95490
DJ
9129 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9130 non-interworking calls to foo. We could decode the stubs
9131 to find the target but it's easier to use the symbol table. */
9132 namelen = strlen (name);
9133 if (name[0] == '_' && name[1] == '_'
9134 && ((namelen > 2 + strlen ("_from_thumb")
9135 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9136 strlen ("_from_thumb")) == 0)
9137 || (namelen > 2 + strlen ("_from_arm")
9138 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9139 strlen ("_from_arm")) == 0)))
9140 {
9141 char *target_name;
9142 int target_len = namelen - 2;
9143 struct minimal_symbol *minsym;
9144 struct objfile *objfile;
9145 struct obj_section *sec;
9146
9147 if (name[namelen - 1] == 'b')
9148 target_len -= strlen ("_from_thumb");
9149 else
9150 target_len -= strlen ("_from_arm");
9151
9152 target_name = alloca (target_len + 1);
9153 memcpy (target_name, name + 2, target_len);
9154 target_name[target_len] = '\0';
9155
9156 sec = find_pc_section (pc);
9157 objfile = (sec == NULL) ? NULL : sec->objfile;
9158 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9159 if (minsym != NULL)
9160 return SYMBOL_VALUE_ADDRESS (minsym);
9161 else
9162 return 0;
9163 }
9164
c5aa993b 9165 return 0; /* not a stub */
c906108c
SS
9166}
9167
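/* Illustrative examples (added, not part of the original sources): a PC
   inside the linker thunk "_call_via_r3" resolves to the current value of
   r3, and a shim named "__foo_from_thumb" or "__foo_from_arm" resolves to
   the address of the minimal symbol "foo"; any other function name makes
   this routine return 0.  */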
afd7eef0
RE
9168static void
9169set_arm_command (char *args, int from_tty)
9170{
edefbb7c
AC
9171 printf_unfiltered (_("\
9172\"set arm\" must be followed by an appropriate subcommand.\n"));
afd7eef0
RE
9173 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9174}
9175
9176static void
9177show_arm_command (char *args, int from_tty)
9178{
26304000 9179 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
9180}
9181
28e97307
DJ
9182static void
9183arm_update_current_architecture (void)
fd50bc42 9184{
28e97307 9185 struct gdbarch_info info;
fd50bc42 9186
28e97307 9187 /* If the current architecture is not ARM, we have nothing to do. */
1cf3db46 9188 if (gdbarch_bfd_arch_info (target_gdbarch)->arch != bfd_arch_arm)
28e97307 9189 return;
fd50bc42 9190
28e97307
DJ
9191 /* Update the architecture. */
9192 gdbarch_info_init (&info);
fd50bc42 9193
28e97307 9194 if (!gdbarch_update_p (info))
9b20d036 9195 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
9196}
9197
9198static void
9199set_fp_model_sfunc (char *args, int from_tty,
9200 struct cmd_list_element *c)
9201{
9202 enum arm_float_model fp_model;
9203
9204 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9205 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9206 {
9207 arm_fp_model = fp_model;
9208 break;
9209 }
9210
9211 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 9212 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
9213 current_fp_model);
9214
28e97307 9215 arm_update_current_architecture ();
fd50bc42
RE
9216}
9217
9218static void
08546159
AC
9219show_fp_model (struct ui_file *file, int from_tty,
9220 struct cmd_list_element *c, const char *value)
fd50bc42 9221{
1cf3db46 9222 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
fd50bc42 9223
28e97307 9224 if (arm_fp_model == ARM_FLOAT_AUTO
1cf3db46 9225 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
28e97307
DJ
9226 fprintf_filtered (file, _("\
9227The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9228 fp_model_strings[tdep->fp_model]);
9229 else
9230 fprintf_filtered (file, _("\
9231The current ARM floating point model is \"%s\".\n"),
9232 fp_model_strings[arm_fp_model]);
9233}
9234
9235static void
9236arm_set_abi (char *args, int from_tty,
9237 struct cmd_list_element *c)
9238{
9239 enum arm_abi_kind arm_abi;
9240
9241 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9242 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9243 {
9244 arm_abi_global = arm_abi;
9245 break;
9246 }
9247
9248 if (arm_abi == ARM_ABI_LAST)
9249 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9250 arm_abi_string);
9251
9252 arm_update_current_architecture ();
9253}
9254
9255static void
9256arm_show_abi (struct ui_file *file, int from_tty,
9257 struct cmd_list_element *c, const char *value)
9258{
1cf3db46 9259 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
28e97307
DJ
9260
9261 if (arm_abi_global == ARM_ABI_AUTO
1cf3db46 9262 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
28e97307
DJ
9263 fprintf_filtered (file, _("\
9264The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9265 arm_abi_strings[tdep->arm_abi]);
9266 else
9267 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9268 arm_abi_string);
fd50bc42
RE
9269}
9270
0428b8f5
DJ
9271static void
9272arm_show_fallback_mode (struct ui_file *file, int from_tty,
9273 struct cmd_list_element *c, const char *value)
9274{
1cf3db46 9275 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
0428b8f5 9276
0963b4bd
MS
9277 fprintf_filtered (file,
9278 _("The current execution mode assumed "
9279 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
9280 arm_fallback_mode_string);
9281}
9282
9283static void
9284arm_show_force_mode (struct ui_file *file, int from_tty,
9285 struct cmd_list_element *c, const char *value)
9286{
1cf3db46 9287 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
0428b8f5 9288
0963b4bd
MS
9289 fprintf_filtered (file,
9290 _("The current execution mode assumed "
9291 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
9292 arm_force_mode_string);
9293}
9294
afd7eef0
RE
9295/* If the user changes the register disassembly style used for "info
9296 registers" and other commands, we also have to switch the style used
9297 in opcodes for disassembly output. This function is run by the "set
9298 arm disassembly" command, and does just that. */
bc90b915
FN
9299
9300static void
afd7eef0 9301set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
9302 struct cmd_list_element *c)
9303{
afd7eef0 9304 set_disassembly_style ();
bc90b915
FN
9305}
9306\f
966fbf70 9307/* Return the ARM register name corresponding to register I. */
a208b0cb 9308static const char *
d93859e2 9309arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 9310{
58d6951d
DJ
9311 const int num_regs = gdbarch_num_regs (gdbarch);
9312
9313 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9314 && i >= num_regs && i < num_regs + 32)
9315 {
9316 static const char *const vfp_pseudo_names[] = {
9317 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9318 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9319 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9320 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9321 };
9322
9323 return vfp_pseudo_names[i - num_regs];
9324 }
9325
9326 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9327 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9328 {
9329 static const char *const neon_pseudo_names[] = {
9330 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9331 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9332 };
9333
9334 return neon_pseudo_names[i - num_regs - 32];
9335 }
9336
ff6f572f
DJ
9337 if (i >= ARRAY_SIZE (arm_register_names))
9338 /* These registers are only supported on targets which supply
9339 an XML description. */
9340 return "";
9341
966fbf70
RE
9342 return arm_register_names[i];
9343}
9344
bc90b915 9345static void
afd7eef0 9346set_disassembly_style (void)
bc90b915 9347{
123dc839 9348 int current;
bc90b915 9349
123dc839
DJ
9350 /* Find the style that the user wants. */
9351 for (current = 0; current < num_disassembly_options; current++)
9352 if (disassembly_style == valid_disassembly_styles[current])
9353 break;
9354 gdb_assert (current < num_disassembly_options);
bc90b915 9355
94c30b78 9356 /* Synchronize the disassembler. */
bc90b915
FN
9357 set_arm_regname_option (current);
9358}
9359
082fc60d
RE
9360/* Test whether the COFF symbol-specific value corresponds to a Thumb
9361 function. */
9362
9363static int
9364coff_sym_is_thumb (int val)
9365{
f8bf5763
PM
9366 return (val == C_THUMBEXT
9367 || val == C_THUMBSTAT
9368 || val == C_THUMBEXTFUNC
9369 || val == C_THUMBSTATFUNC
9370 || val == C_THUMBLABEL);
082fc60d
RE
9371}
9372
9373/* arm_coff_make_msymbol_special()
9374 arm_elf_make_msymbol_special()
9375
9376 These functions test whether the COFF or ELF symbol corresponds to
9377 an address in thumb code, and set a "special" bit in a minimal
9378 symbol to indicate that it does. */
9379
34e8f22d 9380static void
082fc60d
RE
9381arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9382{
467d42c4
UW
9383 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9384 == ST_BRANCH_TO_THUMB)
082fc60d
RE
9385 MSYMBOL_SET_SPECIAL (msym);
9386}
9387
34e8f22d 9388static void
082fc60d
RE
9389arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9390{
9391 if (coff_sym_is_thumb (val))
9392 MSYMBOL_SET_SPECIAL (msym);
9393}
9394
60c5725c 9395static void
c1bd65d0 9396arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c
DJ
9397{
9398 struct arm_per_objfile *data = arg;
9399 unsigned int i;
9400
9401 for (i = 0; i < objfile->obfd->section_count; i++)
9402 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9403}
9404
9405static void
9406arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9407 asymbol *sym)
9408{
9409 const char *name = bfd_asymbol_name (sym);
9410 struct arm_per_objfile *data;
9411 VEC(arm_mapping_symbol_s) **map_p;
9412 struct arm_mapping_symbol new_map_sym;
9413
9414 gdb_assert (name[0] == '$');
9415 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9416 return;
9417
9418 data = objfile_data (objfile, arm_objfile_data_key);
9419 if (data == NULL)
9420 {
9421 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9422 struct arm_per_objfile);
9423 set_objfile_data (objfile, arm_objfile_data_key, data);
9424 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9425 objfile->obfd->section_count,
9426 VEC(arm_mapping_symbol_s) *);
9427 }
9428 map_p = &data->section_maps[bfd_get_section (sym)->index];
9429
9430 new_map_sym.value = sym->value;
9431 new_map_sym.type = name[1];
9432
9433 /* Assume that most mapping symbols appear in order of increasing
9434 value. If they were randomly distributed, it would be faster to
9435 always push here and then sort at first use. */
9436 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9437 {
9438 struct arm_mapping_symbol *prev_map_sym;
9439
9440 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9441 if (prev_map_sym->value >= sym->value)
9442 {
9443 unsigned int idx;
9444 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9445 arm_compare_mapping_symbols);
9446 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9447 return;
9448 }
9449 }
9450
9451 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9452}
9453
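/* Background note (added, not part of the original sources): "$a", "$t"
   and "$d" are the ELF mapping symbols defined by the ARM ABI, marking the
   start of ARM code, Thumb code and literal data respectively.  The
   per-section vectors built here are searched later (via arm_pc_is_thumb
   and related code) to help decide how a given address should be
   disassembled and stepped through.  */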
756fe439 9454static void
61a1198a 9455arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 9456{
9779414d 9457 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 9458 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
9459
9460 /* If necessary, set the T bit. */
9461 if (arm_apcs_32)
9462 {
9779414d 9463 ULONGEST val, t_bit;
61a1198a 9464 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
9465 t_bit = arm_psr_thumb_bit (gdbarch);
9466 if (arm_pc_is_thumb (gdbarch, pc))
9467 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9468 val | t_bit);
756fe439 9469 else
61a1198a 9470 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 9471 val & ~t_bit);
756fe439
DJ
9472 }
9473}
123dc839 9474
58d6951d
DJ
9475/* Read the contents of a NEON quad register, by reading from two
9476 double registers. This is used to implement the quad pseudo
9477 registers, and for argument passing in case the quad registers are
9478 missing; vectors are passed in quad registers when using the VFP
9479 ABI, even if a NEON unit is not present. REGNUM is the index of
9480 the quad register, in [0, 15]. */
9481
05d1431c 9482static enum register_status
58d6951d
DJ
9483arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9484 int regnum, gdb_byte *buf)
9485{
9486 char name_buf[4];
9487 gdb_byte reg_buf[8];
9488 int offset, double_regnum;
05d1431c 9489 enum register_status status;
58d6951d
DJ
9490
9491 sprintf (name_buf, "d%d", regnum << 1);
9492 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9493 strlen (name_buf));
9494
9495 /* d0 is always the least significant half of q0. */
9496 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9497 offset = 8;
9498 else
9499 offset = 0;
9500
05d1431c
PA
9501 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9502 if (status != REG_VALID)
9503 return status;
58d6951d
DJ
9504 memcpy (buf + offset, reg_buf, 8);
9505
9506 offset = 8 - offset;
05d1431c
PA
9507 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9508 if (status != REG_VALID)
9509 return status;
58d6951d 9510 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
9511
9512 return REG_VALID;
58d6951d
DJ
9513}
9514
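/* Illustrative layout (added, not part of the original sources): q0 is the
   concatenation d1:d0, with d0 the least significant half, and d0 in turn
   overlays s1:s0.  On a little-endian target the buffer for q0 is d0
   followed by d1 (offsets 0 and 8); on a big-endian target the order is
   reversed, which is why the code above starts at offset 8 in that
   case.  */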
05d1431c 9515static enum register_status
58d6951d
DJ
9516arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9517 int regnum, gdb_byte *buf)
9518{
9519 const int num_regs = gdbarch_num_regs (gdbarch);
9520 char name_buf[4];
9521 gdb_byte reg_buf[8];
9522 int offset, double_regnum;
9523
9524 gdb_assert (regnum >= num_regs);
9525 regnum -= num_regs;
9526
9527 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9528 /* Quad-precision register. */
05d1431c 9529 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
9530 else
9531 {
05d1431c
PA
9532 enum register_status status;
9533
58d6951d
DJ
9534 /* Single-precision register. */
9535 gdb_assert (regnum < 32);
9536
9537 /* s0 is always the least significant half of d0. */
9538 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9539 offset = (regnum & 1) ? 0 : 4;
9540 else
9541 offset = (regnum & 1) ? 4 : 0;
9542
9543 sprintf (name_buf, "d%d", regnum >> 1);
9544 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9545 strlen (name_buf));
9546
05d1431c
PA
9547 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9548 if (status == REG_VALID)
9549 memcpy (buf, reg_buf + offset, 4);
9550 return status;
58d6951d
DJ
9551 }
9552}
9553
9554/* Store the contents of BUF to a NEON quad register, by writing to
9555 two double registers. This is used to implement the quad pseudo
9556 registers, and for argument passing in case the quad registers are
9557 missing; vectors are passed in quad registers when using the VFP
9558 ABI, even if a NEON unit is not present. REGNUM is the index
9559 of the quad register, in [0, 15]. */
9560
9561static void
9562arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9563 int regnum, const gdb_byte *buf)
9564{
9565 char name_buf[4];
9566 gdb_byte reg_buf[8];
9567 int offset, double_regnum;
9568
9569 sprintf (name_buf, "d%d", regnum << 1);
9570 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9571 strlen (name_buf));
9572
9573 /* d0 is always the least significant half of q0. */
9574 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9575 offset = 8;
9576 else
9577 offset = 0;
9578
9579 regcache_raw_write (regcache, double_regnum, buf + offset);
9580 offset = 8 - offset;
9581 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9582}
9583
9584static void
9585arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9586 int regnum, const gdb_byte *buf)
9587{
9588 const int num_regs = gdbarch_num_regs (gdbarch);
9589 char name_buf[4];
9590 gdb_byte reg_buf[8];
9591 int offset, double_regnum;
9592
9593 gdb_assert (regnum >= num_regs);
9594 regnum -= num_regs;
9595
9596 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9597 /* Quad-precision register. */
9598 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9599 else
9600 {
9601 /* Single-precision register. */
9602 gdb_assert (regnum < 32);
9603
9604 /* s0 is always the least significant half of d0. */
9605 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9606 offset = (regnum & 1) ? 0 : 4;
9607 else
9608 offset = (regnum & 1) ? 4 : 0;
9609
9610 sprintf (name_buf, "d%d", regnum >> 1);
9611 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9612 strlen (name_buf));
9613
9614 regcache_raw_read (regcache, double_regnum, reg_buf);
9615 memcpy (reg_buf + offset, buf, 4);
9616 regcache_raw_write (regcache, double_regnum, reg_buf);
9617 }
9618}
9619
123dc839
DJ
9620static struct value *
9621value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9622{
9623 const int *reg_p = baton;
9624 return value_of_register (*reg_p, frame);
9625}
97e03143 9626\f
70f80edf
JT
9627static enum gdb_osabi
9628arm_elf_osabi_sniffer (bfd *abfd)
97e03143 9629{
2af48f68 9630 unsigned int elfosabi;
70f80edf 9631 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 9632
70f80edf 9633 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 9634
28e97307
DJ
9635 if (elfosabi == ELFOSABI_ARM)
9636 /* GNU tools use this value. Check note sections in this case,
9637 as well. */
9638 bfd_map_over_sections (abfd,
9639 generic_elf_osabi_sniff_abi_tag_sections,
9640 &osabi);
97e03143 9641
28e97307 9642 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 9643 return osabi;
97e03143
RE
9644}
9645
54483882
YQ
9646static int
9647arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9648 struct reggroup *group)
9649{
2c291032
YQ
9650 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
9651 this, the FPS register belongs to save_reggroup, restore_reggroup, and
9652 all_reggroup, of course. */
54483882 9653 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
9654 return (group == float_reggroup
9655 || group == save_reggroup
9656 || group == restore_reggroup
9657 || group == all_reggroup);
54483882
YQ
9658 else
9659 return default_register_reggroup_p (gdbarch, regnum, group);
9660}
9661
70f80edf 9662\f
da3c6d4a
MS
9663/* Initialize the current architecture based on INFO. If possible,
9664 re-use an architecture from ARCHES, which is a list of
9665 architectures already created during this debugging session.
97e03143 9666
da3c6d4a
MS
9667 Called e.g. at program startup, when reading a core file, and when
9668 reading a binary file. */
97e03143 9669
39bbf761
RE
9670static struct gdbarch *
9671arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9672{
97e03143 9673 struct gdbarch_tdep *tdep;
39bbf761 9674 struct gdbarch *gdbarch;
28e97307
DJ
9675 struct gdbarch_list *best_arch;
9676 enum arm_abi_kind arm_abi = arm_abi_global;
9677 enum arm_float_model fp_model = arm_fp_model;
123dc839 9678 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 9679 int i, is_m = 0;
58d6951d
DJ
9680 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9681 int have_neon = 0;
ff6f572f 9682 int have_fpa_registers = 1;
9779414d
DJ
9683 const struct target_desc *tdesc = info.target_desc;
9684
9685 /* If we have an object to base this architecture on, try to determine
9686 its ABI. */
9687
9688 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9689 {
9690 int ei_osabi, e_flags;
9691
9692 switch (bfd_get_flavour (info.abfd))
9693 {
9694 case bfd_target_aout_flavour:
9695 /* Assume it's an old APCS-style ABI. */
9696 arm_abi = ARM_ABI_APCS;
9697 break;
9698
9699 case bfd_target_coff_flavour:
9700 /* Assume it's an old APCS-style ABI. */
9701 /* XXX WinCE? */
9702 arm_abi = ARM_ABI_APCS;
9703 break;
9704
9705 case bfd_target_elf_flavour:
9706 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9707 e_flags = elf_elfheader (info.abfd)->e_flags;
9708
9709 if (ei_osabi == ELFOSABI_ARM)
9710 {
9711 /* GNU tools used to use this value, but do not for EABI
9712 objects. There's nowhere to tag an EABI version
9713 anyway, so assume APCS. */
9714 arm_abi = ARM_ABI_APCS;
9715 }
9716 else if (ei_osabi == ELFOSABI_NONE)
9717 {
9718 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9719 int attr_arch, attr_profile;
9720
9721 switch (eabi_ver)
9722 {
9723 case EF_ARM_EABI_UNKNOWN:
9724 /* Assume GNU tools. */
9725 arm_abi = ARM_ABI_APCS;
9726 break;
9727
9728 case EF_ARM_EABI_VER4:
9729 case EF_ARM_EABI_VER5:
9730 arm_abi = ARM_ABI_AAPCS;
9731 /* EABI binaries default to VFP float ordering.
9732 They may also contain build attributes that can
9733 be used to identify if the VFP argument-passing
9734 ABI is in use. */
9735 if (fp_model == ARM_FLOAT_AUTO)
9736 {
9737#ifdef HAVE_ELF
9738 switch (bfd_elf_get_obj_attr_int (info.abfd,
9739 OBJ_ATTR_PROC,
9740 Tag_ABI_VFP_args))
9741 {
9742 case 0:
9743 /* "The user intended FP parameter/result
9744 passing to conform to AAPCS, base
9745 variant". */
9746 fp_model = ARM_FLOAT_SOFT_VFP;
9747 break;
9748 case 1:
9749 /* "The user intended FP parameter/result
9750 passing to conform to AAPCS, VFP
9751 variant". */
9752 fp_model = ARM_FLOAT_VFP;
9753 break;
9754 case 2:
9755 /* "The user intended FP parameter/result
9756 passing to conform to tool chain-specific
9757 conventions" - we don't know any such
9758 conventions, so leave it as "auto". */
9759 break;
9760 default:
9761 /* Attribute value not mentioned in the
9762 October 2008 ABI, so leave it as
9763 "auto". */
9764 break;
9765 }
9766#else
9767 fp_model = ARM_FLOAT_SOFT_VFP;
9768#endif
9769 }
9770 break;
9771
9772 default:
9773 /* Leave it as "auto". */
9774 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9775 break;
9776 }
9777
9778#ifdef HAVE_ELF
9779 /* Detect M-profile programs. This only works if the
9780 executable file includes build attributes; GCC does
9781 copy them to the executable, but e.g. RealView does
9782 not. */
9783 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9784 Tag_CPU_arch);
0963b4bd
MS
9785 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9786 OBJ_ATTR_PROC,
9779414d
DJ
9787 Tag_CPU_arch_profile);
9788 /* GCC specifies the profile for v6-M; RealView only
9789 specifies the profile for architectures starting with
9790 V7 (as opposed to architectures with a tag
9791 numerically greater than TAG_CPU_ARCH_V7). */
9792 if (!tdesc_has_registers (tdesc)
9793 && (attr_arch == TAG_CPU_ARCH_V6_M
9794 || attr_arch == TAG_CPU_ARCH_V6S_M
9795 || attr_profile == 'M'))
9796 tdesc = tdesc_arm_with_m;
9797#endif
9798 }
9799
9800 if (fp_model == ARM_FLOAT_AUTO)
9801 {
9802 int e_flags = elf_elfheader (info.abfd)->e_flags;
9803
9804 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9805 {
9806 case 0:
9807 /* Leave it as "auto". Strictly speaking this case
9808 means FPA, but almost nobody uses that now, and
9809 many toolchains fail to set the appropriate bits
9810 for the floating-point model they use. */
9811 break;
9812 case EF_ARM_SOFT_FLOAT:
9813 fp_model = ARM_FLOAT_SOFT_FPA;
9814 break;
9815 case EF_ARM_VFP_FLOAT:
9816 fp_model = ARM_FLOAT_VFP;
9817 break;
9818 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9819 fp_model = ARM_FLOAT_SOFT_VFP;
9820 break;
9821 }
9822 }
9823
9824 if (e_flags & EF_ARM_BE8)
9825 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9826
9827 break;
9828
9829 default:
9830 /* Leave it as "auto". */
9831 break;
9832 }
9833 }
123dc839
DJ
9834
9835 /* Check any target description for validity. */
9779414d 9836 if (tdesc_has_registers (tdesc))
123dc839
DJ
9837 {
9838 /* For most registers we require GDB's default names; but also allow
9839 the numeric names for sp / lr / pc, as a convenience. */
9840 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9841 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9842 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9843
9844 const struct tdesc_feature *feature;
58d6951d 9845 int valid_p;
123dc839 9846
9779414d 9847 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9848 "org.gnu.gdb.arm.core");
9849 if (feature == NULL)
9779414d
DJ
9850 {
9851 feature = tdesc_find_feature (tdesc,
9852 "org.gnu.gdb.arm.m-profile");
9853 if (feature == NULL)
9854 return NULL;
9855 else
9856 is_m = 1;
9857 }
123dc839
DJ
9858
9859 tdesc_data = tdesc_data_alloc ();
9860
9861 valid_p = 1;
9862 for (i = 0; i < ARM_SP_REGNUM; i++)
9863 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9864 arm_register_names[i]);
9865 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9866 ARM_SP_REGNUM,
9867 arm_sp_names);
9868 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9869 ARM_LR_REGNUM,
9870 arm_lr_names);
9871 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9872 ARM_PC_REGNUM,
9873 arm_pc_names);
9779414d
DJ
9874 if (is_m)
9875 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9876 ARM_PS_REGNUM, "xpsr");
9877 else
9878 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9879 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9880
9881 if (!valid_p)
9882 {
9883 tdesc_data_cleanup (tdesc_data);
9884 return NULL;
9885 }
9886
9779414d 9887 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9888 "org.gnu.gdb.arm.fpa");
9889 if (feature != NULL)
9890 {
9891 valid_p = 1;
9892 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9893 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9894 arm_register_names[i]);
9895 if (!valid_p)
9896 {
9897 tdesc_data_cleanup (tdesc_data);
9898 return NULL;
9899 }
9900 }
ff6f572f
DJ
9901 else
9902 have_fpa_registers = 0;
9903
9779414d 9904 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9905 "org.gnu.gdb.xscale.iwmmxt");
9906 if (feature != NULL)
9907 {
9908 static const char *const iwmmxt_names[] = {
9909 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9910 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9911 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9912 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9913 };
9914
9915 valid_p = 1;
9916 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9917 valid_p
9918 &= tdesc_numbered_register (feature, tdesc_data, i,
9919 iwmmxt_names[i - ARM_WR0_REGNUM]);
9920
9921 /* Check for the control registers, but do not fail if they
9922 are missing. */
9923 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9924 tdesc_numbered_register (feature, tdesc_data, i,
9925 iwmmxt_names[i - ARM_WR0_REGNUM]);
9926
9927 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9928 valid_p
9929 &= tdesc_numbered_register (feature, tdesc_data, i,
9930 iwmmxt_names[i - ARM_WR0_REGNUM]);
9931
9932 if (!valid_p)
9933 {
9934 tdesc_data_cleanup (tdesc_data);
9935 return NULL;
9936 }
9937 }
58d6951d
DJ
9938
9939 /* If we have a VFP unit, check whether the single precision registers
9940 are present. If not, then we will synthesize them as pseudo
9941 registers. */
9779414d 9942 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9943 "org.gnu.gdb.arm.vfp");
9944 if (feature != NULL)
9945 {
9946 static const char *const vfp_double_names[] = {
9947 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9948 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9949 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9950 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9951 };
9952
9953 /* Require the double precision registers. There must be either
9954 16 or 32. */
9955 valid_p = 1;
9956 for (i = 0; i < 32; i++)
9957 {
9958 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9959 ARM_D0_REGNUM + i,
9960 vfp_double_names[i]);
9961 if (!valid_p)
9962 break;
9963 }
2b9e5ea6
UW
9964 if (!valid_p && i == 16)
9965 valid_p = 1;
58d6951d 9966
2b9e5ea6
UW
9967 /* Also require FPSCR. */
9968 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9969 ARM_FPSCR_REGNUM, "fpscr");
9970 if (!valid_p)
58d6951d
DJ
9971 {
9972 tdesc_data_cleanup (tdesc_data);
9973 return NULL;
9974 }
9975
9976 if (tdesc_unnumbered_register (feature, "s0") == 0)
9977 have_vfp_pseudos = 1;
9978
9979 have_vfp_registers = 1;
9980
9981 /* If we have VFP, also check for NEON. The architecture allows
9982 NEON without VFP (integer vector operations only), but GDB
9983 does not support that. */
9779414d 9984 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9985 "org.gnu.gdb.arm.neon");
9986 if (feature != NULL)
9987 {
9988 /* NEON requires 32 double-precision registers. */
9989 if (i != 32)
9990 {
9991 tdesc_data_cleanup (tdesc_data);
9992 return NULL;
9993 }
9994
9995 /* If there are quad registers defined by the stub, use
9996 their type; otherwise (normally) provide them with
9997 the default type. */
9998 if (tdesc_unnumbered_register (feature, "q0") == 0)
9999 have_neon_pseudos = 1;
10000
10001 have_neon = 1;
10002 }
10003 }
123dc839 10004 }
39bbf761 10005
28e97307
DJ
10006 /* If there is already a candidate, use it. */
10007 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10008 best_arch != NULL;
10009 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10010 {
b8926edc
DJ
10011 if (arm_abi != ARM_ABI_AUTO
10012 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
10013 continue;
10014
b8926edc
DJ
10015 if (fp_model != ARM_FLOAT_AUTO
10016 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
10017 continue;
10018
58d6951d
DJ
10019 /* There are various other properties in tdep that we do not
10020 need to check here: those derived from a target description,
10021 since gdbarches with a different target description are
10022 automatically disqualified. */
10023
9779414d
DJ
10024 /* Do check is_m, though, since it might come from the binary. */
10025 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10026 continue;
10027
28e97307
DJ
10028 /* Found a match. */
10029 break;
10030 }
97e03143 10031
28e97307 10032 if (best_arch != NULL)
123dc839
DJ
10033 {
10034 if (tdesc_data != NULL)
10035 tdesc_data_cleanup (tdesc_data);
10036 return best_arch->gdbarch;
10037 }
28e97307
DJ
10038
10039 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
97e03143
RE
10040 gdbarch = gdbarch_alloc (&info, tdep);
10041
28e97307
DJ
10042 /* Record additional information about the architecture we are defining.
10043 These are gdbarch discriminators, like the OSABI. */
10044 tdep->arm_abi = arm_abi;
10045 tdep->fp_model = fp_model;
9779414d 10046 tdep->is_m = is_m;
ff6f572f 10047 tdep->have_fpa_registers = have_fpa_registers;
58d6951d
DJ
10048 tdep->have_vfp_registers = have_vfp_registers;
10049 tdep->have_vfp_pseudos = have_vfp_pseudos;
10050 tdep->have_neon_pseudos = have_neon_pseudos;
10051 tdep->have_neon = have_neon;
08216dd7
RE
10052
10053 /* Breakpoints. */
9d4fde75 10054 switch (info.byte_order_for_code)
67255d04
RE
10055 {
10056 case BFD_ENDIAN_BIG:
66e810cd
RE
10057 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10058 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10059 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10060 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10061
67255d04
RE
10062 break;
10063
10064 case BFD_ENDIAN_LITTLE:
66e810cd
RE
10065 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10066 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10067 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10068 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10069
67255d04
RE
10070 break;
10071
10072 default:
10073 internal_error (__FILE__, __LINE__,
edefbb7c 10074 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
10075 }
10076
d7b486e7
RE
10077 /* On ARM targets char defaults to unsigned. */
10078 set_gdbarch_char_signed (gdbarch, 0);
10079
cca44b1b
JB
10080 /* Note: for displaced stepping, this includes the breakpoint, and one word
10081 of additional scratch space. This setting isn't used for anything besides
10082 displaced stepping at present. */
10083 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10084
9df628e0 10085 /* This should be low enough for everything. */
97e03143 10086 tdep->lowest_pc = 0x20;
94c30b78 10087 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 10088
7c00367c
MK
10089 /* The default, for both APCS and AAPCS, is to return small
10090 structures in registers. */
10091 tdep->struct_return = reg_struct_return;
10092
2dd604e7 10093 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 10094 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 10095
756fe439
DJ
10096 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10097
148754e5 10098 /* Frame handling. */
a262aec2 10099 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
10100 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10101 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10102
eb5492fa 10103 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 10104
34e8f22d
RE
10105 /* Address manipulation. */
10106 set_gdbarch_smash_text_address (gdbarch, arm_smash_text_address);
10107 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10108
34e8f22d
RE
10109 /* Advance PC across function entry code. */
10110 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10111
4024ca99
UW
10112 /* Detect whether PC is in function epilogue. */
10113 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10114
190dce09
UW
10115 /* Skip trampolines. */
10116 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10117
34e8f22d
RE
10118 /* The stack grows downward. */
10119 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10120
10121 /* Breakpoint manipulation. */
10122 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
177321bd
DJ
10123 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10124 arm_remote_breakpoint_from_pc);
34e8f22d
RE
10125
10126 /* Information about registers, etc. */
34e8f22d
RE
10127 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10128 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 10129 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 10130 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 10131 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 10132
ff6f572f
DJ
10133 /* This "info float" is FPA-specific. Use the generic version if we
10134 do not have FPA. */
10135 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10136 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10137
26216b98 10138 /* Internal <-> external register number maps. */
ff6f572f 10139 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
10140 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10141
34e8f22d
RE
10142 set_gdbarch_register_name (gdbarch, arm_register_name);
10143
10144 /* Returning results. */
2af48f68 10145 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 10146
03d48a7d
RE
10147 /* Disassembly. */
10148 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10149
34e8f22d
RE
10150 /* Minsymbol frobbing. */
10151 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10152 set_gdbarch_coff_make_msymbol_special (gdbarch,
10153 arm_coff_make_msymbol_special);
60c5725c 10154 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 10155
f9d67f43
DJ
10156 /* Thumb-2 IT block support. */
10157 set_gdbarch_adjust_breakpoint_address (gdbarch,
10158 arm_adjust_breakpoint_address);
10159
0d5de010
DJ
10160 /* Virtual tables. */
10161 set_gdbarch_vbit_in_delta (gdbarch, 1);
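 /* On ARM the low bit of a code address is the Thumb bit, so the
 C++ ABI keeps the virtual-function discriminator of a pointer to
 member function in the delta field rather than in the function
 pointer itself; hence vbit_in_delta is 1 here. */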
10162
97e03143 10163 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 10164 gdbarch_init_osabi (info, gdbarch);
97e03143 10165
b39cc962
DJ
10166 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10167
eb5492fa 10168 /* Add some default predicates. */
a262aec2
DJ
10169 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10170 dwarf2_append_unwinders (gdbarch);
0e9e9abd 10171 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
a262aec2 10172 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
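 /* The unwinders are tried in the order they are appended: the stub
 unwinder first, then the DWARF CFI unwinders, the exception-table
 (exidx) unwinder, and finally prologue analysis as the fallback. */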
eb5492fa 10173
97e03143
RE
10174 /* Now we have tuned the configuration, set a few final things,
10175 based on what the OS ABI has told us. */
10176
b8926edc
DJ
10177 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10178 binaries are always marked. */
10179 if (tdep->arm_abi == ARM_ABI_AUTO)
10180 tdep->arm_abi = ARM_ABI_APCS;
10181
e3039479
UW
10182 /* Watchpoints are not steppable. */
10183 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10184
b8926edc
DJ
10185 /* We used to default to FPA for generic ARM, but almost nobody
10186 uses that now, and we now provide a way for the user to force
10187 the model. So default to the most useful variant. */
10188 if (tdep->fp_model == ARM_FLOAT_AUTO)
10189 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10190
9df628e0
RE
10191 if (tdep->jb_pc >= 0)
10192 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10193
08216dd7 10194 /* Floating point sizes and format. */
8da61cc4 10195 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 10196 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 10197 {
8da61cc4
DJ
10198 set_gdbarch_double_format
10199 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10200 set_gdbarch_long_double_format
10201 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10202 }
10203 else
10204 {
10205 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10206 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
10207 }
10208
58d6951d
DJ
10209 if (have_vfp_pseudos)
10210 {
10211 /* NOTE: These are the only pseudo registers used by
10212 the ARM target at the moment. If more are added, a
10213 little more care in numbering will be needed. */
10214
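 /* The 32 base pseudo registers are the single-precision views
 s0-s31 of the VFP D registers; NEON targets add 16 more for the
 quad-register views q0-q15. */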
10215 int num_pseudos = 32;
10216 if (have_neon_pseudos)
10217 num_pseudos += 16;
10218 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10219 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10220 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10221 }
10222
123dc839 10223 if (tdesc_data)
58d6951d
DJ
10224 {
10225 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10226
9779414d 10227 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
10228
10229 /* Override tdesc_register_type to adjust the types of VFP
10230 registers for NEON. */
10231 set_gdbarch_register_type (gdbarch, arm_register_type);
10232 }
123dc839
DJ
10233
10234 /* Add standard register aliases. We add aliases even for those
10235 names which are used by the current architecture - it's simpler,
10236 and does no harm, since nothing ever lists user registers. */
10237 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10238 user_reg_add (gdbarch, arm_register_aliases[i].name,
10239 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10240
39bbf761
RE
10241 return gdbarch;
10242}
10243
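/* Implementation of the "dump_tdep" callback registered with
 gdbarch_register in _initialize_arm_tdep below; it prints the
 ARM-specific part of the architecture state. */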
97e03143 10244static void
2af46ca0 10245arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 10246{
2af46ca0 10247 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
10248
10249 if (tdep == NULL)
10250 return;
10251
edefbb7c 10252 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
10253 (unsigned long) tdep->lowest_pc);
10254}
10255
a78f21af
AC
10256extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10257
c906108c 10258void
ed9a39eb 10259_initialize_arm_tdep (void)
c906108c 10260{
bc90b915
FN
10261 struct ui_file *stb;
10262 long length;
26304000 10263 struct cmd_list_element *new_set, *new_show;
53904c9e
AC
10264 const char *setname;
10265 const char *setdesc;
4bd7b427 10266 const char *const *regnames;
bc90b915
FN
10267 int numregs, i, j;
10268 static char *helptext;
edefbb7c
AC
10269 char regdesc[1024], *rdptr = regdesc;
10270 size_t rest = sizeof (regdesc);
085dd6e6 10271
42cf1509 10272 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 10273
60c5725c 10274 arm_objfile_data_key
c1bd65d0 10275 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 10276
0e9e9abd
UW
10277 /* Add ourselves to objfile event chain. */
10278 observer_attach_new_objfile (arm_exidx_new_objfile);
10279 arm_exidx_data_key
10280 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
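 /* The exidx data caches the ARM exception-table index entries
 (.ARM.exidx) found in each new objfile; they back the
 arm_exidx_unwind unwinder installed in arm_gdbarch_init. */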
10281
70f80edf
JT
10282 /* Register an ELF OS ABI sniffer for ARM binaries. */
10283 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10284 bfd_target_elf_flavour,
10285 arm_elf_osabi_sniffer);
10286
9779414d
DJ
10287 /* Initialize the standard target descriptions. */
10288 initialize_tdesc_arm_with_m ();
ef7e8358
UW
10289 initialize_tdesc_arm_with_iwmmxt ();
10290 initialize_tdesc_arm_with_vfpv2 ();
10291 initialize_tdesc_arm_with_vfpv3 ();
10292 initialize_tdesc_arm_with_neon ();
9779414d 10293
94c30b78 10294 /* Get the number of possible sets of register names defined in opcodes. */
afd7eef0
RE
10295 num_disassembly_options = get_arm_regname_num_options ();
10296
10297 /* Add root prefix command for all "set arm"/"show arm" commands. */
10298 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 10299 _("Various ARM-specific commands."),
afd7eef0
RE
10300 &setarmcmdlist, "set arm ", 0, &setlist);
10301
10302 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 10303 _("Various ARM-specific commands."),
afd7eef0 10304 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 10305
94c30b78 10306 /* Sync the opcode insn printer with our register viewer. */
bc90b915 10307 parse_arm_disassembler_option ("reg-names-std");
c5aa993b 10308
eefe576e
AC
10309 /* Initialize the array that will be passed to
10310 add_setshow_enum_cmd(). */
afd7eef0
RE
10311 valid_disassembly_styles
10312 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10313 for (i = 0; i < num_disassembly_options; i++)
bc90b915
FN
10314 {
10315 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
afd7eef0 10316 valid_disassembly_styles[i] = setname;
edefbb7c
AC
10317 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10318 rdptr += length;
10319 rest -= length;
123dc839
DJ
10320 /* When we find the default names, tell the disassembler to use
10321 them. */
bc90b915
FN
10322 if (!strcmp (setname, "std"))
10323 {
afd7eef0 10324 disassembly_style = setname;
bc90b915
FN
10325 set_arm_regname_option (i);
10326 }
10327 }
94c30b78 10328 /* Mark the end of valid options. */
afd7eef0 10329 valid_disassembly_styles[num_disassembly_options] = NULL;
c906108c 10330
edefbb7c
AC
10331 /* Create the help text. */
10332 stb = mem_fileopen ();
10333 fprintf_unfiltered (stb, "%s%s%s",
10334 _("The valid values are:\n"),
10335 regdesc,
10336 _("The default is \"std\"."));
759ef836 10337 helptext = ui_file_xstrdup (stb, NULL);
bc90b915 10338 ui_file_delete (stb);
ed9a39eb 10339
edefbb7c
AC
10340 add_setshow_enum_cmd ("disassembler", no_class,
10341 valid_disassembly_styles, &disassembly_style,
10342 _("Set the disassembly style."),
10343 _("Show the disassembly style."),
10344 helptext,
2c5b56ce 10345 set_disassembly_style_sfunc,
0963b4bd
MS
10346 NULL, /* FIXME: i18n: The disassembly style is
10347 \"%s\". */
7376b4c2 10348 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
10349
10350 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10351 _("Set usage of ARM 32-bit mode."),
10352 _("Show usage of ARM 32-bit mode."),
10353 _("When off, a 26-bit PC will be used."),
2c5b56ce 10354 NULL,
0963b4bd
MS
10355 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10356 mode is %s. */
26304000 10357 &setarmcmdlist, &showarmcmdlist);
c906108c 10358
fd50bc42 10359 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
10360 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10361 _("Set the floating point type."),
10362 _("Show the floating point type."),
10363 _("auto - Determine the FP typefrom the OS-ABI.\n\
10364softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10365fpa - FPA co-processor (GCC compiled).\n\
10366softvfp - Software FP with pure-endian doubles.\n\
10367vfp - VFP co-processor."),
edefbb7c 10368 set_fp_model_sfunc, show_fp_model,
7376b4c2 10369 &setarmcmdlist, &showarmcmdlist);
fd50bc42 10370
28e97307
DJ
10371 /* Add a command to allow the user to force the ABI. */
10372 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10373 _("Set the ABI."),
10374 _("Show the ABI."),
10375 NULL, arm_set_abi, arm_show_abi,
10376 &setarmcmdlist, &showarmcmdlist);
10377
0428b8f5
DJ
10378 /* Add two commands to allow the user to force the assumed
10379 execution mode. */
10380 add_setshow_enum_cmd ("fallback-mode", class_support,
10381 arm_mode_strings, &arm_fallback_mode_string,
10382 _("Set the mode assumed when symbols are unavailable."),
10383 _("Show the mode assumed when symbols are unavailable."),
10384 NULL, NULL, arm_show_fallback_mode,
10385 &setarmcmdlist, &showarmcmdlist);
10386 add_setshow_enum_cmd ("force-mode", class_support,
10387 arm_mode_strings, &arm_force_mode_string,
10388 _("Set the mode assumed even when symbols are available."),
10389 _("Show the mode assumed even when symbols are available."),
10390 NULL, NULL, arm_show_force_mode,
10391 &setarmcmdlist, &showarmcmdlist);
10392
6529d2dd 10393 /* Debugging flag. */
edefbb7c
AC
10394 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10395 _("Set ARM debugging."),
10396 _("Show ARM debugging."),
10397 _("When on, arm-specific debugging is enabled."),
2c5b56ce 10398 NULL,
7915a72c 10399 NULL, /* FIXME: i18n: "ARM debugging is %s." */
26304000 10400 &setdebuglist, &showdebuglist);
c906108c 10401}