* m68klinux-nat.c (fetch_register): Fix strict-aliasing violation.
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
CommitLineData
ed9a39eb 1/* Common target dependent code for GDB on ARM systems.
0fd88904 2
0b302171
JB
3 Copyright (C) 1988-1989, 1991-1993, 1995-1996, 1998-2012 Free
4 Software Foundation, Inc.
c906108c 5
c5aa993b 6 This file is part of GDB.
c906108c 7
c5aa993b
JM
8 This program is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
a9762ec7 10 the Free Software Foundation; either version 3 of the License, or
c5aa993b 11 (at your option) any later version.
c906108c 12
c5aa993b
JM
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
c906108c 17
c5aa993b 18 You should have received a copy of the GNU General Public License
a9762ec7 19 along with this program. If not, see <http://www.gnu.org/licenses/>. */
c906108c 20
0963b4bd 21#include <ctype.h> /* XXX for isupper (). */
34e8f22d 22
c906108c
SS
23#include "defs.h"
24#include "frame.h"
25#include "inferior.h"
26#include "gdbcmd.h"
27#include "gdbcore.h"
c906108c 28#include "gdb_string.h"
0963b4bd 29#include "dis-asm.h" /* For register styles. */
4e052eda 30#include "regcache.h"
54483882 31#include "reggroups.h"
d16aafd8 32#include "doublest.h"
fd0407d6 33#include "value.h"
34e8f22d 34#include "arch-utils.h"
4be87837 35#include "osabi.h"
eb5492fa
DJ
36#include "frame-unwind.h"
37#include "frame-base.h"
38#include "trad-frame.h"
842e1f1e
DJ
39#include "objfiles.h"
40#include "dwarf2-frame.h"
e4c16157 41#include "gdbtypes.h"
29d73ae4 42#include "prologue-value.h"
25f8c692 43#include "remote.h"
123dc839
DJ
44#include "target-descriptions.h"
45#include "user-regs.h"
0e9e9abd 46#include "observer.h"
34e8f22d
RE
47
48#include "arm-tdep.h"
26216b98 49#include "gdb/sim-arm.h"
34e8f22d 50
082fc60d
RE
51#include "elf-bfd.h"
52#include "coff/internal.h"
97e03143 53#include "elf/arm.h"
c906108c 54
26216b98 55#include "gdb_assert.h"
60c5725c 56#include "vec.h"
26216b98 57
9779414d 58#include "features/arm-with-m.c"
25f8c692 59#include "features/arm-with-m-fpa-layout.c"
ef7e8358
UW
60#include "features/arm-with-iwmmxt.c"
61#include "features/arm-with-vfpv2.c"
62#include "features/arm-with-vfpv3.c"
63#include "features/arm-with-neon.c"
9779414d 64
6529d2dd
AC
65static int arm_debug;
66
082fc60d
RE
67/* Macros for setting and testing a bit in a minimal symbol that marks
68 it as Thumb function. The MSB of the minimal symbol's "info" field
f594e5e9 69 is used for this purpose.
082fc60d
RE
70
71 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
f594e5e9 72 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
082fc60d 73
0963b4bd 74#define MSYMBOL_SET_SPECIAL(msym) \
b887350f 75 MSYMBOL_TARGET_FLAG_1 (msym) = 1
082fc60d
RE
76
77#define MSYMBOL_IS_SPECIAL(msym) \
b887350f 78 MSYMBOL_TARGET_FLAG_1 (msym)
082fc60d 79
60c5725c
DJ
80/* Per-objfile data used for mapping symbols. */
81static const struct objfile_data *arm_objfile_data_key;
82
83struct arm_mapping_symbol
84{
85 bfd_vma value;
86 char type;
87};
88typedef struct arm_mapping_symbol arm_mapping_symbol_s;
89DEF_VEC_O(arm_mapping_symbol_s);
90
91struct arm_per_objfile
92{
93 VEC(arm_mapping_symbol_s) **section_maps;
94};
95
afd7eef0
RE
96/* The list of available "set arm ..." and "show arm ..." commands. */
97static struct cmd_list_element *setarmcmdlist = NULL;
98static struct cmd_list_element *showarmcmdlist = NULL;
99
fd50bc42
RE
100/* The type of floating-point to use. Keep this in sync with enum
101 arm_float_model, and the help string in _initialize_arm_tdep. */
40478521 102static const char *const fp_model_strings[] =
fd50bc42
RE
103{
104 "auto",
105 "softfpa",
106 "fpa",
107 "softvfp",
28e97307
DJ
108 "vfp",
109 NULL
fd50bc42
RE
110};
111
112/* A variable that can be configured by the user. */
113static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
114static const char *current_fp_model = "auto";
115
28e97307 116/* The ABI to use. Keep this in sync with arm_abi_kind. */
40478521 117static const char *const arm_abi_strings[] =
28e97307
DJ
118{
119 "auto",
120 "APCS",
121 "AAPCS",
122 NULL
123};
124
125/* A variable that can be configured by the user. */
126static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
127static const char *arm_abi_string = "auto";
128
0428b8f5 129/* The execution mode to assume. */
40478521 130static const char *const arm_mode_strings[] =
0428b8f5
DJ
131 {
132 "auto",
133 "arm",
68770265
MGD
134 "thumb",
135 NULL
0428b8f5
DJ
136 };
137
138static const char *arm_fallback_mode_string = "auto";
139static const char *arm_force_mode_string = "auto";
140
18819fa6
UW
141/* Internal override of the execution mode. -1 means no override,
142 0 means override to ARM mode, 1 means override to Thumb mode.
143 The effect is the same as if arm_force_mode has been set by the
144 user (except the internal override has precedence over a user's
145 arm_force_mode override). */
146static int arm_override_mode = -1;
147
94c30b78 148/* Number of different reg name sets (options). */
afd7eef0 149static int num_disassembly_options;
bc90b915 150
f32bf4a4
YQ
151/* The standard register names, and all the valid aliases for them. Note
152 that `fp', `sp' and `pc' are not added in this alias list, because they
153 have been added as builtin user registers in
154 std-regs.c:_initialize_frame_reg. */
123dc839
DJ
155static const struct
156{
157 const char *name;
158 int regnum;
159} arm_register_aliases[] = {
160 /* Basic register numbers. */
161 { "r0", 0 },
162 { "r1", 1 },
163 { "r2", 2 },
164 { "r3", 3 },
165 { "r4", 4 },
166 { "r5", 5 },
167 { "r6", 6 },
168 { "r7", 7 },
169 { "r8", 8 },
170 { "r9", 9 },
171 { "r10", 10 },
172 { "r11", 11 },
173 { "r12", 12 },
174 { "r13", 13 },
175 { "r14", 14 },
176 { "r15", 15 },
177 /* Synonyms (argument and variable registers). */
178 { "a1", 0 },
179 { "a2", 1 },
180 { "a3", 2 },
181 { "a4", 3 },
182 { "v1", 4 },
183 { "v2", 5 },
184 { "v3", 6 },
185 { "v4", 7 },
186 { "v5", 8 },
187 { "v6", 9 },
188 { "v7", 10 },
189 { "v8", 11 },
190 /* Other platform-specific names for r9. */
191 { "sb", 9 },
192 { "tr", 9 },
193 /* Special names. */
194 { "ip", 12 },
123dc839 195 { "lr", 14 },
123dc839
DJ
196 /* Names used by GCC (not listed in the ARM EABI). */
197 { "sl", 10 },
123dc839
DJ
198 /* A special name from the older ATPCS. */
199 { "wr", 7 },
200};
bc90b915 201
123dc839 202static const char *const arm_register_names[] =
da59e081
JM
203{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
204 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
205 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
206 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
207 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
208 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
94c30b78 209 "fps", "cpsr" }; /* 24 25 */
ed9a39eb 210
afd7eef0
RE
211/* Valid register name styles. */
212static const char **valid_disassembly_styles;
ed9a39eb 213
afd7eef0
RE
214/* Disassembly style to use. Default to "std" register names. */
215static const char *disassembly_style;
96baa820 216
ed9a39eb 217/* This is used to keep the bfd arch_info in sync with the disassembly
afd7eef0
RE
218 style. */
219static void set_disassembly_style_sfunc(char *, int,
ed9a39eb 220 struct cmd_list_element *);
afd7eef0 221static void set_disassembly_style (void);
ed9a39eb 222
b508a996 223static void convert_from_extended (const struct floatformat *, const void *,
be8626e0 224 void *, int);
b508a996 225static void convert_to_extended (const struct floatformat *, void *,
be8626e0 226 const void *, int);
ed9a39eb 227
05d1431c
PA
228static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
229 struct regcache *regcache,
230 int regnum, gdb_byte *buf);
58d6951d
DJ
231static void arm_neon_quad_write (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, const gdb_byte *buf);
234
db24da6d
YQ
235static int thumb_insn_size (unsigned short inst1);
236
9b8d791a 237struct arm_prologue_cache
c3b4394c 238{
eb5492fa
DJ
239 /* The stack pointer at the time this frame was created; i.e. the
240 caller's stack pointer when this function was called. It is used
241 to identify this frame. */
242 CORE_ADDR prev_sp;
243
4be43953
DJ
244 /* The frame base for this frame is just prev_sp - frame size.
245 FRAMESIZE is the distance from the frame pointer to the
246 initial stack pointer. */
eb5492fa 247
c3b4394c 248 int framesize;
eb5492fa
DJ
249
250 /* The register used to hold the frame pointer for this frame. */
c3b4394c 251 int framereg;
eb5492fa
DJ
252
253 /* Saved register offsets. */
254 struct trad_frame_saved_reg *saved_regs;
c3b4394c 255};
ed9a39eb 256
0d39a070
DJ
257static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
258 CORE_ADDR prologue_start,
259 CORE_ADDR prologue_end,
260 struct arm_prologue_cache *cache);
261
cca44b1b
JB
262/* Architecture version for displaced stepping. This effects the behaviour of
263 certain instructions, and really should not be hard-wired. */
264
265#define DISPLACED_STEPPING_ARCH_VERSION 5
266
bc90b915
FN
267/* Addresses for calling Thumb functions have the bit 0 set.
268 Here are some macros to test, set, or clear bit 0 of addresses. */
269#define IS_THUMB_ADDR(addr) ((addr) & 1)
270#define MAKE_THUMB_ADDR(addr) ((addr) | 1)
271#define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
272
94c30b78 273/* Set to true if the 32-bit mode is in use. */
c906108c
SS
274
275int arm_apcs_32 = 1;
276
9779414d
DJ
277/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
278
478fd957 279int
9779414d
DJ
280arm_psr_thumb_bit (struct gdbarch *gdbarch)
281{
282 if (gdbarch_tdep (gdbarch)->is_m)
283 return XPSR_T;
284 else
285 return CPSR_T;
286}
287
b39cc962
DJ
288/* Determine if FRAME is executing in Thumb mode. */
289
25b41d01 290int
b39cc962
DJ
291arm_frame_is_thumb (struct frame_info *frame)
292{
293 CORE_ADDR cpsr;
9779414d 294 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
b39cc962
DJ
295
296 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
297 directly (from a signal frame or dummy frame) or by interpreting
298 the saved LR (from a prologue or DWARF frame). So consult it and
299 trust the unwinders. */
300 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
301
9779414d 302 return (cpsr & t_bit) != 0;
b39cc962
DJ
303}
304
60c5725c
DJ
305/* Callback for VEC_lower_bound. */
306
307static inline int
308arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
309 const struct arm_mapping_symbol *rhs)
310{
311 return lhs->value < rhs->value;
312}
313
f9d67f43
DJ
314/* Search for the mapping symbol covering MEMADDR. If one is found,
315 return its type. Otherwise, return 0. If START is non-NULL,
316 set *START to the location of the mapping symbol. */
c906108c 317
f9d67f43
DJ
318static char
319arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
c906108c 320{
60c5725c 321 struct obj_section *sec;
0428b8f5 322
60c5725c
DJ
323 /* If there are mapping symbols, consult them. */
324 sec = find_pc_section (memaddr);
325 if (sec != NULL)
326 {
327 struct arm_per_objfile *data;
328 VEC(arm_mapping_symbol_s) *map;
aded6f54
PA
329 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
330 0 };
60c5725c
DJ
331 unsigned int idx;
332
333 data = objfile_data (sec->objfile, arm_objfile_data_key);
334 if (data != NULL)
335 {
336 map = data->section_maps[sec->the_bfd_section->index];
337 if (!VEC_empty (arm_mapping_symbol_s, map))
338 {
339 struct arm_mapping_symbol *map_sym;
340
341 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
342 arm_compare_mapping_symbols);
343
344 /* VEC_lower_bound finds the earliest ordered insertion
345 point. If the following symbol starts at this exact
346 address, we use that; otherwise, the preceding
347 mapping symbol covers this address. */
348 if (idx < VEC_length (arm_mapping_symbol_s, map))
349 {
350 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
351 if (map_sym->value == map_key.value)
f9d67f43
DJ
352 {
353 if (start)
354 *start = map_sym->value + obj_section_addr (sec);
355 return map_sym->type;
356 }
60c5725c
DJ
357 }
358
359 if (idx > 0)
360 {
361 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
f9d67f43
DJ
362 if (start)
363 *start = map_sym->value + obj_section_addr (sec);
364 return map_sym->type;
60c5725c
DJ
365 }
366 }
367 }
368 }
369
f9d67f43
DJ
370 return 0;
371}
372
373/* Determine if the program counter specified in MEMADDR is in a Thumb
374 function. This function should be called for addresses unrelated to
375 any executing frame; otherwise, prefer arm_frame_is_thumb. */
376
e3039479 377int
9779414d 378arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
f9d67f43
DJ
379{
380 struct obj_section *sec;
381 struct minimal_symbol *sym;
382 char type;
a42244db
YQ
383 struct displaced_step_closure* dsc
384 = get_displaced_step_closure_by_addr(memaddr);
385
386 /* If checking the mode of displaced instruction in copy area, the mode
387 should be determined by instruction on the original address. */
388 if (dsc)
389 {
390 if (debug_displaced)
391 fprintf_unfiltered (gdb_stdlog,
392 "displaced: check mode of %.8lx instead of %.8lx\n",
393 (unsigned long) dsc->insn_addr,
394 (unsigned long) memaddr);
395 memaddr = dsc->insn_addr;
396 }
f9d67f43
DJ
397
398 /* If bit 0 of the address is set, assume this is a Thumb address. */
399 if (IS_THUMB_ADDR (memaddr))
400 return 1;
401
18819fa6
UW
402 /* Respect internal mode override if active. */
403 if (arm_override_mode != -1)
404 return arm_override_mode;
405
f9d67f43
DJ
406 /* If the user wants to override the symbol table, let him. */
407 if (strcmp (arm_force_mode_string, "arm") == 0)
408 return 0;
409 if (strcmp (arm_force_mode_string, "thumb") == 0)
410 return 1;
411
9779414d
DJ
412 /* ARM v6-M and v7-M are always in Thumb mode. */
413 if (gdbarch_tdep (gdbarch)->is_m)
414 return 1;
415
f9d67f43
DJ
416 /* If there are mapping symbols, consult them. */
417 type = arm_find_mapping_symbol (memaddr, NULL);
418 if (type)
419 return type == 't';
420
ed9a39eb 421 /* Thumb functions have a "special" bit set in minimal symbols. */
c906108c
SS
422 sym = lookup_minimal_symbol_by_pc (memaddr);
423 if (sym)
0428b8f5
DJ
424 return (MSYMBOL_IS_SPECIAL (sym));
425
426 /* If the user wants to override the fallback mode, let them. */
427 if (strcmp (arm_fallback_mode_string, "arm") == 0)
428 return 0;
429 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
430 return 1;
431
432 /* If we couldn't find any symbol, but we're talking to a running
433 target, then trust the current value of $cpsr. This lets
434 "display/i $pc" always show the correct mode (though if there is
435 a symbol table we will not reach here, so it still may not be
18819fa6 436 displayed in the mode it will be executed). */
0428b8f5 437 if (target_has_registers)
18819fa6 438 return arm_frame_is_thumb (get_current_frame ());
0428b8f5
DJ
439
440 /* Otherwise we're out of luck; we assume ARM. */
441 return 0;
c906108c
SS
442}
443
181c1381 444/* Remove useless bits from addresses in a running program. */
34e8f22d 445static CORE_ADDR
24568a2c 446arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
c906108c 447{
a3a2ee65 448 if (arm_apcs_32)
dd6be234 449 return UNMAKE_THUMB_ADDR (val);
c906108c 450 else
a3a2ee65 451 return (val & 0x03fffffc);
c906108c
SS
452}
453
181c1381
RE
454/* When reading symbols, we need to zap the low bit of the address,
455 which may be set to 1 for Thumb functions. */
34e8f22d 456static CORE_ADDR
24568a2c 457arm_smash_text_address (struct gdbarch *gdbarch, CORE_ADDR val)
181c1381
RE
458{
459 return val & ~1;
460}
461
0d39a070 462/* Return 1 if PC is the start of a compiler helper function which
e0634ccf
UW
463 can be safely ignored during prologue skipping. IS_THUMB is true
464 if the function is known to be a Thumb function due to the way it
465 is being called. */
0d39a070 466static int
e0634ccf 467skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
0d39a070 468{
e0634ccf 469 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
0d39a070 470 struct minimal_symbol *msym;
0d39a070
DJ
471
472 msym = lookup_minimal_symbol_by_pc (pc);
e0634ccf
UW
473 if (msym != NULL
474 && SYMBOL_VALUE_ADDRESS (msym) == pc
475 && SYMBOL_LINKAGE_NAME (msym) != NULL)
476 {
477 const char *name = SYMBOL_LINKAGE_NAME (msym);
0d39a070 478
e0634ccf
UW
479 /* The GNU linker's Thumb call stub to foo is named
480 __foo_from_thumb. */
481 if (strstr (name, "_from_thumb") != NULL)
482 name += 2;
0d39a070 483
e0634ccf
UW
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 arguments to their argument types in non-prototyped
486 functions. */
487 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
488 return 1;
489 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
490 return 1;
0d39a070 491
e0634ccf
UW
492 /* Internal functions related to thread-local storage. */
493 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
494 return 1;
495 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
496 return 1;
497 }
498 else
499 {
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
504
505 if (!is_thumb
506 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
509 == 0xe240f01f) /* sub pc, r0, #31 */
510 return 1;
511 }
ec3d575a 512
0d39a070
DJ
513 return 0;
514}
515
516/* Support routines for instruction parsing. */
517#define submask(x) ((1L << ((x) + 1)) - 1)
518#define bit(obj,st) (((obj) >> (st)) & 1)
519#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
520#define sbits(obj,st,fn) \
521 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
522#define BranchDest(addr,instr) \
523 ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
524
621c6d5b
YQ
525/* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
526 the first 16-bit of instruction, and INSN2 is the second 16-bit of
527 instruction. */
528#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
533
534/* Extract the immediate from instruction movw/movt of encoding A. INSN is
535 the 32-bit instruction. */
536#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
539
ec3d575a
UW
540/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
541
542static unsigned int
543thumb_expand_immediate (unsigned int imm)
544{
545 unsigned int count = imm >> 7;
546
547 if (count < 8)
548 switch (count / 2)
549 {
550 case 0:
551 return imm & 0xff;
552 case 1:
553 return (imm & 0xff) | ((imm & 0xff) << 16);
554 case 2:
555 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
556 case 3:
557 return (imm & 0xff) | ((imm & 0xff) << 8)
558 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
559 }
560
561 return (0x80 | (imm & 0x7f)) << (32 - count);
562}
563
564/* Return 1 if the 16-bit Thumb instruction INST might change
565 control flow, 0 otherwise. */
566
567static int
568thumb_instruction_changes_pc (unsigned short inst)
569{
570 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
571 return 1;
572
573 if ((inst & 0xf000) == 0xd000) /* conditional branch */
574 return 1;
575
576 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
577 return 1;
578
579 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
580 return 1;
581
ad8b5167
UW
582 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
583 return 1;
584
ec3d575a
UW
585 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
586 return 1;
587
588 return 0;
589}
590
591/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592 might change control flow, 0 otherwise. */
593
594static int
595thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
596{
597 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
598 {
599 /* Branches and miscellaneous control instructions. */
600
601 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
602 {
603 /* B, BL, BLX. */
604 return 1;
605 }
606 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
607 {
608 /* SUBS PC, LR, #imm8. */
609 return 1;
610 }
611 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
612 {
613 /* Conditional branch. */
614 return 1;
615 }
616
617 return 0;
618 }
619
620 if ((inst1 & 0xfe50) == 0xe810)
621 {
622 /* Load multiple or RFE. */
623
624 if (bit (inst1, 7) && !bit (inst1, 8))
625 {
626 /* LDMIA or POP */
627 if (bit (inst2, 15))
628 return 1;
629 }
630 else if (!bit (inst1, 7) && bit (inst1, 8))
631 {
632 /* LDMDB */
633 if (bit (inst2, 15))
634 return 1;
635 }
636 else if (bit (inst1, 7) && bit (inst1, 8))
637 {
638 /* RFEIA */
639 return 1;
640 }
641 else if (!bit (inst1, 7) && !bit (inst1, 8))
642 {
643 /* RFEDB */
644 return 1;
645 }
646
647 return 0;
648 }
649
650 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
651 {
652 /* MOV PC or MOVS PC. */
653 return 1;
654 }
655
656 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
657 {
658 /* LDR PC. */
659 if (bits (inst1, 0, 3) == 15)
660 return 1;
661 if (bit (inst1, 7))
662 return 1;
663 if (bit (inst2, 11))
664 return 1;
665 if ((inst2 & 0x0fc0) == 0x0000)
666 return 1;
667
668 return 0;
669 }
670
671 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
672 {
673 /* TBB. */
674 return 1;
675 }
676
677 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
678 {
679 /* TBH. */
680 return 1;
681 }
682
683 return 0;
684}
685
29d73ae4
DJ
686/* Analyze a Thumb prologue, looking for a recognizable stack frame
687 and frame pointer. Scan until we encounter a store that could
0d39a070
DJ
688 clobber the stack frame unexpectedly, or an unknown instruction.
689 Return the last address which is definitely safe to skip for an
690 initial breakpoint. */
c906108c
SS
691
692static CORE_ADDR
29d73ae4
DJ
693thumb_analyze_prologue (struct gdbarch *gdbarch,
694 CORE_ADDR start, CORE_ADDR limit,
695 struct arm_prologue_cache *cache)
c906108c 696{
0d39a070 697 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 698 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
29d73ae4
DJ
699 int i;
700 pv_t regs[16];
701 struct pv_area *stack;
702 struct cleanup *back_to;
703 CORE_ADDR offset;
ec3d575a 704 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 705
29d73ae4
DJ
706 for (i = 0; i < 16; i++)
707 regs[i] = pv_register (i, 0);
55f960e1 708 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
29d73ae4
DJ
709 back_to = make_cleanup_free_pv_area (stack);
710
29d73ae4 711 while (start < limit)
c906108c 712 {
29d73ae4
DJ
713 unsigned short insn;
714
e17a4113 715 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 716
94c30b78 717 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 718 {
29d73ae4
DJ
719 int regno;
720 int mask;
4be43953
DJ
721
722 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
723 break;
29d73ae4
DJ
724
725 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
726 whether to save LR (R14). */
727 mask = (insn & 0xff) | ((insn & 0x100) << 6);
728
729 /* Calculate offsets of saved R0-R7 and LR. */
730 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
731 if (mask & (1 << regno))
732 {
29d73ae4
DJ
733 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
734 -4);
735 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
736 }
da59e081 737 }
da3c6d4a
MS
738 else if ((insn & 0xff00) == 0xb000) /* add sp, #simm OR
739 sub sp, #simm */
da59e081 740 {
29d73ae4
DJ
741 offset = (insn & 0x7f) << 2; /* get scaled offset */
742 if (insn & 0x80) /* Check for SUB. */
743 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
744 -offset);
da59e081 745 else
29d73ae4
DJ
746 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
747 offset);
da59e081 748 }
0d39a070
DJ
749 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
750 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
751 (insn & 0xff) << 2);
752 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
753 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
754 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
755 bits (insn, 6, 8));
756 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
757 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
758 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
759 bits (insn, 0, 7));
760 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
761 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
762 && pv_is_constant (regs[bits (insn, 3, 5)]))
763 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
764 regs[bits (insn, 6, 8)]);
765 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
766 && pv_is_constant (regs[bits (insn, 3, 6)]))
767 {
768 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
769 int rm = bits (insn, 3, 6);
770 regs[rd] = pv_add (regs[rd], regs[rm]);
771 }
29d73ae4 772 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 773 {
29d73ae4
DJ
774 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
775 int src_reg = (insn & 0x78) >> 3;
776 regs[dst_reg] = regs[src_reg];
da59e081 777 }
29d73ae4 778 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 779 {
29d73ae4
DJ
780 /* Handle stores to the stack. Normally pushes are used,
781 but with GCC -mtpcs-frame, there may be other stores
782 in the prologue to create the frame. */
783 int regno = (insn >> 8) & 0x7;
784 pv_t addr;
785
786 offset = (insn & 0xff) << 2;
787 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
788
789 if (pv_area_store_would_trash (stack, addr))
790 break;
791
792 pv_area_store (stack, addr, 4, regs[regno]);
da59e081 793 }
0d39a070
DJ
794 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
795 {
796 int rd = bits (insn, 0, 2);
797 int rn = bits (insn, 3, 5);
798 pv_t addr;
799
800 offset = bits (insn, 6, 10) << 2;
801 addr = pv_add_constant (regs[rn], offset);
802
803 if (pv_area_store_would_trash (stack, addr))
804 break;
805
806 pv_area_store (stack, addr, 4, regs[rd]);
807 }
808 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
809 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
810 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
811 /* Ignore stores of argument registers to the stack. */
812 ;
813 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
814 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
815 /* Ignore block loads from the stack, potentially copying
816 parameters from memory. */
817 ;
818 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
819 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
820 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
821 /* Similarly ignore single loads from the stack. */
822 ;
823 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
824 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
825 /* Skip register copies, i.e. saves to another register
826 instead of the stack. */
827 ;
828 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
829 /* Recognize constant loads; even with small stacks these are necessary
830 on Thumb. */
831 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
832 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
833 {
834 /* Constant pool loads, for the same reason. */
835 unsigned int constant;
836 CORE_ADDR loc;
837
838 loc = start + 4 + bits (insn, 0, 7) * 4;
839 constant = read_memory_unsigned_integer (loc, 4, byte_order);
840 regs[bits (insn, 8, 10)] = pv_constant (constant);
841 }
db24da6d 842 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 843 {
0d39a070
DJ
844 unsigned short inst2;
845
846 inst2 = read_memory_unsigned_integer (start + 2, 2,
847 byte_order_for_code);
848
849 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
850 {
851 /* BL, BLX. Allow some special function calls when
852 skipping the prologue; GCC generates these before
853 storing arguments to the stack. */
854 CORE_ADDR nextpc;
855 int j1, j2, imm1, imm2;
856
857 imm1 = sbits (insn, 0, 10);
858 imm2 = bits (inst2, 0, 10);
859 j1 = bit (inst2, 13);
860 j2 = bit (inst2, 11);
861
862 offset = ((imm1 << 12) + (imm2 << 1));
863 offset ^= ((!j2) << 22) | ((!j1) << 23);
864
865 nextpc = start + 4 + offset;
866 /* For BLX make sure to clear the low bits. */
867 if (bit (inst2, 12) == 0)
868 nextpc = nextpc & 0xfffffffc;
869
e0634ccf
UW
870 if (!skip_prologue_function (gdbarch, nextpc,
871 bit (inst2, 12) != 0))
0d39a070
DJ
872 break;
873 }
ec3d575a 874
0963b4bd
MS
875 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
876 { registers } */
ec3d575a
UW
877 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
878 {
879 pv_t addr = regs[bits (insn, 0, 3)];
880 int regno;
881
882 if (pv_area_store_would_trash (stack, addr))
883 break;
884
885 /* Calculate offsets of saved registers. */
886 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
887 if (inst2 & (1 << regno))
888 {
889 addr = pv_add_constant (addr, -4);
890 pv_area_store (stack, addr, 4, regs[regno]);
891 }
892
893 if (insn & 0x0020)
894 regs[bits (insn, 0, 3)] = addr;
895 }
896
0963b4bd
MS
897 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
898 [Rn, #+/-imm]{!} */
ec3d575a
UW
899 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
900 {
901 int regno1 = bits (inst2, 12, 15);
902 int regno2 = bits (inst2, 8, 11);
903 pv_t addr = regs[bits (insn, 0, 3)];
904
905 offset = inst2 & 0xff;
906 if (insn & 0x0080)
907 addr = pv_add_constant (addr, offset);
908 else
909 addr = pv_add_constant (addr, -offset);
910
911 if (pv_area_store_would_trash (stack, addr))
912 break;
913
914 pv_area_store (stack, addr, 4, regs[regno1]);
915 pv_area_store (stack, pv_add_constant (addr, 4),
916 4, regs[regno2]);
917
918 if (insn & 0x0020)
919 regs[bits (insn, 0, 3)] = addr;
920 }
921
922 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
923 && (inst2 & 0x0c00) == 0x0c00
924 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
925 {
926 int regno = bits (inst2, 12, 15);
927 pv_t addr = regs[bits (insn, 0, 3)];
928
929 offset = inst2 & 0xff;
930 if (inst2 & 0x0200)
931 addr = pv_add_constant (addr, offset);
932 else
933 addr = pv_add_constant (addr, -offset);
934
935 if (pv_area_store_would_trash (stack, addr))
936 break;
937
938 pv_area_store (stack, addr, 4, regs[regno]);
939
940 if (inst2 & 0x0100)
941 regs[bits (insn, 0, 3)] = addr;
942 }
943
944 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
945 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
946 {
947 int regno = bits (inst2, 12, 15);
948 pv_t addr;
949
950 offset = inst2 & 0xfff;
951 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
952
953 if (pv_area_store_would_trash (stack, addr))
954 break;
955
956 pv_area_store (stack, addr, 4, regs[regno]);
957 }
958
959 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 960 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 961 /* Ignore stores of argument registers to the stack. */
0d39a070 962 ;
ec3d575a
UW
963
964 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
965 && (inst2 & 0x0d00) == 0x0c00
0d39a070 966 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 967 /* Ignore stores of argument registers to the stack. */
0d39a070 968 ;
ec3d575a 969
0963b4bd
MS
970 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
971 { registers } */
ec3d575a
UW
972 && (inst2 & 0x8000) == 0x0000
973 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
974 /* Ignore block loads from the stack, potentially copying
975 parameters from memory. */
0d39a070 976 ;
ec3d575a 977
0963b4bd
MS
978 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
979 [Rn, #+/-imm] */
0d39a070 980 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 981 /* Similarly ignore dual loads from the stack. */
0d39a070 982 ;
ec3d575a
UW
983
984 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
985 && (inst2 & 0x0d00) == 0x0c00
0d39a070 986 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 987 /* Similarly ignore single loads from the stack. */
0d39a070 988 ;
ec3d575a
UW
989
990 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 992 /* Similarly ignore single loads from the stack. */
0d39a070 993 ;
ec3d575a
UW
994
995 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
996 && (inst2 & 0x8000) == 0x0000)
997 {
998 unsigned int imm = ((bits (insn, 10, 10) << 11)
999 | (bits (inst2, 12, 14) << 8)
1000 | bits (inst2, 0, 7));
1001
1002 regs[bits (inst2, 8, 11)]
1003 = pv_add_constant (regs[bits (insn, 0, 3)],
1004 thumb_expand_immediate (imm));
1005 }
1006
1007 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1008 && (inst2 & 0x8000) == 0x0000)
0d39a070 1009 {
ec3d575a
UW
1010 unsigned int imm = ((bits (insn, 10, 10) << 11)
1011 | (bits (inst2, 12, 14) << 8)
1012 | bits (inst2, 0, 7));
1013
1014 regs[bits (inst2, 8, 11)]
1015 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1016 }
1017
1018 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1019 && (inst2 & 0x8000) == 0x0000)
1020 {
1021 unsigned int imm = ((bits (insn, 10, 10) << 11)
1022 | (bits (inst2, 12, 14) << 8)
1023 | bits (inst2, 0, 7));
1024
1025 regs[bits (inst2, 8, 11)]
1026 = pv_add_constant (regs[bits (insn, 0, 3)],
1027 - (CORE_ADDR) thumb_expand_immediate (imm));
1028 }
1029
1030 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1031 && (inst2 & 0x8000) == 0x0000)
1032 {
1033 unsigned int imm = ((bits (insn, 10, 10) << 11)
1034 | (bits (inst2, 12, 14) << 8)
1035 | bits (inst2, 0, 7));
1036
1037 regs[bits (inst2, 8, 11)]
1038 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1039 }
1040
1041 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1042 {
1043 unsigned int imm = ((bits (insn, 10, 10) << 11)
1044 | (bits (inst2, 12, 14) << 8)
1045 | bits (inst2, 0, 7));
1046
1047 regs[bits (inst2, 8, 11)]
1048 = pv_constant (thumb_expand_immediate (imm));
1049 }
1050
1051 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1052 {
621c6d5b
YQ
1053 unsigned int imm
1054 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
ec3d575a
UW
1055
1056 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1057 }
1058
1059 else if (insn == 0xea5f /* mov.w Rd,Rm */
1060 && (inst2 & 0xf0f0) == 0)
1061 {
1062 int dst_reg = (inst2 & 0x0f00) >> 8;
1063 int src_reg = inst2 & 0xf;
1064 regs[dst_reg] = regs[src_reg];
1065 }
1066
1067 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1068 {
1069 /* Constant pool loads. */
1070 unsigned int constant;
1071 CORE_ADDR loc;
1072
1073 offset = bits (insn, 0, 11);
1074 if (insn & 0x0080)
1075 loc = start + 4 + offset;
1076 else
1077 loc = start + 4 - offset;
1078
1079 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1080 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1081 }
1082
1083 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1084 {
1085 /* Constant pool loads. */
1086 unsigned int constant;
1087 CORE_ADDR loc;
1088
1089 offset = bits (insn, 0, 7) << 2;
1090 if (insn & 0x0080)
1091 loc = start + 4 + offset;
1092 else
1093 loc = start + 4 - offset;
1094
1095 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1096 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1097
1098 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1099 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1100 }
1101
1102 else if (thumb2_instruction_changes_pc (insn, inst2))
1103 {
1104 /* Don't scan past anything that might change control flow. */
0d39a070
DJ
1105 break;
1106 }
ec3d575a
UW
1107 else
1108 {
1109 /* The optimizer might shove anything into the prologue,
1110 so we just skip what we don't recognize. */
1111 unrecognized_pc = start;
1112 }
0d39a070
DJ
1113
1114 start += 2;
1115 }
ec3d575a 1116 else if (thumb_instruction_changes_pc (insn))
3d74b771 1117 {
ec3d575a 1118 /* Don't scan past anything that might change control flow. */
da3c6d4a 1119 break;
3d74b771 1120 }
ec3d575a
UW
1121 else
1122 {
1123 /* The optimizer might shove anything into the prologue,
1124 so we just skip what we don't recognize. */
1125 unrecognized_pc = start;
1126 }
29d73ae4
DJ
1127
1128 start += 2;
c906108c
SS
1129 }
1130
0d39a070
DJ
1131 if (arm_debug)
1132 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1133 paddress (gdbarch, start));
1134
ec3d575a
UW
1135 if (unrecognized_pc == 0)
1136 unrecognized_pc = start;
1137
29d73ae4
DJ
1138 if (cache == NULL)
1139 {
1140 do_cleanups (back_to);
ec3d575a 1141 return unrecognized_pc;
29d73ae4
DJ
1142 }
1143
29d73ae4
DJ
1144 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1145 {
1146 /* Frame pointer is fp. Frame size is constant. */
1147 cache->framereg = ARM_FP_REGNUM;
1148 cache->framesize = -regs[ARM_FP_REGNUM].k;
1149 }
1150 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1151 {
1152 /* Frame pointer is r7. Frame size is constant. */
1153 cache->framereg = THUMB_FP_REGNUM;
1154 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1155 }
72a2e3dc 1156 else
29d73ae4
DJ
1157 {
1158 /* Try the stack pointer... this is a bit desperate. */
1159 cache->framereg = ARM_SP_REGNUM;
1160 cache->framesize = -regs[ARM_SP_REGNUM].k;
1161 }
29d73ae4
DJ
1162
1163 for (i = 0; i < 16; i++)
1164 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1165 cache->saved_regs[i].addr = offset;
1166
1167 do_cleanups (back_to);
ec3d575a 1168 return unrecognized_pc;
c906108c
SS
1169}
1170
621c6d5b
YQ
1171
1172/* Try to analyze the instructions starting from PC, which load symbol
1173 __stack_chk_guard. Return the address of instruction after loading this
1174 symbol, set the dest register number to *BASEREG, and set the size of
1175 instructions for loading symbol in OFFSET. Return 0 if instructions are
1176 not recognized. */
1177
1178static CORE_ADDR
1179arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1180 unsigned int *destreg, int *offset)
1181{
1182 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1183 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1184 unsigned int low, high, address;
1185
1186 address = 0;
1187 if (is_thumb)
1188 {
1189 unsigned short insn1
1190 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1191
1192 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1193 {
1194 *destreg = bits (insn1, 8, 10);
1195 *offset = 2;
1196 address = bits (insn1, 0, 7);
1197 }
1198 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1199 {
1200 unsigned short insn2
1201 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1202
1203 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1204
1205 insn1
1206 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1207 insn2
1208 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1209
1210 /* movt Rd, #const */
1211 if ((insn1 & 0xfbc0) == 0xf2c0)
1212 {
1213 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1214 *destreg = bits (insn2, 8, 11);
1215 *offset = 8;
1216 address = (high << 16 | low);
1217 }
1218 }
1219 }
1220 else
1221 {
2e9e421f
UW
1222 unsigned int insn
1223 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1224
1225 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1226 {
1227 address = bits (insn, 0, 11);
1228 *destreg = bits (insn, 12, 15);
1229 *offset = 4;
1230 }
1231 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1232 {
1233 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1234
1235 insn
1236 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1237
1238 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1239 {
1240 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1241 *destreg = bits (insn, 12, 15);
1242 *offset = 8;
1243 address = (high << 16 | low);
1244 }
1245 }
621c6d5b
YQ
1246 }
1247
1248 return address;
1249}
1250
1251/* Try to skip a sequence of instructions used for stack protector. If PC
0963b4bd
MS
1252 points to the first instruction of this sequence, return the address of
1253 first instruction after this sequence, otherwise, return original PC.
621c6d5b
YQ
1254
1255 On arm, this sequence of instructions is composed of mainly three steps,
1256 Step 1: load symbol __stack_chk_guard,
1257 Step 2: load from address of __stack_chk_guard,
1258 Step 3: store it to somewhere else.
1259
1260 Usually, instructions on step 2 and step 3 are the same on various ARM
1261 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1262 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1263 instructions in step 1 vary from different ARM architectures. On ARMv7,
1264 they are,
1265
1266 movw Rn, #:lower16:__stack_chk_guard
1267 movt Rn, #:upper16:__stack_chk_guard
1268
1269 On ARMv5t, it is,
1270
1271 ldr Rn, .Label
1272 ....
1273 .Lable:
1274 .word __stack_chk_guard
1275
1276 Since ldr/str is a very popular instruction, we can't use them as
1277 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1278 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1279 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1280
1281static CORE_ADDR
1282arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1283{
1284 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1285 unsigned int address, basereg;
1286 struct minimal_symbol *stack_chk_guard;
1287 int offset;
1288 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1289 CORE_ADDR addr;
1290
1291 /* Try to parse the instructions in Step 1. */
1292 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1293 &basereg, &offset);
1294 if (!addr)
1295 return pc;
1296
1297 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1298 /* If name of symbol doesn't start with '__stack_chk_guard', this
1299 instruction sequence is not for stack protector. If symbol is
1300 removed, we conservatively think this sequence is for stack protector. */
1301 if (stack_chk_guard
c1c2ab58
UW
1302 && strncmp (SYMBOL_LINKAGE_NAME (stack_chk_guard), "__stack_chk_guard",
1303 strlen ("__stack_chk_guard")) != 0)
621c6d5b
YQ
1304 return pc;
1305
1306 if (is_thumb)
1307 {
1308 unsigned int destreg;
1309 unsigned short insn
1310 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1311
1312 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1313 if ((insn & 0xf800) != 0x6800)
1314 return pc;
1315 if (bits (insn, 3, 5) != basereg)
1316 return pc;
1317 destreg = bits (insn, 0, 2);
1318
1319 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1320 byte_order_for_code);
1321 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1322 if ((insn & 0xf800) != 0x6000)
1323 return pc;
1324 if (destreg != bits (insn, 0, 2))
1325 return pc;
1326 }
1327 else
1328 {
1329 unsigned int destreg;
1330 unsigned int insn
1331 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1332
1333 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1334 if ((insn & 0x0e500000) != 0x04100000)
1335 return pc;
1336 if (bits (insn, 16, 19) != basereg)
1337 return pc;
1338 destreg = bits (insn, 12, 15);
1339 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1340 insn = read_memory_unsigned_integer (pc + offset + 4,
1341 4, byte_order_for_code);
1342 if ((insn & 0x0e500000) != 0x04000000)
1343 return pc;
1344 if (bits (insn, 12, 15) != destreg)
1345 return pc;
1346 }
1347 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1348 on arm. */
1349 if (is_thumb)
1350 return pc + offset + 4;
1351 else
1352 return pc + offset + 8;
1353}
1354
da3c6d4a
MS
1355/* Advance the PC across any function entry prologue instructions to
1356 reach some "real" code.
34e8f22d
RE
1357
1358 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1359 prologue:
c906108c 1360
c5aa993b
JM
1361 mov ip, sp
1362 [stmfd sp!, {a1,a2,a3,a4}]
1363 stmfd sp!, {...,fp,ip,lr,pc}
ed9a39eb
JM
1364 [stfe f7, [sp, #-12]!]
1365 [stfe f6, [sp, #-12]!]
1366 [stfe f5, [sp, #-12]!]
1367 [stfe f4, [sp, #-12]!]
0963b4bd 1368 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1369
34e8f22d 1370static CORE_ADDR
6093d2eb 1371arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1372{
e17a4113 1373 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
c906108c
SS
1374 unsigned long inst;
1375 CORE_ADDR skip_pc;
a89fea3c 1376 CORE_ADDR func_addr, limit_pc;
c906108c
SS
1377 struct symtab_and_line sal;
1378
a89fea3c
JL
1379 /* See if we can determine the end of the prologue via the symbol table.
1380 If so, then return either PC, or the PC after the prologue, whichever
1381 is greater. */
1382 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1383 {
d80b854b
UW
1384 CORE_ADDR post_prologue_pc
1385 = skip_prologue_using_sal (gdbarch, func_addr);
0d39a070
DJ
1386 struct symtab *s = find_pc_symtab (func_addr);
1387
621c6d5b
YQ
1388 if (post_prologue_pc)
1389 post_prologue_pc
1390 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1391
1392
0d39a070
DJ
1393 /* GCC always emits a line note before the prologue and another
1394 one after, even if the two are at the same address or on the
1395 same line. Take advantage of this so that we do not need to
1396 know every instruction that might appear in the prologue. We
1397 will have producer information for most binaries; if it is
1398 missing (e.g. for -gstabs), assuming the GNU tools. */
1399 if (post_prologue_pc
1400 && (s == NULL
1401 || s->producer == NULL
1402 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0))
1403 return post_prologue_pc;
1404
a89fea3c 1405 if (post_prologue_pc != 0)
0d39a070
DJ
1406 {
1407 CORE_ADDR analyzed_limit;
1408
1409 /* For non-GCC compilers, make sure the entire line is an
1410 acceptable prologue; GDB will round this function's
1411 return value up to the end of the following line so we
1412 can not skip just part of a line (and we do not want to).
1413
1414 RealView does not treat the prologue specially, but does
1415 associate prologue code with the opening brace; so this
1416 lets us skip the first line if we think it is the opening
1417 brace. */
9779414d 1418 if (arm_pc_is_thumb (gdbarch, func_addr))
0d39a070
DJ
1419 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1420 post_prologue_pc, NULL);
1421 else
1422 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1423 post_prologue_pc, NULL);
1424
1425 if (analyzed_limit != post_prologue_pc)
1426 return func_addr;
1427
1428 return post_prologue_pc;
1429 }
c906108c
SS
1430 }
1431
a89fea3c
JL
1432 /* Can't determine prologue from the symbol table, need to examine
1433 instructions. */
c906108c 1434
a89fea3c
JL
1435 /* Find an upper limit on the function prologue using the debug
1436 information. If the debug information could not be used to provide
1437 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1438 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1439 limit_pc = skip_prologue_using_sal (gdbarch, pc);
a89fea3c
JL
1440 if (limit_pc == 0)
1441 limit_pc = pc + 64; /* Magic. */
1442
c906108c 1443
29d73ae4 1444 /* Check if this is Thumb code. */
9779414d 1445 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1446 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
29d73ae4 1447
a89fea3c 1448 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
f43845b3 1449 {
e17a4113 1450 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
9d4fde75 1451
b8d5e71d
MS
1452 /* "mov ip, sp" is no longer a required part of the prologue. */
1453 if (inst == 0xe1a0c00d) /* mov ip, sp */
1454 continue;
c906108c 1455
28cd8767
JG
1456 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1457 continue;
1458
1459 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1460 continue;
1461
b8d5e71d
MS
1462 /* Some prologues begin with "str lr, [sp, #-4]!". */
1463 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1464 continue;
c906108c 1465
b8d5e71d
MS
1466 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1467 continue;
c906108c 1468
b8d5e71d
MS
1469 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1470 continue;
11d3b27d 1471
b8d5e71d
MS
1472 /* Any insns after this point may float into the code, if it makes
1473 for better instruction scheduling, so we skip them only if we
1474 find them, but still consider the function to be frame-ful. */
f43845b3 1475
b8d5e71d
MS
1476 /* We may have either one sfmfd instruction here, or several stfe
1477 insns, depending on the version of floating point code we
1478 support. */
1479 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1480 continue;
1481
1482 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1483 continue;
1484
1485 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1486 continue;
1487
1488 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1489 continue;
1490
f8bf5763
PM
1491 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1492 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1493 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
b8d5e71d
MS
1494 continue;
1495
f8bf5763
PM
1496 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1497 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1498 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
b8d5e71d
MS
1499 continue;
1500
1501 /* Un-recognized instruction; stop scanning. */
1502 break;
f43845b3 1503 }
c906108c 1504
0963b4bd 1505 return skip_pc; /* End of prologue. */
c906108c 1506}
94c30b78 1507
c5aa993b 1508/* *INDENT-OFF* */
c906108c
SS
1509/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1510 This function decodes a Thumb function prologue to determine:
1511 1) the size of the stack frame
1512 2) which registers are saved on it
1513 3) the offsets of saved regs
1514 4) the offset from the stack pointer to the frame pointer
c906108c 1515
da59e081
JM
1516 A typical Thumb function prologue would create this stack frame
1517 (offsets relative to FP)
c906108c
SS
1518 old SP -> 24 stack parameters
1519 20 LR
1520 16 R7
1521 R7 -> 0 local variables (16 bytes)
1522 SP -> -12 additional stack space (12 bytes)
1523 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1524 12 bytes. The frame register is R7.
da59e081 1525
da3c6d4a
MS
1526 The comments for thumb_skip_prolog() describe the algorithm we use
1527 to detect the end of the prolog. */
c5aa993b
JM
1528/* *INDENT-ON* */
1529
c906108c 1530static void
be8626e0 1531thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1532 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1533{
1534 CORE_ADDR prologue_start;
1535 CORE_ADDR prologue_end;
1536 CORE_ADDR current_pc;
c906108c 1537
b39cc962
DJ
1538 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1539 &prologue_end))
c906108c 1540 {
ec3d575a
UW
1541 /* See comment in arm_scan_prologue for an explanation of
1542 this heuristics. */
1543 if (prologue_end > prologue_start + 64)
1544 {
1545 prologue_end = prologue_start + 64;
1546 }
c906108c
SS
1547 }
1548 else
f7060f85
DJ
1549 /* We're in the boondocks: we have no idea where the start of the
1550 function is. */
1551 return;
c906108c 1552
eb5492fa 1553 prologue_end = min (prologue_end, prev_pc);
c906108c 1554
be8626e0 1555 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1556}
1557
0d39a070 1558/* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
c906108c 1559
0d39a070
DJ
1560static int
1561arm_instruction_changes_pc (uint32_t this_instr)
c906108c 1562{
0d39a070
DJ
1563 if (bits (this_instr, 28, 31) == INST_NV)
1564 /* Unconditional instructions. */
1565 switch (bits (this_instr, 24, 27))
1566 {
1567 case 0xa:
1568 case 0xb:
1569 /* Branch with Link and change to Thumb. */
1570 return 1;
1571 case 0xc:
1572 case 0xd:
1573 case 0xe:
1574 /* Coprocessor register transfer. */
1575 if (bits (this_instr, 12, 15) == 15)
1576 error (_("Invalid update to pc in instruction"));
1577 return 0;
1578 default:
1579 return 0;
1580 }
1581 else
1582 switch (bits (this_instr, 25, 27))
1583 {
1584 case 0x0:
1585 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1586 {
1587 /* Multiplies and extra load/stores. */
1588 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1589 /* Neither multiplies nor extension load/stores are allowed
1590 to modify PC. */
1591 return 0;
1592
1593 /* Otherwise, miscellaneous instructions. */
1594
1595 /* BX <reg>, BXJ <reg>, BLX <reg> */
1596 if (bits (this_instr, 4, 27) == 0x12fff1
1597 || bits (this_instr, 4, 27) == 0x12fff2
1598 || bits (this_instr, 4, 27) == 0x12fff3)
1599 return 1;
1600
1601 /* Other miscellaneous instructions are unpredictable if they
1602 modify PC. */
1603 return 0;
1604 }
1605 /* Data processing instruction. Fall through. */
c906108c 1606
0d39a070
DJ
1607 case 0x1:
1608 if (bits (this_instr, 12, 15) == 15)
1609 return 1;
1610 else
1611 return 0;
c906108c 1612
0d39a070
DJ
1613 case 0x2:
1614 case 0x3:
1615 /* Media instructions and architecturally undefined instructions. */
1616 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1617 return 0;
c906108c 1618
0d39a070
DJ
1619 /* Stores. */
1620 if (bit (this_instr, 20) == 0)
1621 return 0;
2a451106 1622
0d39a070
DJ
1623 /* Loads. */
1624 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1625 return 1;
1626 else
1627 return 0;
2a451106 1628
0d39a070
DJ
1629 case 0x4:
1630 /* Load/store multiple. */
1631 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1632 return 1;
1633 else
1634 return 0;
2a451106 1635
0d39a070
DJ
1636 case 0x5:
1637 /* Branch and branch with link. */
1638 return 1;
2a451106 1639
0d39a070
DJ
1640 case 0x6:
1641 case 0x7:
1642 /* Coprocessor transfers or SWIs can not affect PC. */
1643 return 0;
eb5492fa 1644
0d39a070 1645 default:
9b20d036 1646 internal_error (__FILE__, __LINE__, _("bad value in switch"));
0d39a070
DJ
1647 }
1648}
c906108c 1649
0d39a070
DJ
1650/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1651 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1652 fill it in. Return the first address not recognized as a prologue
1653 instruction.
eb5492fa 1654
0d39a070
DJ
1655 We recognize all the instructions typically found in ARM prologues,
1656 plus harmless instructions which can be skipped (either for analysis
1657 purposes, or a more restrictive set that can be skipped when finding
1658 the end of the prologue). */
1659
1660static CORE_ADDR
1661arm_analyze_prologue (struct gdbarch *gdbarch,
1662 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1663 struct arm_prologue_cache *cache)
1664{
1665 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1666 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1667 int regno;
1668 CORE_ADDR offset, current_pc;
1669 pv_t regs[ARM_FPS_REGNUM];
1670 struct pv_area *stack;
1671 struct cleanup *back_to;
1672 int framereg, framesize;
1673 CORE_ADDR unrecognized_pc = 0;
1674
1675 /* Search the prologue looking for instructions that set up the
96baa820 1676 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1677
96baa820
JM
1678 Be careful, however, and if it doesn't look like a prologue,
1679 don't try to scan it. If, for instance, a frameless function
1680 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1681 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1682 and other operations that rely on a knowledge of the stack
0d39a070 1683 traceback. */
d4473757 1684
4be43953
DJ
1685 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1686 regs[regno] = pv_register (regno, 0);
55f960e1 1687 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1688 back_to = make_cleanup_free_pv_area (stack);
1689
94c30b78
MS
1690 for (current_pc = prologue_start;
1691 current_pc < prologue_end;
f43845b3 1692 current_pc += 4)
96baa820 1693 {
e17a4113
UW
1694 unsigned int insn
1695 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1696
94c30b78 1697 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1698 {
4be43953 1699 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1700 continue;
1701 }
0d39a070
DJ
1702 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1703 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1704 {
1705 unsigned imm = insn & 0xff; /* immediate value */
1706 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1707 int rd = bits (insn, 12, 15);
28cd8767 1708 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1709 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1710 continue;
1711 }
0d39a070
DJ
1712 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1713 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1714 {
1715 unsigned imm = insn & 0xff; /* immediate value */
1716 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1717 int rd = bits (insn, 12, 15);
28cd8767 1718 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1719 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1720 continue;
1721 }
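	  /* The decoding above implements the ARM "modified immediate"
	     form: an 8-bit value rotated right by twice the 4-bit
	     rotate field (ROT already holds the doubled amount).  For
	     example, "sub sp, sp, #1020" encodes imm8 = 0xff with a
	     rotation of 30, and (0xff >> 30) | (0xff << 2) == 0x3fc.  */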
0963b4bd
MS
1722 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1723 [sp, #-4]! */
f43845b3 1724 {
4be43953
DJ
1725 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1726 break;
1727 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1728 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1729 regs[bits (insn, 12, 15)]);
f43845b3
MS
1730 continue;
1731 }
1732 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1733 /* stmfd sp!, {..., fp, ip, lr, pc}
1734 or
1735 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1736 {
d4473757 1737 int mask = insn & 0xffff;
ed9a39eb 1738
4be43953
DJ
1739 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1740 break;
1741
94c30b78 1742 /* Calculate offsets of saved registers. */
34e8f22d 1743 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1744 if (mask & (1 << regno))
1745 {
0963b4bd
MS
1746 regs[ARM_SP_REGNUM]
1747 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1748 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1749 }
1750 }
0d39a070
DJ
1751 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1752 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1753 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1754 {
1755 /* No need to add this to saved_regs -- it's just an arg reg. */
1756 continue;
1757 }
0d39a070
DJ
1758 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1759 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1760 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1761 {
1762 /* No need to add this to saved_regs -- it's just an arg reg. */
1763 continue;
1764 }
0963b4bd
MS
1765 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1766 { registers } */
0d39a070
DJ
1767 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1768 {
1769 /* No need to add this to saved_regs -- it's just arg regs. */
1770 continue;
1771 }
d4473757
KB
1772 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1773 {
94c30b78
MS
1774 unsigned imm = insn & 0xff; /* immediate value */
1775 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1776 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1777 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1778 }
1779 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1780 {
94c30b78
MS
1781 unsigned imm = insn & 0xff; /* immediate value */
1782 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1783 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1784 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1785 }
0963b4bd
MS
1786 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1787 [sp, -#c]! */
2af46ca0 1788 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1789 {
4be43953
DJ
1790 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1791 break;
1792
1793 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1794 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1795 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1796 }
0963b4bd
MS
1797 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1798 [sp!] */
2af46ca0 1799 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1800 {
1801 int n_saved_fp_regs;
1802 unsigned int fp_start_reg, fp_bound_reg;
1803
4be43953
DJ
1804 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1805 break;
1806
94c30b78 1807 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1808 {
d4473757
KB
1809 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1810 n_saved_fp_regs = 3;
1811 else
1812 n_saved_fp_regs = 1;
96baa820 1813 }
d4473757 1814 else
96baa820 1815 {
d4473757
KB
1816 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1817 n_saved_fp_regs = 2;
1818 else
1819 n_saved_fp_regs = 4;
96baa820 1820 }
d4473757 1821
34e8f22d 1822 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1823 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1824 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1825 {
4be43953
DJ
1826 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1827 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1828 regs[fp_start_reg++]);
96baa820 1829 }
c906108c 1830 }
0d39a070
DJ
1831 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1832 {
1833 /* Allow some special function calls when skipping the
1834 prologue; GCC generates these before storing arguments to
1835 the stack. */
1836 CORE_ADDR dest = BranchDest (current_pc, insn);
1837
e0634ccf 1838 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1839 continue;
1840 else
1841 break;
1842 }
d4473757 1843 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1844 break; /* Condition not true, exit early. */
0d39a070
DJ
1845 else if (arm_instruction_changes_pc (insn))
1846 /* Don't scan past anything that might change control flow. */
1847 break;
d19f7eee
UW
1848 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1849 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1850 /* Ignore block loads from the stack, potentially copying
1851 parameters from memory. */
1852 continue;
1853 else if ((insn & 0xfc500000) == 0xe4100000
1854 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1855 /* Similarly ignore single loads from the stack. */
1856 continue;
0d39a070
DJ
1857 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1858 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1859 register instead of the stack. */
d4473757 1860 continue;
0d39a070
DJ
1861 else
1862 {
1863 /* The optimizer might shove anything into the prologue,
1864 so we just skip what we don't recognize. */
1865 unrecognized_pc = current_pc;
1866 continue;
1867 }
c906108c
SS
1868 }
1869
0d39a070
DJ
1870 if (unrecognized_pc == 0)
1871 unrecognized_pc = current_pc;
1872
4be43953
DJ
1873 /* The frame size is just the distance from the frame register
1874 to the original stack pointer. */
1875 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1876 {
1877 /* Frame pointer is fp. */
0d39a070
DJ
1878 framereg = ARM_FP_REGNUM;
1879 framesize = -regs[ARM_FP_REGNUM].k;
4be43953 1880 }
72a2e3dc 1881 else
4be43953
DJ
1882 {
1883 /* Try the stack pointer... this is a bit desperate. */
0d39a070
DJ
1884 framereg = ARM_SP_REGNUM;
1885 framesize = -regs[ARM_SP_REGNUM].k;
4be43953 1886 }
4be43953 1887
0d39a070
DJ
1888 if (cache)
1889 {
1890 cache->framereg = framereg;
1891 cache->framesize = framesize;
1892
1893 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1894 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1895 cache->saved_regs[regno].addr = offset;
1896 }
1897
1898 if (arm_debug)
1899 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1900 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1901
1902 do_cleanups (back_to);
0d39a070
DJ
1903 return unrecognized_pc;
1904}
1905
1906static void
1907arm_scan_prologue (struct frame_info *this_frame,
1908 struct arm_prologue_cache *cache)
1909{
1910 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1911 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1912 int regno;
1913 CORE_ADDR prologue_start, prologue_end, current_pc;
1914 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1915 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1916 pv_t regs[ARM_FPS_REGNUM];
1917 struct pv_area *stack;
1918 struct cleanup *back_to;
1919 CORE_ADDR offset;
1920
1921 /* Assume there is no frame until proven otherwise. */
1922 cache->framereg = ARM_SP_REGNUM;
1923 cache->framesize = 0;
1924
1925 /* Check for Thumb prologue. */
1926 if (arm_frame_is_thumb (this_frame))
1927 {
1928 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1929 return;
1930 }
1931
1932 /* Find the function prologue. If we can't find the function in
1933 the symbol table, peek in the stack frame to find the PC. */
1934 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1935 &prologue_end))
1936 {
1937 /* One way to find the end of the prologue (which works well
1938 for unoptimized code) is to do the following:
1939
1940 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1941
1942 if (sal.line == 0)
1943 prologue_end = prev_pc;
1944 else if (sal.end < prologue_end)
1945 prologue_end = sal.end;
1946
1947 This mechanism is very accurate so long as the optimizer
1948 doesn't move any instructions from the function body into the
1949 prologue. If this happens, sal.end will be the last
1950 instruction in the first hunk of prologue code just before
1951 the first instruction that the scheduler has moved from
1952 the body to the prologue.
1953
1954 In order to make sure that we scan all of the prologue
1955 instructions, we use a slightly less accurate mechanism which
1956 may scan more than necessary. To help compensate for this
1957 lack of accuracy, the prologue scanning loop below contains
1958 several clauses which will cause the loop to terminate early if
1959 an implausible prologue instruction is encountered.
1960
1961 The expression
1962
1963 prologue_start + 64
1964
1965 is a suitable endpoint since it accounts for the largest
1966 possible prologue plus up to five instructions inserted by
1967 the scheduler. */
1968
1969 if (prologue_end > prologue_start + 64)
1970 {
1971 prologue_end = prologue_start + 64; /* See above. */
1972 }
1973 }
1974 else
1975 {
1976 /* We have no symbol information. Our only option is to assume this
1977 function has a standard stack frame and the normal frame register.
1978 Then, we can find the value of our frame pointer on entrance to
1979 the callee (or at the present moment if this is the innermost frame).
1980 The value stored there should be the address of the stmfd + 8. */
1981 CORE_ADDR frame_loc;
1982 LONGEST return_value;
1983
1984 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1985 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1986 return;
1987 else
1988 {
1989 prologue_start = gdbarch_addr_bits_remove
1990 (gdbarch, return_value) - 8;
1991 prologue_end = prologue_start + 64; /* See above. */
1992 }
1993 }
1994
1995 if (prev_pc < prologue_end)
1996 prologue_end = prev_pc;
1997
1998 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1999}
2000
eb5492fa 2001static struct arm_prologue_cache *
a262aec2 2002arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 2003{
eb5492fa
DJ
2004 int reg;
2005 struct arm_prologue_cache *cache;
2006 CORE_ADDR unwound_fp;
c5aa993b 2007
35d5d4ee 2008 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2009 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 2010
a262aec2 2011 arm_scan_prologue (this_frame, cache);
848cfffb 2012
a262aec2 2013 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
2014 if (unwound_fp == 0)
2015 return cache;
c906108c 2016
4be43953 2017 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 2018
eb5492fa
DJ
2019 /* Calculate actual addresses of saved registers using offsets
2020 determined by arm_scan_prologue. */
a262aec2 2021 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 2022 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
2023 cache->saved_regs[reg].addr += cache->prev_sp;
2024
2025 return cache;
c906108c
SS
2026}
2027
eb5492fa
DJ
2028/* Our frame ID for a normal frame is the current function's starting PC
2029 and the caller's SP when we were called. */
c906108c 2030
148754e5 2031static void
a262aec2 2032arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
2033 void **this_cache,
2034 struct frame_id *this_id)
c906108c 2035{
eb5492fa
DJ
2036 struct arm_prologue_cache *cache;
2037 struct frame_id id;
2c404490 2038 CORE_ADDR pc, func;
f079148d 2039
eb5492fa 2040 if (*this_cache == NULL)
a262aec2 2041 *this_cache = arm_make_prologue_cache (this_frame);
eb5492fa 2042 cache = *this_cache;
2a451106 2043
2c404490
DJ
2044 /* This is meant to halt the backtrace at "_start". */
2045 pc = get_frame_pc (this_frame);
2046 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
eb5492fa 2047 return;
5a203e44 2048
eb5492fa
DJ
2049 /* If we've hit a wall, stop. */
2050 if (cache->prev_sp == 0)
2051 return;
24de872b 2052
0e9e9abd
UW
2053 /* Use function start address as part of the frame ID. If we cannot
2054 identify the start address (due to missing symbol information),
2055 fall back to just using the current PC. */
2c404490 2056 func = get_frame_func (this_frame);
0e9e9abd
UW
2057 if (!func)
2058 func = pc;
2059
eb5492fa 2060 id = frame_id_build (cache->prev_sp, func);
eb5492fa 2061 *this_id = id;
c906108c
SS
2062}
2063
a262aec2
DJ
2064static struct value *
2065arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 2066 void **this_cache,
a262aec2 2067 int prev_regnum)
24de872b 2068{
24568a2c 2069 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
2070 struct arm_prologue_cache *cache;
2071
eb5492fa 2072 if (*this_cache == NULL)
a262aec2 2073 *this_cache = arm_make_prologue_cache (this_frame);
eb5492fa 2074 cache = *this_cache;
24de872b 2075
eb5492fa 2076 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
2077 instead. The prologue may save PC, but it will point into this
2078 frame's prologue, not the next frame's resume location. Also
2079 strip the saved T bit. A valid LR may have the low bit set, but
2080 a valid PC never does. */
eb5492fa 2081 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
2082 {
2083 CORE_ADDR lr;
2084
2085 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2086 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 2087 arm_addr_bits_remove (gdbarch, lr));
b39cc962 2088 }
24de872b 2089
eb5492fa 2090 /* SP is generally not saved to the stack, but this frame is
a262aec2 2091 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
2092 The value was already reconstructed into PREV_SP. */
2093 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 2094 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 2095
b39cc962
DJ
2096 /* The CPSR may have been changed by the call instruction and by the
2097 called function. The only bit we can reconstruct is the T bit,
2098 by checking the low bit of LR as of the call. This is a reliable
2099 indicator of Thumb-ness except for some ARM v4T pre-interworking
2100 Thumb code, which could get away with a clear low bit as long as
2101 the called function did not use bx. Guess that all other
2102 bits are unchanged; the condition flags are presumably lost,
2103 but the processor status is likely valid. */
2104 if (prev_regnum == ARM_PS_REGNUM)
2105 {
2106 CORE_ADDR lr, cpsr;
9779414d 2107 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
2108
2109 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2110 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2111 if (IS_THUMB_ADDR (lr))
9779414d 2112 cpsr |= t_bit;
b39cc962 2113 else
9779414d 2114 cpsr &= ~t_bit;
b39cc962
DJ
2115 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2116 }
2117
a262aec2
DJ
2118 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2119 prev_regnum);
eb5492fa
DJ
2120}
2121
2122struct frame_unwind arm_prologue_unwind = {
2123 NORMAL_FRAME,
8fbca658 2124 default_frame_unwind_stop_reason,
eb5492fa 2125 arm_prologue_this_id,
a262aec2
DJ
2126 arm_prologue_prev_register,
2127 NULL,
2128 default_frame_sniffer
eb5492fa
DJ
2129};
2130
0e9e9abd
UW
2131/* Maintain a list of ARM exception table entries per objfile, similar to the
2132 list of mapping symbols. We only cache entries for standard ARM-defined
2133 personality routines; the cache will contain only the frame unwinding
2134 instructions associated with the entry (not the descriptors). */
2135
2136static const struct objfile_data *arm_exidx_data_key;
2137
2138struct arm_exidx_entry
2139{
2140 bfd_vma addr;
2141 gdb_byte *entry;
2142};
2143typedef struct arm_exidx_entry arm_exidx_entry_s;
2144DEF_VEC_O(arm_exidx_entry_s);
2145
2146struct arm_exidx_data
2147{
2148 VEC(arm_exidx_entry_s) **section_maps;
2149};
2150
2151static void
2152arm_exidx_data_free (struct objfile *objfile, void *arg)
2153{
2154 struct arm_exidx_data *data = arg;
2155 unsigned int i;
2156
2157 for (i = 0; i < objfile->obfd->section_count; i++)
2158 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2159}
2160
2161static inline int
2162arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2163 const struct arm_exidx_entry *rhs)
2164{
2165 return lhs->addr < rhs->addr;
2166}
2167
2168static struct obj_section *
2169arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2170{
2171 struct obj_section *osect;
2172
2173 ALL_OBJFILE_OSECTIONS (objfile, osect)
2174 if (bfd_get_section_flags (objfile->obfd,
2175 osect->the_bfd_section) & SEC_ALLOC)
2176 {
2177 bfd_vma start, size;
2178 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2179 size = bfd_get_section_size (osect->the_bfd_section);
2180
2181 if (start <= vma && vma < start + size)
2182 return osect;
2183 }
2184
2185 return NULL;
2186}
2187
2188/* Parse contents of exception table and exception index sections
2189 of OBJFILE, and fill in the exception table entry cache.
2190
2191 For each entry that refers to a standard ARM-defined personality
2192 routine, extract the frame unwinding instructions (from either
2193 the index or the table section). The unwinding instructions
2194 are normalized by:
2195 - extracting them from the rest of the table data
2196 - converting to host endianness
2197 - appending the implicit 0xb0 ("Finish") code
2198
2199 The extracted and normalized instructions are stored for later
2200 retrieval by the arm_find_exidx_entry routine. */
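/* For reference, each eight-byte .ARM.exidx entry is a pair of words:

     word 0:  prel31 offset (bit 31 clear) of the function start
     word 1:  one of
              0x00000001       EXIDX_CANTUNWIND
              0x80nnnnnn       short-form unwind data stored inline
              prel31 offset    of a table entry in .ARM.extab

   A prel31 field is sign-extended from 31 bits and is relative to the
   address of the field itself; that is what the
   "((val & 0x7fffffff) ^ 0x40000000) - 0x40000000" computation in the
   parser below implements.  */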
2201
2202static void
2203arm_exidx_new_objfile (struct objfile *objfile)
2204{
3bb47e8b 2205 struct cleanup *cleanups;
0e9e9abd
UW
2206 struct arm_exidx_data *data;
2207 asection *exidx, *extab;
2208 bfd_vma exidx_vma = 0, extab_vma = 0;
2209 bfd_size_type exidx_size = 0, extab_size = 0;
2210 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2211 LONGEST i;
2212
2213 /* If we've already touched this file, do nothing. */
2214 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2215 return;
3bb47e8b 2216 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2217
2218 /* Read contents of exception table and index. */
2219 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2220 if (exidx)
2221 {
2222 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2223 exidx_size = bfd_get_section_size (exidx);
2224 exidx_data = xmalloc (exidx_size);
2225 make_cleanup (xfree, exidx_data);
2226
2227 if (!bfd_get_section_contents (objfile->obfd, exidx,
2228 exidx_data, 0, exidx_size))
2229 {
2230 do_cleanups (cleanups);
2231 return;
2232 }
2233 }
2234
2235 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2236 if (extab)
2237 {
2238 extab_vma = bfd_section_vma (objfile->obfd, extab);
2239 extab_size = bfd_get_section_size (extab);
2240 extab_data = xmalloc (extab_size);
2241 make_cleanup (xfree, extab_data);
2242
2243 if (!bfd_get_section_contents (objfile->obfd, extab,
2244 extab_data, 0, extab_size))
2245 {
2246 do_cleanups (cleanups);
2247 return;
2248 }
2249 }
2250
2251 /* Allocate exception table data structure. */
2252 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2253 set_objfile_data (objfile, arm_exidx_data_key, data);
2254 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2255 objfile->obfd->section_count,
2256 VEC(arm_exidx_entry_s) *);
2257
2258 /* Fill in exception table. */
2259 for (i = 0; i < exidx_size / 8; i++)
2260 {
2261 struct arm_exidx_entry new_exidx_entry;
2262 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2263 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2264 bfd_vma addr = 0, word = 0;
2265 int n_bytes = 0, n_words = 0;
2266 struct obj_section *sec;
2267 gdb_byte *entry = NULL;
2268
2269 /* Extract address of start of function. */
2270 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2271 idx += exidx_vma + i * 8;
2272
2273 /* Find section containing function and compute section offset. */
2274 sec = arm_obj_section_from_vma (objfile, idx);
2275 if (sec == NULL)
2276 continue;
2277 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2278
2279 /* Determine address of exception table entry. */
2280 if (val == 1)
2281 {
2282 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2283 }
2284 else if ((val & 0xff000000) == 0x80000000)
2285 {
2286 /* Exception table entry embedded in .ARM.exidx
2287 -- must be short form. */
2288 word = val;
2289 n_bytes = 3;
2290 }
2291 else if (!(val & 0x80000000))
2292 {
2293 /* Exception table entry in .ARM.extab. */
2294 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2295 addr += exidx_vma + i * 8 + 4;
2296
2297 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2298 {
2299 word = bfd_h_get_32 (objfile->obfd,
2300 extab_data + addr - extab_vma);
2301 addr += 4;
2302
2303 if ((word & 0xff000000) == 0x80000000)
2304 {
2305 /* Short form. */
2306 n_bytes = 3;
2307 }
2308 else if ((word & 0xff000000) == 0x81000000
2309 || (word & 0xff000000) == 0x82000000)
2310 {
2311 /* Long form. */
2312 n_bytes = 2;
2313 n_words = ((word >> 16) & 0xff);
2314 }
2315 else if (!(word & 0x80000000))
2316 {
2317 bfd_vma pers;
2318 struct obj_section *pers_sec;
2319 int gnu_personality = 0;
2320
2321 /* Custom personality routine. */
2322 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2323 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2324
2325 /* Check whether we've got one of the variants of the
2326 GNU personality routines. */
2327 pers_sec = arm_obj_section_from_vma (objfile, pers);
2328 if (pers_sec)
2329 {
2330 static const char *personality[] =
2331 {
2332 "__gcc_personality_v0",
2333 "__gxx_personality_v0",
2334 "__gcj_personality_v0",
2335 "__gnu_objc_personality_v0",
2336 NULL
2337 };
2338
2339 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2340 int k;
2341
2342 for (k = 0; personality[k]; k++)
2343 if (lookup_minimal_symbol_by_pc_name
2344 (pc, personality[k], objfile))
2345 {
2346 gnu_personality = 1;
2347 break;
2348 }
2349 }
2350
2351 /* If so, the next word contains a word count in the high
2352 byte, followed by the same unwind instructions as the
2353 pre-defined forms. */
2354 if (gnu_personality
2355 && addr + 4 <= extab_vma + extab_size)
2356 {
2357 word = bfd_h_get_32 (objfile->obfd,
2358 extab_data + addr - extab_vma);
2359 addr += 4;
2360 n_bytes = 3;
2361 n_words = ((word >> 24) & 0xff);
2362 }
2363 }
2364 }
2365 }
2366
2367 /* Sanity check address. */
2368 if (n_words)
2369 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2370 n_words = n_bytes = 0;
2371
2372 /* The unwind instructions reside in WORD (only the N_BYTES least
2373 significant bytes are valid), followed by N_WORDS words in the
2374 extab section starting at ADDR. */
2375 if (n_bytes || n_words)
2376 {
2377 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2378 n_bytes + n_words * 4 + 1);
2379
2380 while (n_bytes--)
2381 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2382
2383 while (n_words--)
2384 {
2385 word = bfd_h_get_32 (objfile->obfd,
2386 extab_data + addr - extab_vma);
2387 addr += 4;
2388
2389 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2390 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2391 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2392 *p++ = (gdb_byte) (word & 0xff);
2393 }
2394
2395 /* Implied "Finish" to terminate the list. */
2396 *p++ = 0xb0;
2397 }
2398
2399 /* Push the entry onto the vector. Entries are guaranteed to
2400 appear in order of increasing address. */
2401 new_exidx_entry.addr = idx;
2402 new_exidx_entry.entry = entry;
2403 VEC_safe_push (arm_exidx_entry_s,
2404 data->section_maps[sec->the_bfd_section->index],
2405 &new_exidx_entry);
2406 }
2407
2408 do_cleanups (cleanups);
2409}
2410
2411/* Search for the exception table entry covering MEMADDR. If one is found,
2412 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2413 set *START to the start of the region covered by this entry. */
2414
2415static gdb_byte *
2416arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2417{
2418 struct obj_section *sec;
2419
2420 sec = find_pc_section (memaddr);
2421 if (sec != NULL)
2422 {
2423 struct arm_exidx_data *data;
2424 VEC(arm_exidx_entry_s) *map;
2425 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2426 unsigned int idx;
2427
2428 data = objfile_data (sec->objfile, arm_exidx_data_key);
2429 if (data != NULL)
2430 {
2431 map = data->section_maps[sec->the_bfd_section->index];
2432 if (!VEC_empty (arm_exidx_entry_s, map))
2433 {
2434 struct arm_exidx_entry *map_sym;
2435
2436 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2437 arm_compare_exidx_entries);
2438
2439 /* VEC_lower_bound finds the earliest ordered insertion
2440 point. If the following symbol starts at this exact
2441 address, we use that; otherwise, the preceding
2442 exception table entry covers this address. */
2443 if (idx < VEC_length (arm_exidx_entry_s, map))
2444 {
2445 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2446 if (map_sym->addr == map_key.addr)
2447 {
2448 if (start)
2449 *start = map_sym->addr + obj_section_addr (sec);
2450 return map_sym->entry;
2451 }
2452 }
2453
2454 if (idx > 0)
2455 {
2456 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2457 if (start)
2458 *start = map_sym->addr + obj_section_addr (sec);
2459 return map_sym->entry;
2460 }
2461 }
2462 }
2463 }
2464
2465 return NULL;
2466}
2467
2468/* Given the current frame THIS_FRAME, and its associated frame unwinding
2469 instruction list from the ARM exception table entry ENTRY, allocate and
2470 return a prologue cache structure describing how to unwind this frame.
2471
2472 Return NULL if the unwinding instruction list contains a "spare",
2473 "reserved" or "refuse to unwind" instruction as defined in section
2474 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2475 for the ARM Architecture" document. */
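/* For instance, a function whose prologue is "push {r7, lr}; add r7,
   sp, #0" typically carries the unwind byte sequence
   { 0x97, 0x84, 0x08, 0xb0 }: 0x97 sets VSP from r7, 0x84 0x08 pops
   {r7, lr} (registers r4-r15 under mask 0x408), and 0xb0 ("Finish")
   copies the popped LR into PC, since PC itself was not restored.  */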
2476
2477static struct arm_prologue_cache *
2478arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2479{
2480 CORE_ADDR vsp = 0;
2481 int vsp_valid = 0;
2482
2483 struct arm_prologue_cache *cache;
2484 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2485 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2486
2487 for (;;)
2488 {
2489 gdb_byte insn;
2490
2491 /* Whenever we reload SP, we have to retrieve its
2492 actual value in the current frame. */
2493 if (!vsp_valid)
2494 {
2495 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2496 {
2497 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2498 vsp = get_frame_register_unsigned (this_frame, reg);
2499 }
2500 else
2501 {
2502 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2503 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2504 }
2505
2506 vsp_valid = 1;
2507 }
2508
2509 /* Decode next unwind instruction. */
2510 insn = *entry++;
2511
2512 if ((insn & 0xc0) == 0)
2513 {
2514 int offset = insn & 0x3f;
2515 vsp += (offset << 2) + 4;
2516 }
2517 else if ((insn & 0xc0) == 0x40)
2518 {
2519 int offset = insn & 0x3f;
2520 vsp -= (offset << 2) + 4;
2521 }
2522 else if ((insn & 0xf0) == 0x80)
2523 {
2524 int mask = ((insn & 0xf) << 8) | *entry++;
2525 int i;
2526
2527 /* The special case of an all-zero mask identifies
2528 "Refuse to unwind". We return NULL to fall back
2529 to the prologue analyzer. */
2530 if (mask == 0)
2531 return NULL;
2532
2533 /* Pop registers r4..r15 under mask. */
2534 for (i = 0; i < 12; i++)
2535 if (mask & (1 << i))
2536 {
2537 cache->saved_regs[4 + i].addr = vsp;
2538 vsp += 4;
2539 }
2540
2541 /* Special-case popping SP -- we need to reload vsp. */
2542 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2543 vsp_valid = 0;
2544 }
2545 else if ((insn & 0xf0) == 0x90)
2546 {
2547 int reg = insn & 0xf;
2548
2549 /* Reserved cases. */
2550 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2551 return NULL;
2552
2553 /* Set SP from another register and mark VSP for reload. */
2554 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2555 vsp_valid = 0;
2556 }
2557 else if ((insn & 0xf0) == 0xa0)
2558 {
2559 int count = insn & 0x7;
2560 int pop_lr = (insn & 0x8) != 0;
2561 int i;
2562
2563 /* Pop r4..r[4+count]. */
2564 for (i = 0; i <= count; i++)
2565 {
2566 cache->saved_regs[4 + i].addr = vsp;
2567 vsp += 4;
2568 }
2569
2570 /* If indicated by flag, pop LR as well. */
2571 if (pop_lr)
2572 {
2573 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2574 vsp += 4;
2575 }
2576 }
2577 else if (insn == 0xb0)
2578 {
2579 /* We could only have updated PC by popping into it; if so, it
2580 will show up as an address. Otherwise, copy LR into PC. */
2581 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2582 cache->saved_regs[ARM_PC_REGNUM]
2583 = cache->saved_regs[ARM_LR_REGNUM];
2584
2585 /* We're done. */
2586 break;
2587 }
2588 else if (insn == 0xb1)
2589 {
2590 int mask = *entry++;
2591 int i;
2592
2593 /* An all-zero mask or a mask >= 16 is "spare". */
2594 if (mask == 0 || mask >= 16)
2595 return NULL;
2596
2597 /* Pop r0..r3 under mask. */
2598 for (i = 0; i < 4; i++)
2599 if (mask & (1 << i))
2600 {
2601 cache->saved_regs[i].addr = vsp;
2602 vsp += 4;
2603 }
2604 }
2605 else if (insn == 0xb2)
2606 {
2607 ULONGEST offset = 0;
2608 unsigned shift = 0;
2609
2610 do
2611 {
2612 offset |= (*entry & 0x7f) << shift;
2613 shift += 7;
2614 }
2615 while (*entry++ & 0x80);
2616
2617 vsp += 0x204 + (offset << 2);
2618 }
2619 else if (insn == 0xb3)
2620 {
2621 int start = *entry >> 4;
2622 int count = (*entry++) & 0xf;
2623 int i;
2624
2625 /* Only registers D0..D15 are valid here. */
2626 if (start + count >= 16)
2627 return NULL;
2628
2629 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2630 for (i = 0; i <= count; i++)
2631 {
2632 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2633 vsp += 8;
2634 }
2635
2636 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2637 vsp += 4;
2638 }
2639 else if ((insn & 0xf8) == 0xb8)
2640 {
2641 int count = insn & 0x7;
2642 int i;
2643
2644 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2645 for (i = 0; i <= count; i++)
2646 {
2647 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2648 vsp += 8;
2649 }
2650
2651 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2652 vsp += 4;
2653 }
2654 else if (insn == 0xc6)
2655 {
2656 int start = *entry >> 4;
2657 int count = (*entry++) & 0xf;
2658 int i;
2659
2660 /* Only registers WR0..WR15 are valid. */
2661 if (start + count >= 16)
2662 return NULL;
2663
2664 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2665 for (i = 0; i <= count; i++)
2666 {
2667 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2668 vsp += 8;
2669 }
2670 }
2671 else if (insn == 0xc7)
2672 {
2673 int mask = *entry++;
2674 int i;
2675
2676 /* An all-zero mask or a mask >= 16 is "spare". */
2677 if (mask == 0 || mask >= 16)
2678 return NULL;
2679
2680 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2681 for (i = 0; i < 4; i++)
2682 if (mask & (1 << i))
2683 {
2684 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2685 vsp += 4;
2686 }
2687 }
2688 else if ((insn & 0xf8) == 0xc0)
2689 {
2690 int count = insn & 0x7;
2691 int i;
2692
2693 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2694 for (i = 0; i <= count; i++)
2695 {
2696 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2697 vsp += 8;
2698 }
2699 }
2700 else if (insn == 0xc8)
2701 {
2702 int start = *entry >> 4;
2703 int count = (*entry++) & 0xf;
2704 int i;
2705
2706 /* Only registers D16..D31 are valid here. */
2707 if (start + count >= 16)
2708 return NULL;
2709
2710 /* Pop VFP double-precision registers
2711 D[16+start]..D[16+start+count]. */
2712 for (i = 0; i <= count; i++)
2713 {
2714 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2715 vsp += 8;
2716 }
2717 }
2718 else if (insn == 0xc9)
2719 {
2720 int start = *entry >> 4;
2721 int count = (*entry++) & 0xf;
2722 int i;
2723
2724 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2725 for (i = 0; i <= count; i++)
2726 {
2727 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2728 vsp += 8;
2729 }
2730 }
2731 else if ((insn & 0xf8) == 0xd0)
2732 {
2733 int count = insn & 0x7;
2734 int i;
2735
2736 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2737 for (i = 0; i <= count; i++)
2738 {
2739 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2740 vsp += 8;
2741 }
2742 }
2743 else
2744 {
2745 /* Everything else is "spare". */
2746 return NULL;
2747 }
2748 }
2749
2750 /* If we restore SP from a register, assume this was the frame register.
2751 Otherwise just fall back to SP as frame register. */
2752 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2753 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2754 else
2755 cache->framereg = ARM_SP_REGNUM;
2756
2757 /* Determine offset to previous frame. */
2758 cache->framesize
2759 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2760
2761 /* We already got the previous SP. */
2762 cache->prev_sp = vsp;
2763
2764 return cache;
2765}
2766
2767/* Unwinding via ARM exception table entries. Note that the sniffer
2768 already computes a filled-in prologue cache, which is then used
2769 with the same arm_prologue_this_id and arm_prologue_prev_register
2770 routines also used for prologue-parsing based unwinding. */
2771
2772static int
2773arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2774 struct frame_info *this_frame,
2775 void **this_prologue_cache)
2776{
2777 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2778 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2779 CORE_ADDR addr_in_block, exidx_region, func_start;
2780 struct arm_prologue_cache *cache;
2781 gdb_byte *entry;
2782
2783 /* See if we have an ARM exception table entry covering this address. */
2784 addr_in_block = get_frame_address_in_block (this_frame);
2785 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2786 if (!entry)
2787 return 0;
2788
2789 /* The ARM exception table does not describe unwind information
2790 for arbitrary PC values, but is guaranteed to be correct only
2791 at call sites. We have to decide here whether we want to use
2792 ARM exception table information for this frame, or fall back
2793 to using prologue parsing. (Note that if we have DWARF CFI,
2794 this sniffer isn't even called -- CFI is always preferred.)
2795
2796 Before we make this decision, however, we check whether we
2797 actually have *symbol* information for the current frame.
2798 If not, prologue parsing would not work anyway, so we might
2799 as well use the exception table and hope for the best. */
2800 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2801 {
2802 int exc_valid = 0;
2803
2804 /* If the next frame is "normal", we are at a call site in this
2805 frame, so exception information is guaranteed to be valid. */
2806 if (get_next_frame (this_frame)
2807 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2808 exc_valid = 1;
2809
2810 /* We also assume exception information is valid if we're currently
2811 blocked in a system call. The system library is supposed to
2812 ensure this, so that e.g. pthread cancellation works. */
2813 if (arm_frame_is_thumb (this_frame))
2814 {
2815 LONGEST insn;
2816
2817 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2818 byte_order_for_code, &insn)
2819 && (insn & 0xff00) == 0xdf00 /* svc */)
2820 exc_valid = 1;
2821 }
2822 else
2823 {
2824 LONGEST insn;
2825
2826 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2827 byte_order_for_code, &insn)
2828 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2829 exc_valid = 1;
2830 }
2831
2832 /* Bail out if we don't know that exception information is valid. */
2833 if (!exc_valid)
2834 return 0;
2835
2836 /* The ARM exception index does not mark the *end* of the region
2837 covered by the entry, and some functions will not have any entry.
2838 To correctly recognize the end of the covered region, the linker
2839 should have inserted dummy records with a CANTUNWIND marker.
2840
2841 Unfortunately, current versions of GNU ld do not reliably do
2842 this, and thus we may have found an incorrect entry above.
2843 As a (temporary) sanity check, we only use the entry if it
2844 lies *within* the bounds of the function. Note that this check
2845 might reject perfectly valid entries that just happen to cover
2846 multiple functions; therefore this check ought to be removed
2847 once the linker is fixed. */
2848 if (func_start > exidx_region)
2849 return 0;
2850 }
2851
2852 /* Decode the list of unwinding instructions into a prologue cache.
2853 Note that this may fail due to e.g. a "refuse to unwind" code. */
2854 cache = arm_exidx_fill_cache (this_frame, entry);
2855 if (!cache)
2856 return 0;
2857
2858 *this_prologue_cache = cache;
2859 return 1;
2860}
2861
2862struct frame_unwind arm_exidx_unwind = {
2863 NORMAL_FRAME,
8fbca658 2864 default_frame_unwind_stop_reason,
0e9e9abd
UW
2865 arm_prologue_this_id,
2866 arm_prologue_prev_register,
2867 NULL,
2868 arm_exidx_unwind_sniffer
2869};
2870
909cf6ea 2871static struct arm_prologue_cache *
a262aec2 2872arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2873{
909cf6ea 2874 struct arm_prologue_cache *cache;
909cf6ea 2875
35d5d4ee 2876 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2877 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2878
a262aec2 2879 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2880
2881 return cache;
2882}
2883
2884/* Our frame ID for a stub frame is the current SP and LR. */
2885
2886static void
a262aec2 2887arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2888 void **this_cache,
2889 struct frame_id *this_id)
2890{
2891 struct arm_prologue_cache *cache;
2892
2893 if (*this_cache == NULL)
a262aec2 2894 *this_cache = arm_make_stub_cache (this_frame);
909cf6ea
DJ
2895 cache = *this_cache;
2896
a262aec2 2897 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2898}
2899
a262aec2
DJ
2900static int
2901arm_stub_unwind_sniffer (const struct frame_unwind *self,
2902 struct frame_info *this_frame,
2903 void **this_prologue_cache)
909cf6ea 2904{
93d42b30 2905 CORE_ADDR addr_in_block;
909cf6ea
DJ
2906 char dummy[4];
2907
a262aec2 2908 addr_in_block = get_frame_address_in_block (this_frame);
93d42b30 2909 if (in_plt_section (addr_in_block, NULL)
fc36e839
DE
2910 /* We also use the stub unwinder if the target memory is unreadable
2911 to avoid having the prologue unwinder trying to read it. */
a262aec2
DJ
2912 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
2913 return 1;
909cf6ea 2914
a262aec2 2915 return 0;
909cf6ea
DJ
2916}
2917
a262aec2
DJ
2918struct frame_unwind arm_stub_unwind = {
2919 NORMAL_FRAME,
8fbca658 2920 default_frame_unwind_stop_reason,
a262aec2
DJ
2921 arm_stub_this_id,
2922 arm_prologue_prev_register,
2923 NULL,
2924 arm_stub_unwind_sniffer
2925};
2926
24de872b 2927static CORE_ADDR
a262aec2 2928arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
2929{
2930 struct arm_prologue_cache *cache;
2931
eb5492fa 2932 if (*this_cache == NULL)
a262aec2 2933 *this_cache = arm_make_prologue_cache (this_frame);
eb5492fa
DJ
2934 cache = *this_cache;
2935
4be43953 2936 return cache->prev_sp - cache->framesize;
24de872b
DJ
2937}
2938
eb5492fa
DJ
2939struct frame_base arm_normal_base = {
2940 &arm_prologue_unwind,
2941 arm_normal_frame_base,
2942 arm_normal_frame_base,
2943 arm_normal_frame_base
2944};
2945
a262aec2 2946/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
2947 dummy frame. The frame ID's base needs to match the TOS value
2948 saved by save_dummy_frame_tos() and returned from
2949 arm_push_dummy_call, and the PC needs to match the dummy frame's
2950 breakpoint. */
c906108c 2951
eb5492fa 2952static struct frame_id
a262aec2 2953arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 2954{
0963b4bd
MS
2955 return frame_id_build (get_frame_register_unsigned (this_frame,
2956 ARM_SP_REGNUM),
a262aec2 2957 get_frame_pc (this_frame));
eb5492fa 2958}
c3b4394c 2959
eb5492fa
DJ
2960/* Given THIS_FRAME, find the previous frame's resume PC (which will
2961 be used to construct the previous frame's ID, after looking up the
2962 containing function). */
c3b4394c 2963
eb5492fa
DJ
2964static CORE_ADDR
2965arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2966{
2967 CORE_ADDR pc;
2968 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 2969 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
2970}
2971
2972static CORE_ADDR
2973arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2974{
2975 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
2976}
2977
b39cc962
DJ
2978static struct value *
2979arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2980 int regnum)
2981{
24568a2c 2982 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 2983 CORE_ADDR lr, cpsr;
9779414d 2984 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
2985
2986 switch (regnum)
2987 {
2988 case ARM_PC_REGNUM:
2989 /* The PC is normally copied from the return column, which
2990 describes saves of LR. However, that version may have an
2991 extra bit set to indicate Thumb state. The bit is not
2992 part of the PC. */
2993 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2994 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 2995 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
2996
2997 case ARM_PS_REGNUM:
2998 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 2999 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3000 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3001 if (IS_THUMB_ADDR (lr))
9779414d 3002 cpsr |= t_bit;
b39cc962 3003 else
9779414d 3004 cpsr &= ~t_bit;
ca38c58e 3005 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3006
3007 default:
3008 internal_error (__FILE__, __LINE__,
3009 _("Unexpected register %d"), regnum);
3010 }
3011}
3012
3013static void
3014arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3015 struct dwarf2_frame_state_reg *reg,
3016 struct frame_info *this_frame)
3017{
3018 switch (regnum)
3019 {
3020 case ARM_PC_REGNUM:
3021 case ARM_PS_REGNUM:
3022 reg->how = DWARF2_FRAME_REG_FN;
3023 reg->loc.fn = arm_dwarf2_prev_register;
3024 break;
3025 case ARM_SP_REGNUM:
3026 reg->how = DWARF2_FRAME_REG_CFA;
3027 break;
3028 }
3029}
3030
4024ca99
UW
3031/* Return true if we are in the function's epilogue, i.e. after the
3032 instruction that destroyed the function's stack frame. */
3033
3034static int
3035thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3036{
3037 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3038 unsigned int insn, insn2;
3039 int found_return = 0, found_stack_adjust = 0;
3040 CORE_ADDR func_start, func_end;
3041 CORE_ADDR scan_pc;
3042 gdb_byte buf[4];
3043
3044 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3045 return 0;
3046
3047 /* The epilogue is a sequence of instructions along the following lines:
3048
3049 - add stack frame size to SP or FP
3050 - [if frame pointer used] restore SP from FP
3051 - restore registers from SP [may include PC]
3052 - a return-type instruction [if PC wasn't already restored]
3053
3054 In a first pass, we scan forward from the current PC and verify the
3055 instructions we find as compatible with this sequence, ending in a
3056 return instruction.
3057
3058 However, this is not sufficient to distinguish indirect function calls
3059 within a function from indirect tail calls in the epilogue in some cases.
3060 Therefore, if we didn't already find any SP-changing instruction during
3061 forward scan, we add a backward scanning heuristic to ensure we actually
3062 are in the epilogue. */
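   /* For instance, a typical Thumb epilogue accepted here is

	  mov sp, r7		(0x46bd)
	  pop {r4-r7, pc}	(0xbdf0)

      or, without a frame pointer, "add sp, #16" (0xb004) followed by
      "pop {r4, pc}" (0xbd10).  */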
3063
3064 scan_pc = pc;
3065 while (scan_pc < func_end && !found_return)
3066 {
3067 if (target_read_memory (scan_pc, buf, 2))
3068 break;
3069
3070 scan_pc += 2;
3071 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3072
3073 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3074 found_return = 1;
3075 else if (insn == 0x46f7) /* mov pc, lr */
3076 found_return = 1;
3077 else if (insn == 0x46bd) /* mov sp, r7 */
3078 found_stack_adjust = 1;
3079 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3080 found_stack_adjust = 1;
3081 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
3082 {
3083 found_stack_adjust = 1;
3084 if (insn & 0x0100) /* <registers> include PC. */
3085 found_return = 1;
3086 }
db24da6d 3087 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3088 {
3089 if (target_read_memory (scan_pc, buf, 2))
3090 break;
3091
3092 scan_pc += 2;
3093 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3094
3095 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3096 {
3097 found_stack_adjust = 1;
3098 if (insn2 & 0x8000) /* <registers> include PC. */
3099 found_return = 1;
3100 }
3101 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3102 && (insn2 & 0x0fff) == 0x0b04)
3103 {
3104 found_stack_adjust = 1;
3105 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3106 found_return = 1;
3107 }
3108 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3109 && (insn2 & 0x0e00) == 0x0a00)
3110 found_stack_adjust = 1;
3111 else
3112 break;
3113 }
3114 else
3115 break;
3116 }
3117
3118 if (!found_return)
3119 return 0;
3120
3121 /* Since any instruction in the epilogue sequence, with the possible
3122 exception of return itself, updates the stack pointer, we need to
3123 scan backwards for at most one instruction. Try either a 16-bit or
3124 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3125 too much about false positives. */
4024ca99
UW
3126
3127 if (!found_stack_adjust)
3128 {
3129 if (pc - 4 < func_start)
3130 return 0;
3131 if (target_read_memory (pc - 4, buf, 4))
3132 return 0;
3133
3134 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3135 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3136
3137 if (insn2 == 0x46bd) /* mov sp, r7 */
3138 found_stack_adjust = 1;
3139 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
3140 found_stack_adjust = 1;
3141 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
3142 found_stack_adjust = 1;
3143 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3144 found_stack_adjust = 1;
3145 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3146 && (insn2 & 0x0fff) == 0x0b04)
3147 found_stack_adjust = 1;
3148 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3149 && (insn2 & 0x0e00) == 0x0a00)
3150 found_stack_adjust = 1;
3151 }
3152
3153 return found_stack_adjust;
3154}
3155
3156/* Return true if we are in the function's epilogue, i.e. after the
3157 instruction that destroyed the function's stack frame. */
3158
3159static int
3160arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3161{
3162 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3163 unsigned int insn;
3164 int found_return, found_stack_adjust;
3165 CORE_ADDR func_start, func_end;
3166
3167 if (arm_pc_is_thumb (gdbarch, pc))
3168 return thumb_in_function_epilogue_p (gdbarch, pc);
3169
3170 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3171 return 0;
3172
3173 /* We are in the epilogue if the previous instruction was a stack
3174 adjustment and the next instruction is a possible return (bx, mov
3175 pc, or pop). We could have to scan backwards to find the stack
3176 adjustment, or forwards to find the return, but this is a decent
3177 approximation. First scan forwards. */
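  /* For instance, with PC at the LDMFD of the sequence

	add   sp, sp, #16	(0xe28dd010)
	ldmfd sp!, {r4-r6, pc}	(0xe8bd8070)

     the forward scan finds the return and the backward scan finds the
     stack adjustment.  */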
3178
3179 found_return = 0;
3180 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3181 if (bits (insn, 28, 31) != INST_NV)
3182 {
3183 if ((insn & 0x0ffffff0) == 0x012fff10)
3184 /* BX. */
3185 found_return = 1;
3186 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3187 /* MOV PC. */
3188 found_return = 1;
3189 else if ((insn & 0x0fff0000) == 0x08bd0000
3190 && (insn & 0x0000c000) != 0)
3191 /* POP (LDMIA), including PC or LR. */
3192 found_return = 1;
3193 }
3194
3195 if (!found_return)
3196 return 0;
3197
3198 /* Scan backwards. This is just a heuristic, so do not worry about
3199 false positives from mode changes. */
3200
3201 if (pc < func_start + 4)
3202 return 0;
3203
73c964d6 3204 found_stack_adjust = 0;
4024ca99
UW
3205 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3206 if (bits (insn, 28, 31) != INST_NV)
3207 {
3208 if ((insn & 0x0df0f000) == 0x0080d000)
3209 /* ADD SP (register or immediate). */
3210 found_stack_adjust = 1;
3211 else if ((insn & 0x0df0f000) == 0x0040d000)
3212 /* SUB SP (register or immediate). */
3213 found_stack_adjust = 1;
3214 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3215 /* MOV SP. */
77bc0675 3216 found_stack_adjust = 1;
4024ca99
UW
3217 else if ((insn & 0x0fff0000) == 0x08bd0000)
3218 /* POP (LDMIA). */
3219 found_stack_adjust = 1;
3220 }
3221
3222 if (found_stack_adjust)
3223 return 1;
3224
3225 return 0;
3226}
3227
3228
2dd604e7
RE
3229/* When arguments must be pushed onto the stack, they go on in reverse
3230 order. The code below implements a FILO (stack) to do this. */
3231
3232struct stack_item
3233{
3234 int len;
3235 struct stack_item *prev;
3236 void *data;
3237};
3238
3239static struct stack_item *
8c6363cf 3240push_stack_item (struct stack_item *prev, const void *contents, int len)
2dd604e7
RE
3241{
3242 struct stack_item *si;
3243 si = xmalloc (sizeof (struct stack_item));
226c7fbc 3244 si->data = xmalloc (len);
2dd604e7
RE
3245 si->len = len;
3246 si->prev = prev;
3247 memcpy (si->data, contents, len);
3248 return si;
3249}
3250
3251static struct stack_item *
3252pop_stack_item (struct stack_item *si)
3253{
3254 struct stack_item *dead = si;
3255 si = si->prev;
3256 xfree (dead->data);
3257 xfree (dead);
3258 return si;
3259}
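/* A rough sketch of how this list is used when marshalling arguments
   (see arm_push_dummy_call below): values that do not fit in the
   argument registers are pushed onto SI first and later flushed to the
   inferior stack in reverse order:

     si = push_stack_item (si, val, len);
     ...
     while (si)
       {
	 sp -= si->len;
	 write_memory (sp, si->data, si->len);
	 si = pop_stack_item (si);
       }
*/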
3260
2af48f68
PB
3261
3262/* Return the alignment (in bytes) of the given type. */
3263
3264static int
3265arm_type_align (struct type *t)
3266{
3267 int n;
3268 int align;
3269 int falign;
3270
3271 t = check_typedef (t);
3272 switch (TYPE_CODE (t))
3273 {
3274 default:
3275 /* Should never happen. */
3276 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3277 return 4;
3278
3279 case TYPE_CODE_PTR:
3280 case TYPE_CODE_ENUM:
3281 case TYPE_CODE_INT:
3282 case TYPE_CODE_FLT:
3283 case TYPE_CODE_SET:
3284 case TYPE_CODE_RANGE:
3285 case TYPE_CODE_BITSTRING:
3286 case TYPE_CODE_REF:
3287 case TYPE_CODE_CHAR:
3288 case TYPE_CODE_BOOL:
3289 return TYPE_LENGTH (t);
3290
3291 case TYPE_CODE_ARRAY:
3292 case TYPE_CODE_COMPLEX:
3293 /* TODO: What about vector types? */
3294 return arm_type_align (TYPE_TARGET_TYPE (t));
3295
3296 case TYPE_CODE_STRUCT:
3297 case TYPE_CODE_UNION:
3298 align = 1;
3299 for (n = 0; n < TYPE_NFIELDS (t); n++)
3300 {
3301 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3302 if (falign > align)
3303 align = falign;
3304 }
3305 return align;
3306 }
3307}
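/* For example, this yields 4 for "int", 8 for "double", and 8 for
   "struct { char c; double d; }" (the largest member alignment).  */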
3308
90445bd3
DJ
3309/* Possible base types for a candidate for passing and returning in
3310 VFP registers. */
3311
3312enum arm_vfp_cprc_base_type
3313{
3314 VFP_CPRC_UNKNOWN,
3315 VFP_CPRC_SINGLE,
3316 VFP_CPRC_DOUBLE,
3317 VFP_CPRC_VEC64,
3318 VFP_CPRC_VEC128
3319};
3320
3321/* The length of one element of base type B. */
3322
3323static unsigned
3324arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3325{
3326 switch (b)
3327 {
3328 case VFP_CPRC_SINGLE:
3329 return 4;
3330 case VFP_CPRC_DOUBLE:
3331 return 8;
3332 case VFP_CPRC_VEC64:
3333 return 8;
3334 case VFP_CPRC_VEC128:
3335 return 16;
3336 default:
3337 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3338 (int) b);
3339 }
3340}
3341
3342/* The character ('s', 'd' or 'q') for the type of VFP register used
3343 for passing base type B. */
3344
3345static int
3346arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3347{
3348 switch (b)
3349 {
3350 case VFP_CPRC_SINGLE:
3351 return 's';
3352 case VFP_CPRC_DOUBLE:
3353 return 'd';
3354 case VFP_CPRC_VEC64:
3355 return 'd';
3356 case VFP_CPRC_VEC128:
3357 return 'q';
3358 default:
3359 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3360 (int) b);
3361 }
3362}
3363
3364/* Determine whether T may be part of a candidate for passing and
3365 returning in VFP registers, ignoring the limit on the total number
3366 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3367 classification of the first valid component found; if it is not
3368 VFP_CPRC_UNKNOWN, all components must have the same classification
3369 as *BASE_TYPE. If it is found that T contains a type not permitted
3370 for passing and returning in VFP registers, a type differently
3371 classified from *BASE_TYPE, or two types differently classified
3372 from each other, return -1, otherwise return the total number of
3373 base-type elements found (possibly 0 in an empty structure or
3374 array). Vectors and complex types are not currently supported,
3375 matching the generic AAPCS support. */
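/* Some examples of this classification:

     struct { float x, y; }         -> 2 elements of VFP_CPRC_SINGLE
     struct { double d[3]; }        -> 3 elements of VFP_CPRC_DOUBLE
     struct { float f; double d; }  -> -1 (differently classified members)
     struct { float f[5]; }         -> 5 elements, which the caller
                                       (arm_vfp_call_candidate) rejects
                                       for exceeding four.  */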
3376
3377static int
3378arm_vfp_cprc_sub_candidate (struct type *t,
3379 enum arm_vfp_cprc_base_type *base_type)
3380{
3381 t = check_typedef (t);
3382 switch (TYPE_CODE (t))
3383 {
3384 case TYPE_CODE_FLT:
3385 switch (TYPE_LENGTH (t))
3386 {
3387 case 4:
3388 if (*base_type == VFP_CPRC_UNKNOWN)
3389 *base_type = VFP_CPRC_SINGLE;
3390 else if (*base_type != VFP_CPRC_SINGLE)
3391 return -1;
3392 return 1;
3393
3394 case 8:
3395 if (*base_type == VFP_CPRC_UNKNOWN)
3396 *base_type = VFP_CPRC_DOUBLE;
3397 else if (*base_type != VFP_CPRC_DOUBLE)
3398 return -1;
3399 return 1;
3400
3401 default:
3402 return -1;
3403 }
3404 break;
3405
3406 case TYPE_CODE_ARRAY:
3407 {
3408 int count;
3409 unsigned unitlen;
3410 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3411 if (count == -1)
3412 return -1;
3413 if (TYPE_LENGTH (t) == 0)
3414 {
3415 gdb_assert (count == 0);
3416 return 0;
3417 }
3418 else if (count == 0)
3419 return -1;
3420 unitlen = arm_vfp_cprc_unit_length (*base_type);
3421 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3422 return TYPE_LENGTH (t) / unitlen;
3423 }
3424 break;
3425
3426 case TYPE_CODE_STRUCT:
3427 {
3428 int count = 0;
3429 unsigned unitlen;
3430 int i;
3431 for (i = 0; i < TYPE_NFIELDS (t); i++)
3432 {
3433 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3434 base_type);
3435 if (sub_count == -1)
3436 return -1;
3437 count += sub_count;
3438 }
3439 if (TYPE_LENGTH (t) == 0)
3440 {
3441 gdb_assert (count == 0);
3442 return 0;
3443 }
3444 else if (count == 0)
3445 return -1;
3446 unitlen = arm_vfp_cprc_unit_length (*base_type);
3447 if (TYPE_LENGTH (t) != unitlen * count)
3448 return -1;
3449 return count;
3450 }
3451
3452 case TYPE_CODE_UNION:
3453 {
3454 int count = 0;
3455 unsigned unitlen;
3456 int i;
3457 for (i = 0; i < TYPE_NFIELDS (t); i++)
3458 {
3459 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3460 base_type);
3461 if (sub_count == -1)
3462 return -1;
3463 count = (count > sub_count ? count : sub_count);
3464 }
3465 if (TYPE_LENGTH (t) == 0)
3466 {
3467 gdb_assert (count == 0);
3468 return 0;
3469 }
3470 else if (count == 0)
3471 return -1;
3472 unitlen = arm_vfp_cprc_unit_length (*base_type);
3473 if (TYPE_LENGTH (t) != unitlen * count)
3474 return -1;
3475 return count;
3476 }
3477
3478 default:
3479 break;
3480 }
3481
3482 return -1;
3483}
3484
3485/* Determine whether T is a VFP co-processor register candidate (CPRC)
3486 if passed to or returned from a non-variadic function with the VFP
3487 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3488 *BASE_TYPE to the base type for T and *COUNT to the number of
3489 elements of that base type before returning. */
3490
3491static int
3492arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3493 int *count)
3494{
3495 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3496 int c = arm_vfp_cprc_sub_candidate (t, &b);
3497 if (c <= 0 || c > 4)
3498 return 0;
3499 *base_type = b;
3500 *count = c;
3501 return 1;
3502}
3503
3504/* Return 1 if the VFP ABI should be used for passing arguments to and
3505 returning values from a function of type FUNC_TYPE, 0
3506 otherwise. */
3507
3508static int
3509arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3510{
3511 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3512 /* Variadic functions always use the base ABI. Assume that functions
3513 without debug info are not variadic. */
3514 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3515 return 0;
3516 /* The VFP ABI is only supported as a variant of AAPCS. */
3517 if (tdep->arm_abi != ARM_ABI_AAPCS)
3518 return 0;
3519 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3520}
3521
3522/* We currently only support passing parameters in integer registers (which
3523 conforms with GCC's default model) and VFP argument passing following
3524 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3525 we should probably support some of them based on the selected ABI. */
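/* For instance (illustrative only), for "void f (int a, double d)" the
   code below places A in r0 and, after skipping r1 so that the
   doubleword lands in an even register pair, D in r2/r3; under the VFP
   variant of AAPCS, A still goes in r0 while D goes in d0, leaving
   r1-r3 free for further integer arguments. */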
3526
3527static CORE_ADDR
7d9b040b 3528arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3529 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3530 struct value **args, CORE_ADDR sp, int struct_return,
3531 CORE_ADDR struct_addr)
2dd604e7 3532{
e17a4113 3533 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3534 int argnum;
3535 int argreg;
3536 int nstack;
3537 struct stack_item *si = NULL;
90445bd3
DJ
3538 int use_vfp_abi;
3539 struct type *ftype;
3540 unsigned vfp_regs_free = (1 << 16) - 1;
3541
3542 /* Determine the type of this function and whether the VFP ABI
3543 applies. */
3544 ftype = check_typedef (value_type (function));
3545 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3546 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3547 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3548
6a65450a
AC
3549 /* Set the return address. For the ARM, the return breakpoint is
3550 always at BP_ADDR. */
9779414d 3551 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3552 bp_addr |= 1;
6a65450a 3553 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3554
3555 /* Walk through the list of args and determine how large a temporary
3556 stack is required. Need to take care here as structs may be
7a9dd1b2 3557 passed on the stack, and we have to push them. */
2dd604e7
RE
3558 nstack = 0;
3559
3560 argreg = ARM_A1_REGNUM;
3561 nstack = 0;
3562
2dd604e7
RE
3563 /* The struct_return pointer occupies the first parameter
3564 passing register. */
3565 if (struct_return)
3566 {
3567 if (arm_debug)
5af949e3 3568 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3569 gdbarch_register_name (gdbarch, argreg),
5af949e3 3570 paddress (gdbarch, struct_addr));
2dd604e7
RE
3571 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3572 argreg++;
3573 }
3574
3575 for (argnum = 0; argnum < nargs; argnum++)
3576 {
3577 int len;
3578 struct type *arg_type;
3579 struct type *target_type;
3580 enum type_code typecode;
8c6363cf 3581 const bfd_byte *val;
2af48f68 3582 int align;
90445bd3
DJ
3583 enum arm_vfp_cprc_base_type vfp_base_type;
3584 int vfp_base_count;
3585 int may_use_core_reg = 1;
2dd604e7 3586
df407dfe 3587 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3588 len = TYPE_LENGTH (arg_type);
3589 target_type = TYPE_TARGET_TYPE (arg_type);
3590 typecode = TYPE_CODE (arg_type);
8c6363cf 3591 val = value_contents (args[argnum]);
2dd604e7 3592
2af48f68
PB
3593 align = arm_type_align (arg_type);
3594 /* Round alignment up to a whole number of words. */
3595 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3596 /* Different ABIs have different maximum alignments. */
3597 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3598 {
3599 /* The APCS ABI only requires word alignment. */
3600 align = INT_REGISTER_SIZE;
3601 }
3602 else
3603 {
3604 /* The AAPCS requires at most doubleword alignment. */
3605 if (align > INT_REGISTER_SIZE * 2)
3606 align = INT_REGISTER_SIZE * 2;
3607 }
3608
90445bd3
DJ
3609 if (use_vfp_abi
3610 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3611 &vfp_base_count))
3612 {
3613 int regno;
3614 int unit_length;
3615 int shift;
3616 unsigned mask;
3617
3618 /* Because this is a CPRC it cannot go in a core register or
3619 cause a core register to be skipped for alignment.
3620 Either it goes in VFP registers and the rest of this loop
3621 iteration is skipped for this argument, or it goes on the
3622 stack (and the stack alignment code is correct for this
3623 case). */
3624 may_use_core_reg = 0;
3625
3626 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3627 shift = unit_length / 4;
3628 mask = (1 << (shift * vfp_base_count)) - 1;
3629 for (regno = 0; regno < 16; regno += shift)
3630 if (((vfp_regs_free >> regno) & mask) == mask)
3631 break;
3632
3633 if (regno < 16)
3634 {
3635 int reg_char;
3636 int reg_scaled;
3637 int i;
3638
3639 vfp_regs_free &= ~(mask << regno);
3640 reg_scaled = regno / shift;
3641 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3642 for (i = 0; i < vfp_base_count; i++)
3643 {
3644 char name_buf[4];
3645 int regnum;
58d6951d
DJ
3646 if (reg_char == 'q')
3647 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3648 val + i * unit_length);
58d6951d
DJ
3649 else
3650 {
3651 sprintf (name_buf, "%c%d", reg_char, reg_scaled + i);
3652 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3653 strlen (name_buf));
3654 regcache_cooked_write (regcache, regnum,
3655 val + i * unit_length);
3656 }
90445bd3
DJ
3657 }
3658 continue;
3659 }
3660 else
3661 {
3662 /* This CPRC could not go in VFP registers, so all VFP
3663 registers are now marked as used. */
3664 vfp_regs_free = 0;
3665 }
3666 }
3667
2af48f68
PB
3668 /* Push stack padding for doubleword alignment. */
3669 if (nstack & (align - 1))
3670 {
3671 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3672 nstack += INT_REGISTER_SIZE;
3673 }
3674
3675 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3676 if (may_use_core_reg
3677 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3678 && align > INT_REGISTER_SIZE
3679 && argreg & 1)
3680 argreg++;
3681
2dd604e7
RE
3682 /* If the argument is a pointer to a function, and it is a
3683 Thumb function, create a LOCAL copy of the value and set
3684 the THUMB bit in it. */
3685 if (TYPE_CODE_PTR == typecode
3686 && target_type != NULL
f96b8fa0 3687 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3688 {
e17a4113 3689 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3690 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3691 {
8c6363cf
TT
3692 bfd_byte *copy = alloca (len);
3693 store_unsigned_integer (copy, len, byte_order,
e17a4113 3694 MAKE_THUMB_ADDR (regval));
8c6363cf 3695 val = copy;
2dd604e7
RE
3696 }
3697 }
3698
3699 /* Copy the argument to general registers or the stack in
3700 register-sized pieces. Large arguments are split between
3701 registers and stack. */
3702 while (len > 0)
3703 {
f0c9063c 3704 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
2dd604e7 3705
90445bd3 3706 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3707 {
3708 /* The argument is being passed in a general purpose
3709 register. */
e17a4113
UW
3710 CORE_ADDR regval
3711 = extract_unsigned_integer (val, partial_len, byte_order);
3712 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3713 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3714 if (arm_debug)
3715 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3716 argnum,
3717 gdbarch_register_name
2af46ca0 3718 (gdbarch, argreg),
f0c9063c 3719 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3720 regcache_cooked_write_unsigned (regcache, argreg, regval);
3721 argreg++;
3722 }
3723 else
3724 {
3725 /* Push the arguments onto the stack. */
3726 if (arm_debug)
3727 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3728 argnum, nstack);
f0c9063c
UW
3729 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3730 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3731 }
3732
3733 len -= partial_len;
3734 val += partial_len;
3735 }
3736 }
3737 /* If we have an odd number of words to push, then decrement the stack
3738 by one word now, so the first stack argument will be doubleword aligned. */
3739 if (nstack & 4)
3740 sp -= 4;
3741
3742 while (si)
3743 {
3744 sp -= si->len;
3745 write_memory (sp, si->data, si->len);
3746 si = pop_stack_item (si);
3747 }
3748
3749 /* Finally, update the SP register. */
3750 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3751
3752 return sp;
3753}
3754
f53f0d0b
PB
3755
3756/* Always align the frame to an 8-byte boundary. This is required on
3757 some platforms and harmless on the rest. */
3758
3759static CORE_ADDR
3760arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3761{
3762 /* Align the stack to eight bytes. */
3763 return sp & ~ (CORE_ADDR) 7;
3764}
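/* For example, arm_frame_align (gdbarch, 0xbefff7c5) yields 0xbefff7c0
   (illustrative value only). */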
3765
c906108c 3766static void
ed9a39eb 3767print_fpu_flags (int flags)
c906108c 3768{
c5aa993b
JM
3769 if (flags & (1 << 0))
3770 fputs ("IVO ", stdout);
3771 if (flags & (1 << 1))
3772 fputs ("DVZ ", stdout);
3773 if (flags & (1 << 2))
3774 fputs ("OFL ", stdout);
3775 if (flags & (1 << 3))
3776 fputs ("UFL ", stdout);
3777 if (flags & (1 << 4))
3778 fputs ("INX ", stdout);
3779 putchar ('\n');
c906108c
SS
3780}
3781
5e74b15c
RE
3782/* Print interesting information about the floating point processor
3783 (if present) or emulator. */
34e8f22d 3784static void
d855c300 3785arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3786 struct frame_info *frame, const char *args)
c906108c 3787{
9c9acae0 3788 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3789 int type;
3790
3791 type = (status >> 24) & 127;
edefbb7c
AC
3792 if (status & (1 << 31))
3793 printf (_("Hardware FPU type %d\n"), type);
3794 else
3795 printf (_("Software FPU type %d\n"), type);
3796 /* i18n: [floating point unit] mask */
3797 fputs (_("mask: "), stdout);
c5aa993b 3798 print_fpu_flags (status >> 16);
edefbb7c
AC
3799 /* i18n: [floating point unit] flags */
3800 fputs (_("flags: "), stdout);
c5aa993b 3801 print_fpu_flags (status);
c906108c
SS
3802}
3803
27067745
UW
3804/* Construct the ARM extended floating point type. */
3805static struct type *
3806arm_ext_type (struct gdbarch *gdbarch)
3807{
3808 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3809
3810 if (!tdep->arm_ext_type)
3811 tdep->arm_ext_type
e9bb382b 3812 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3813 floatformats_arm_ext);
3814
3815 return tdep->arm_ext_type;
3816}
3817
58d6951d
DJ
3818static struct type *
3819arm_neon_double_type (struct gdbarch *gdbarch)
3820{
3821 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3822
3823 if (tdep->neon_double_type == NULL)
3824 {
3825 struct type *t, *elem;
3826
3827 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3828 TYPE_CODE_UNION);
3829 elem = builtin_type (gdbarch)->builtin_uint8;
3830 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3831 elem = builtin_type (gdbarch)->builtin_uint16;
3832 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3833 elem = builtin_type (gdbarch)->builtin_uint32;
3834 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3835 elem = builtin_type (gdbarch)->builtin_uint64;
3836 append_composite_type_field (t, "u64", elem);
3837 elem = builtin_type (gdbarch)->builtin_float;
3838 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3839 elem = builtin_type (gdbarch)->builtin_double;
3840 append_composite_type_field (t, "f64", elem);
3841
3842 TYPE_VECTOR (t) = 1;
3843 TYPE_NAME (t) = "neon_d";
3844 tdep->neon_double_type = t;
3845 }
3846
3847 return tdep->neon_double_type;
3848}
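/* The type built above is roughly equivalent to the following C union
   (an illustrative sketch; GDB only ever builds it dynamically):

     union neon_d
     {
       uint8_t u8[8];
       uint16_t u16[4];
       uint32_t u32[2];
       uint64_t u64;
       float f32[2];
       double f64;
     };  */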
3849
3850/* FIXME: The vector types are not correctly ordered on big-endian
3851 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3852 bits of d0 - regardless of what unit size is being held in d0. So
3853 the offset of the first uint8 in d0 is 7, but the offset of the
3854 first float is 4. This code works as-is for little-endian
3855 targets. */
3856
3857static struct type *
3858arm_neon_quad_type (struct gdbarch *gdbarch)
3859{
3860 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3861
3862 if (tdep->neon_quad_type == NULL)
3863 {
3864 struct type *t, *elem;
3865
3866 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3867 TYPE_CODE_UNION);
3868 elem = builtin_type (gdbarch)->builtin_uint8;
3869 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3870 elem = builtin_type (gdbarch)->builtin_uint16;
3871 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3872 elem = builtin_type (gdbarch)->builtin_uint32;
3873 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3874 elem = builtin_type (gdbarch)->builtin_uint64;
3875 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3876 elem = builtin_type (gdbarch)->builtin_float;
3877 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3878 elem = builtin_type (gdbarch)->builtin_double;
3879 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3880
3881 TYPE_VECTOR (t) = 1;
3882 TYPE_NAME (t) = "neon_q";
3883 tdep->neon_quad_type = t;
3884 }
3885
3886 return tdep->neon_quad_type;
3887}
3888
34e8f22d
RE
3889/* Return the GDB type object for the "standard" data type of data in
3890 register N. */
3891
3892static struct type *
7a5ea0d4 3893arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 3894{
58d6951d
DJ
3895 int num_regs = gdbarch_num_regs (gdbarch);
3896
3897 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3898 && regnum >= num_regs && regnum < num_regs + 32)
3899 return builtin_type (gdbarch)->builtin_float;
3900
3901 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3902 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3903 return arm_neon_quad_type (gdbarch);
3904
3905 /* If the target description has register information, we are only
3906 in this function so that we can override the types of
3907 double-precision registers for NEON. */
3908 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3909 {
3910 struct type *t = tdesc_register_type (gdbarch, regnum);
3911
3912 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3913 && TYPE_CODE (t) == TYPE_CODE_FLT
3914 && gdbarch_tdep (gdbarch)->have_neon)
3915 return arm_neon_double_type (gdbarch);
3916 else
3917 return t;
3918 }
3919
34e8f22d 3920 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
3921 {
3922 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3923 return builtin_type (gdbarch)->builtin_void;
3924
3925 return arm_ext_type (gdbarch);
3926 }
e4c16157 3927 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 3928 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 3929 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 3930 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
3931 else if (regnum >= ARRAY_SIZE (arm_register_names))
3932 /* These registers are only supported on targets which supply
3933 an XML description. */
df4df182 3934 return builtin_type (gdbarch)->builtin_int0;
032758dc 3935 else
df4df182 3936 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
3937}
3938
ff6f572f
DJ
3939/* Map a DWARF register REGNUM onto the appropriate GDB register
3940 number. */
3941
3942static int
d3f73121 3943arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
3944{
3945 /* Core integer regs. */
3946 if (reg >= 0 && reg <= 15)
3947 return reg;
3948
3949 /* Legacy FPA encoding. These were once used in a way which
3950 overlapped with VFP register numbering, so their use is
3951 discouraged, but GDB doesn't support the ARM toolchain
3952 which used them for VFP. */
3953 if (reg >= 16 && reg <= 23)
3954 return ARM_F0_REGNUM + reg - 16;
3955
3956 /* New assignments for the FPA registers. */
3957 if (reg >= 96 && reg <= 103)
3958 return ARM_F0_REGNUM + reg - 96;
3959
3960 /* WMMX register assignments. */
3961 if (reg >= 104 && reg <= 111)
3962 return ARM_WCGR0_REGNUM + reg - 104;
3963
3964 if (reg >= 112 && reg <= 127)
3965 return ARM_WR0_REGNUM + reg - 112;
3966
3967 if (reg >= 192 && reg <= 199)
3968 return ARM_WC0_REGNUM + reg - 192;
3969
58d6951d
DJ
3970 /* VFP v2 registers. A double precision value is actually
3971 in d1 rather than s2, but the ABI only defines numbering
3972 for the single precision registers. This will "just work"
3973 in GDB for little endian targets (we'll read eight bytes,
3974 starting in s0 and then progressing to s1), but will be
3975 reversed on big endian targets with VFP. This won't
3976 be a problem for the new Neon quad registers; you're supposed
3977 to use DW_OP_piece for those. */
3978 if (reg >= 64 && reg <= 95)
3979 {
3980 char name_buf[4];
3981
3982 sprintf (name_buf, "s%d", reg - 64);
3983 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3984 strlen (name_buf));
3985 }
3986
3987 /* VFP v3 / Neon registers. This range is also used for VFP v2
3988 registers, except that it now describes d0 instead of s0. */
3989 if (reg >= 256 && reg <= 287)
3990 {
3991 char name_buf[4];
3992
3993 sprintf (name_buf, "d%d", reg - 256);
3994 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3995 strlen (name_buf));
3996 }
3997
ff6f572f
DJ
3998 return -1;
3999}
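/* A few sample mappings implied by the ranges above (illustrative
   only): DWARF register 3 -> r3, 97 -> FPA register f1, 112 -> iWMMXt
   wr0, 64 -> VFP single register "s0", 257 -> VFP/Neon double
   register "d1". */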
4000
26216b98
AC
4001/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4002static int
e7faf938 4003arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4004{
4005 int reg = regnum;
e7faf938 4006 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4007
ff6f572f
DJ
4008 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4009 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4010
4011 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4012 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4013
4014 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4015 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4016
26216b98
AC
4017 if (reg < NUM_GREGS)
4018 return SIM_ARM_R0_REGNUM + reg;
4019 reg -= NUM_GREGS;
4020
4021 if (reg < NUM_FREGS)
4022 return SIM_ARM_FP0_REGNUM + reg;
4023 reg -= NUM_FREGS;
4024
4025 if (reg < NUM_SREGS)
4026 return SIM_ARM_FPS_REGNUM + reg;
4027 reg -= NUM_SREGS;
4028
edefbb7c 4029 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4030}
34e8f22d 4031
a37b3cc0
AC
4032/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4033 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4034 It is thought that this is the floating-point register format on
4035 little-endian systems. */
c906108c 4036
ed9a39eb 4037static void
b508a996 4038convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4039 void *dbl, int endianess)
c906108c 4040{
a37b3cc0 4041 DOUBLEST d;
be8626e0
MD
4042
4043 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4044 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4045 else
4046 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4047 ptr, &d);
b508a996 4048 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4049}
4050
34e8f22d 4051static void
be8626e0
MD
4052convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4053 int endianess)
c906108c 4054{
a37b3cc0 4055 DOUBLEST d;
be8626e0 4056
b508a996 4057 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4058 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4059 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4060 else
4061 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4062 &d, dbl);
c906108c 4063}
ed9a39eb 4064
c906108c 4065static int
ed9a39eb 4066condition_true (unsigned long cond, unsigned long status_reg)
c906108c
SS
4067{
4068 if (cond == INST_AL || cond == INST_NV)
4069 return 1;
4070
4071 switch (cond)
4072 {
4073 case INST_EQ:
4074 return ((status_reg & FLAG_Z) != 0);
4075 case INST_NE:
4076 return ((status_reg & FLAG_Z) == 0);
4077 case INST_CS:
4078 return ((status_reg & FLAG_C) != 0);
4079 case INST_CC:
4080 return ((status_reg & FLAG_C) == 0);
4081 case INST_MI:
4082 return ((status_reg & FLAG_N) != 0);
4083 case INST_PL:
4084 return ((status_reg & FLAG_N) == 0);
4085 case INST_VS:
4086 return ((status_reg & FLAG_V) != 0);
4087 case INST_VC:
4088 return ((status_reg & FLAG_V) == 0);
4089 case INST_HI:
4090 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4091 case INST_LS:
4092 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4093 case INST_GE:
4094 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4095 case INST_LT:
4096 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4097 case INST_GT:
f8bf5763
PM
4098 return (((status_reg & FLAG_Z) == 0)
4099 && (((status_reg & FLAG_N) == 0)
4100 == ((status_reg & FLAG_V) == 0)));
c906108c 4101 case INST_LE:
f8bf5763
PM
4102 return (((status_reg & FLAG_Z) != 0)
4103 || (((status_reg & FLAG_N) == 0)
4104 != ((status_reg & FLAG_V) == 0)));
c906108c
SS
4105 }
4106 return 1;
4107}
4108
c906108c 4109static unsigned long
0b1b3e42
UW
4110shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4111 unsigned long pc_val, unsigned long status_reg)
c906108c
SS
4112{
4113 unsigned long res, shift;
4114 int rm = bits (inst, 0, 3);
4115 unsigned long shifttype = bits (inst, 5, 6);
c5aa993b
JM
4116
4117 if (bit (inst, 4))
c906108c
SS
4118 {
4119 int rs = bits (inst, 8, 11);
0b1b3e42
UW
4120 shift = (rs == 15 ? pc_val + 8
4121 : get_frame_register_unsigned (frame, rs)) & 0xFF;
c906108c
SS
4122 }
4123 else
4124 shift = bits (inst, 7, 11);
c5aa993b 4125
bf9f652a 4126 res = (rm == ARM_PC_REGNUM
0d39a070 4127 ? (pc_val + (bit (inst, 4) ? 12 : 8))
0b1b3e42 4128 : get_frame_register_unsigned (frame, rm));
c906108c
SS
4129
4130 switch (shifttype)
4131 {
c5aa993b 4132 case 0: /* LSL */
c906108c
SS
4133 res = shift >= 32 ? 0 : res << shift;
4134 break;
c5aa993b
JM
4135
4136 case 1: /* LSR */
c906108c
SS
4137 res = shift >= 32 ? 0 : res >> shift;
4138 break;
4139
c5aa993b
JM
4140 case 2: /* ASR */
4141 if (shift >= 32)
4142 shift = 31;
c906108c
SS
4143 res = ((res & 0x80000000L)
4144 ? ~((~res) >> shift) : res >> shift);
4145 break;
4146
c5aa993b 4147 case 3: /* ROR/RRX */
c906108c
SS
4148 shift &= 31;
4149 if (shift == 0)
4150 res = (res >> 1) | (carry ? 0x80000000L : 0);
4151 else
c5aa993b 4152 res = (res >> shift) | (res << (32 - shift));
c906108c
SS
4153 break;
4154 }
4155
4156 return res & 0xffffffff;
4157}
4158
c906108c
SS
4159/* Return number of 1-bits in VAL. */
4160
4161static int
ed9a39eb 4162bitcount (unsigned long val)
c906108c
SS
4163{
4164 int nbits;
4165 for (nbits = 0; val != 0; nbits++)
0963b4bd 4166 val &= val - 1; /* Delete rightmost 1-bit in val. */
c906108c
SS
4167 return nbits;
4168}
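/* For example, bitcount (0x2c) == 3; the loop above visits
   0x2c -> 0x28 -> 0x20 -> 0 (worked trace, for illustration). */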
4169
177321bd
DJ
4170/* Return the size in bytes of the complete Thumb instruction whose
4171 first halfword is INST1. */
4172
4173static int
4174thumb_insn_size (unsigned short inst1)
4175{
4176 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4177 return 4;
4178 else
4179 return 2;
4180}
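/* In other words, a halfword whose top five bits are 0b11101, 0b11110
   or 0b11111 starts a 32-bit Thumb-2 instruction; for example
   thumb_insn_size (0xf000) == 4 (first half of a BL/BLX) while
   thumb_insn_size (0x4700) == 2 (BX r0). Illustrative note derived
   from the masks above. */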
4181
4182static int
4183thumb_advance_itstate (unsigned int itstate)
4184{
4185 /* Preserve IT[7:5], the first three bits of the condition. Shift
4186 the upcoming condition flags left by one bit. */
4187 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4188
4189 /* If we have finished the IT block, clear the state. */
4190 if ((itstate & 0x0f) == 0)
4191 itstate = 0;
4192
4193 return itstate;
4194}
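/* Worked example (for illustration): with ITSTATE == 0xa8, i.e. a low
   nibble of 0x8 meaning one instruction left in the block, the
   expression above yields (0xa8 & 0xe0) | ((0xa8 << 1) & 0x1f) == 0xb0;
   its low nibble is zero, so the state collapses to 0 and the IT block
   is finished. */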
4195
4196/* Find the next PC after the current instruction executes. In some
4197 cases we can not statically determine the answer (see the IT state
4198 handling in this function); in that case, a breakpoint may be
4199 inserted in addition to the returned PC, which will be used to set
4200 another breakpoint by our caller. */
4201
ad527d2e 4202static CORE_ADDR
18819fa6 4203thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
c906108c 4204{
2af46ca0 4205 struct gdbarch *gdbarch = get_frame_arch (frame);
177321bd 4206 struct address_space *aspace = get_frame_address_space (frame);
e17a4113
UW
4207 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4208 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
c5aa993b 4209 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
e17a4113 4210 unsigned short inst1;
0963b4bd 4211 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
c906108c 4212 unsigned long offset;
177321bd 4213 ULONGEST status, itstate;
c906108c 4214
50e98be4
DJ
4215 nextpc = MAKE_THUMB_ADDR (nextpc);
4216 pc_val = MAKE_THUMB_ADDR (pc_val);
4217
e17a4113 4218 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
9d4fde75 4219
9dca5578
DJ
4220 /* Thumb-2 conditional execution support. There are eight bits in
4221 the CPSR which describe conditional execution state. Once
4222 reconstructed (they're in a funny order), the low five bits
4223 describe the low bit of the condition for each instruction and
4224 how many instructions remain. The high three bits describe the
4225 base condition. One of the low four bits will be set if an IT
4226 block is active. These bits read as zero on earlier
4227 processors. */
4228 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
177321bd 4229 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
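  /* That is, ITSTATE bits [7:2] come from CPSR bits [15:10] and ITSTATE
     bits [1:0] from CPSR bits [26:25] (spelled out here for clarity). */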
9dca5578 4230
177321bd
DJ
4231 /* If-Then handling. On GNU/Linux, where this routine is used, we
4232 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4233 can disable execution of the undefined instruction. So we might
4234 miss the breakpoint if we set it on a skipped conditional
4235 instruction. Because conditional instructions can change the
4236 flags, affecting the execution of further instructions, we may
4237 need to set two breakpoints. */
9dca5578 4238
177321bd
DJ
4239 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4240 {
4241 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4242 {
4243 /* An IT instruction. Because this instruction does not
4244 modify the flags, we can accurately predict the next
4245 executed instruction. */
4246 itstate = inst1 & 0x00ff;
4247 pc += thumb_insn_size (inst1);
4248
4249 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4250 {
0963b4bd
MS
4251 inst1 = read_memory_unsigned_integer (pc, 2,
4252 byte_order_for_code);
177321bd
DJ
4253 pc += thumb_insn_size (inst1);
4254 itstate = thumb_advance_itstate (itstate);
4255 }
4256
50e98be4 4257 return MAKE_THUMB_ADDR (pc);
177321bd
DJ
4258 }
4259 else if (itstate != 0)
4260 {
4261 /* We are in a conditional block. Check the condition. */
4262 if (! condition_true (itstate >> 4, status))
4263 {
4264 /* Advance to the next executed instruction. */
4265 pc += thumb_insn_size (inst1);
4266 itstate = thumb_advance_itstate (itstate);
4267
4268 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4269 {
0963b4bd
MS
4270 inst1 = read_memory_unsigned_integer (pc, 2,
4271 byte_order_for_code);
177321bd
DJ
4272 pc += thumb_insn_size (inst1);
4273 itstate = thumb_advance_itstate (itstate);
4274 }
4275
50e98be4 4276 return MAKE_THUMB_ADDR (pc);
177321bd
DJ
4277 }
4278 else if ((itstate & 0x0f) == 0x08)
4279 {
4280 /* This is the last instruction of the conditional
4281 block, and it is executed. We can handle it normally
4282 because the following instruction is not conditional,
4283 and we must handle it normally because it is
4284 permitted to branch. Fall through. */
4285 }
4286 else
4287 {
4288 int cond_negated;
4289
4290 /* There are conditional instructions after this one.
4291 If this instruction modifies the flags, then we can
4292 not predict what the next executed instruction will
4293 be. Fortunately, this instruction is architecturally
4294 forbidden to branch; we know it will fall through.
4295 Start by skipping past it. */
4296 pc += thumb_insn_size (inst1);
4297 itstate = thumb_advance_itstate (itstate);
4298
4299 /* Set a breakpoint on the following instruction. */
4300 gdb_assert ((itstate & 0x0f) != 0);
18819fa6
UW
4301 arm_insert_single_step_breakpoint (gdbarch, aspace,
4302 MAKE_THUMB_ADDR (pc));
177321bd
DJ
4303 cond_negated = (itstate >> 4) & 1;
4304
4305 /* Skip all following instructions with the same
4306 condition. If there is a later instruction in the IT
4307 block with the opposite condition, set the other
4308 breakpoint there. If not, then set a breakpoint on
4309 the instruction after the IT block. */
4310 do
4311 {
0963b4bd
MS
4312 inst1 = read_memory_unsigned_integer (pc, 2,
4313 byte_order_for_code);
177321bd
DJ
4314 pc += thumb_insn_size (inst1);
4315 itstate = thumb_advance_itstate (itstate);
4316 }
4317 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4318
50e98be4 4319 return MAKE_THUMB_ADDR (pc);
177321bd
DJ
4320 }
4321 }
4322 }
4323 else if (itstate & 0x0f)
9dca5578
DJ
4324 {
4325 /* We are in a conditional block. Check the condition. */
177321bd 4326 int cond = itstate >> 4;
9dca5578
DJ
4327
4328 if (! condition_true (cond, status))
db24da6d
YQ
4329 /* Advance to the next instruction. All the 32-bit
4330 instructions share a common prefix. */
4331 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
177321bd
DJ
4332
4333 /* Otherwise, handle the instruction normally. */
9dca5578
DJ
4334 }
4335
c906108c
SS
4336 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4337 {
4338 CORE_ADDR sp;
4339
4340 /* Fetch the saved PC from the stack. It's stored above
4341 all of the other registers. */
f0c9063c 4342 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
0b1b3e42 4343 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
e17a4113 4344 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
c906108c
SS
4345 }
4346 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4347 {
c5aa993b 4348 unsigned long cond = bits (inst1, 8, 11);
25b41d01
YQ
4349 if (cond == 0x0f) /* 0x0f = SWI */
4350 {
4351 struct gdbarch_tdep *tdep;
4352 tdep = gdbarch_tdep (gdbarch);
4353
4354 if (tdep->syscall_next_pc != NULL)
4355 nextpc = tdep->syscall_next_pc (frame);
4356
4357 }
4358 else if (cond != 0x0f && condition_true (cond, status))
c906108c
SS
4359 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4360 }
4361 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4362 {
4363 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4364 }
db24da6d 4365 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
c906108c 4366 {
e17a4113
UW
4367 unsigned short inst2;
4368 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
9dca5578
DJ
4369
4370 /* Default to the next instruction. */
4371 nextpc = pc + 4;
50e98be4 4372 nextpc = MAKE_THUMB_ADDR (nextpc);
9dca5578
DJ
4373
4374 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4375 {
4376 /* Branches and miscellaneous control instructions. */
4377
4378 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4379 {
4380 /* B, BL, BLX. */
4381 int j1, j2, imm1, imm2;
4382
4383 imm1 = sbits (inst1, 0, 10);
4384 imm2 = bits (inst2, 0, 10);
4385 j1 = bit (inst2, 13);
4386 j2 = bit (inst2, 11);
4387
4388 offset = ((imm1 << 12) + (imm2 << 1));
4389 offset ^= ((!j2) << 22) | ((!j1) << 23);
4390
4391 nextpc = pc_val + offset;
4392 /* For BLX make sure to clear the low bits. */
4393 if (bit (inst2, 12) == 0)
4394 nextpc = nextpc & 0xfffffffc;
4395 }
4396 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4397 {
4398 /* SUBS PC, LR, #imm8. */
4399 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4400 nextpc -= inst2 & 0x00ff;
4401 }
4069ebbe 4402 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
9dca5578
DJ
4403 {
4404 /* Conditional branch. */
4405 if (condition_true (bits (inst1, 6, 9), status))
4406 {
4407 int sign, j1, j2, imm1, imm2;
4408
4409 sign = sbits (inst1, 10, 10);
4410 imm1 = bits (inst1, 0, 5);
4411 imm2 = bits (inst2, 0, 10);
4412 j1 = bit (inst2, 13);
4413 j2 = bit (inst2, 11);
4414
4415 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4416 offset += (imm1 << 12) + (imm2 << 1);
4417
4418 nextpc = pc_val + offset;
4419 }
4420 }
4421 }
4422 else if ((inst1 & 0xfe50) == 0xe810)
4423 {
4424 /* Load multiple or RFE. */
4425 int rn, offset, load_pc = 1;
4426
4427 rn = bits (inst1, 0, 3);
4428 if (bit (inst1, 7) && !bit (inst1, 8))
4429 {
4430 /* LDMIA or POP */
4431 if (!bit (inst2, 15))
4432 load_pc = 0;
4433 offset = bitcount (inst2) * 4 - 4;
4434 }
4435 else if (!bit (inst1, 7) && bit (inst1, 8))
4436 {
4437 /* LDMDB */
4438 if (!bit (inst2, 15))
4439 load_pc = 0;
4440 offset = -4;
4441 }
4442 else if (bit (inst1, 7) && bit (inst1, 8))
4443 {
4444 /* RFEIA */
4445 offset = 0;
4446 }
4447 else if (!bit (inst1, 7) && !bit (inst1, 8))
4448 {
4449 /* RFEDB */
4450 offset = -8;
4451 }
4452 else
4453 load_pc = 0;
4454
4455 if (load_pc)
4456 {
4457 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4458 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4459 }
4460 }
4461 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4462 {
4463 /* MOV PC or MOVS PC. */
4464 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
50e98be4 4465 nextpc = MAKE_THUMB_ADDR (nextpc);
9dca5578
DJ
4466 }
4467 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4468 {
4469 /* LDR PC. */
4470 CORE_ADDR base;
4471 int rn, load_pc = 1;
4472
4473 rn = bits (inst1, 0, 3);
4474 base = get_frame_register_unsigned (frame, rn);
bf9f652a 4475 if (rn == ARM_PC_REGNUM)
9dca5578
DJ
4476 {
4477 base = (base + 4) & ~(CORE_ADDR) 0x3;
4478 if (bit (inst1, 7))
4479 base += bits (inst2, 0, 11);
4480 else
4481 base -= bits (inst2, 0, 11);
4482 }
4483 else if (bit (inst1, 7))
4484 base += bits (inst2, 0, 11);
4485 else if (bit (inst2, 11))
4486 {
4487 if (bit (inst2, 10))
4488 {
4489 if (bit (inst2, 9))
4490 base += bits (inst2, 0, 7);
4491 else
4492 base -= bits (inst2, 0, 7);
4493 }
4494 }
4495 else if ((inst2 & 0x0fc0) == 0x0000)
4496 {
4497 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4498 base += get_frame_register_unsigned (frame, rm) << shift;
4499 }
4500 else
4501 /* Reserved. */
4502 load_pc = 0;
4503
4504 if (load_pc)
4505 nextpc = get_frame_memory_unsigned (frame, base, 4);
4506 }
4507 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4508 {
4509 /* TBB. */
d476da0e
RE
4510 CORE_ADDR tbl_reg, table, offset, length;
4511
4512 tbl_reg = bits (inst1, 0, 3);
4513 if (tbl_reg == 0x0f)
4514 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4515 else
4516 table = get_frame_register_unsigned (frame, tbl_reg);
9dca5578 4517
9dca5578
DJ
4518 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4519 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4520 nextpc = pc_val + length;
4521 }
d476da0e 4522 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
9dca5578
DJ
4523 {
4524 /* TBH. */
d476da0e
RE
4525 CORE_ADDR tbl_reg, table, offset, length;
4526
4527 tbl_reg = bits (inst1, 0, 3);
4528 if (tbl_reg == 0x0f)
4529 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4530 else
4531 table = get_frame_register_unsigned (frame, tbl_reg);
9dca5578 4532
9dca5578
DJ
4533 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4534 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4535 nextpc = pc_val + length;
4536 }
c906108c 4537 }
aa17d93e 4538 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
9498281f
DJ
4539 {
4540 if (bits (inst1, 3, 6) == 0x0f)
4541 nextpc = pc_val;
4542 else
0b1b3e42 4543 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
9498281f 4544 }
ad8b5167
UW
4545 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4546 {
4547 if (bits (inst1, 3, 6) == 0x0f)
4548 nextpc = pc_val;
4549 else
4550 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4551
4552 nextpc = MAKE_THUMB_ADDR (nextpc);
4553 }
9dca5578
DJ
4554 else if ((inst1 & 0xf500) == 0xb100)
4555 {
4556 /* CBNZ or CBZ. */
4557 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4558 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4559
4560 if (bit (inst1, 11) && reg != 0)
4561 nextpc = pc_val + imm;
4562 else if (!bit (inst1, 11) && reg == 0)
4563 nextpc = pc_val + imm;
4564 }
c906108c
SS
4565 return nextpc;
4566}
4567
50e98be4 4568/* Get the raw next address. PC is the current program counter, in
18819fa6 4569 FRAME, which is assumed to be executing in ARM mode.
50e98be4
DJ
4570
4571 The value returned has the execution state of the next instruction
4572 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4573 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
0963b4bd
MS
4574 address. */
4575
50e98be4 4576static CORE_ADDR
18819fa6 4577arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
c906108c 4578{
2af46ca0 4579 struct gdbarch *gdbarch = get_frame_arch (frame);
e17a4113
UW
4580 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4581 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
c906108c
SS
4582 unsigned long pc_val;
4583 unsigned long this_instr;
4584 unsigned long status;
4585 CORE_ADDR nextpc;
4586
c906108c 4587 pc_val = (unsigned long) pc;
e17a4113 4588 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
9d4fde75 4589
0b1b3e42 4590 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
c5aa993b 4591 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
c906108c 4592
daddc3c1
DJ
4593 if (bits (this_instr, 28, 31) == INST_NV)
4594 switch (bits (this_instr, 24, 27))
4595 {
4596 case 0xa:
4597 case 0xb:
4598 {
4599 /* Branch with Link and change to Thumb. */
4600 nextpc = BranchDest (pc, this_instr);
4601 nextpc |= bit (this_instr, 24) << 1;
50e98be4 4602 nextpc = MAKE_THUMB_ADDR (nextpc);
daddc3c1
DJ
4603 break;
4604 }
4605 case 0xc:
4606 case 0xd:
4607 case 0xe:
4608 /* Coprocessor register transfer. */
4609 if (bits (this_instr, 12, 15) == 15)
4610 error (_("Invalid update to pc in instruction"));
4611 break;
4612 }
4613 else if (condition_true (bits (this_instr, 28, 31), status))
c906108c
SS
4614 {
4615 switch (bits (this_instr, 24, 27))
4616 {
c5aa993b 4617 case 0x0:
94c30b78 4618 case 0x1: /* data processing */
c5aa993b
JM
4619 case 0x2:
4620 case 0x3:
c906108c
SS
4621 {
4622 unsigned long operand1, operand2, result = 0;
4623 unsigned long rn;
4624 int c;
c5aa993b 4625
c906108c
SS
4626 if (bits (this_instr, 12, 15) != 15)
4627 break;
4628
4629 if (bits (this_instr, 22, 25) == 0
c5aa993b 4630 && bits (this_instr, 4, 7) == 9) /* multiply */
edefbb7c 4631 error (_("Invalid update to pc in instruction"));
c906108c 4632
9498281f 4633 /* BX <reg>, BLX <reg> */
e150acc7
PB
4634 if (bits (this_instr, 4, 27) == 0x12fff1
4635 || bits (this_instr, 4, 27) == 0x12fff3)
9498281f
DJ
4636 {
4637 rn = bits (this_instr, 0, 3);
bf9f652a
YQ
4638 nextpc = ((rn == ARM_PC_REGNUM)
4639 ? (pc_val + 8)
4640 : get_frame_register_unsigned (frame, rn));
4641
9498281f
DJ
4642 return nextpc;
4643 }
4644
0963b4bd 4645 /* Multiply into PC. */
c906108c
SS
4646 c = (status & FLAG_C) ? 1 : 0;
4647 rn = bits (this_instr, 16, 19);
bf9f652a
YQ
4648 operand1 = ((rn == ARM_PC_REGNUM)
4649 ? (pc_val + 8)
4650 : get_frame_register_unsigned (frame, rn));
c5aa993b 4651
c906108c
SS
4652 if (bit (this_instr, 25))
4653 {
4654 unsigned long immval = bits (this_instr, 0, 7);
4655 unsigned long rotate = 2 * bits (this_instr, 8, 11);
c5aa993b
JM
4656 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4657 & 0xffffffff;
c906108c 4658 }
0963b4bd
MS
4659 else /* operand 2 is a shifted register. */
4660 operand2 = shifted_reg_val (frame, this_instr, c,
4661 pc_val, status);
c5aa993b 4662
c906108c
SS
4663 switch (bits (this_instr, 21, 24))
4664 {
c5aa993b 4665 case 0x0: /*and */
c906108c
SS
4666 result = operand1 & operand2;
4667 break;
4668
c5aa993b 4669 case 0x1: /*eor */
c906108c
SS
4670 result = operand1 ^ operand2;
4671 break;
4672
c5aa993b 4673 case 0x2: /*sub */
c906108c
SS
4674 result = operand1 - operand2;
4675 break;
4676
c5aa993b 4677 case 0x3: /*rsb */
c906108c
SS
4678 result = operand2 - operand1;
4679 break;
4680
c5aa993b 4681 case 0x4: /*add */
c906108c
SS
4682 result = operand1 + operand2;
4683 break;
4684
c5aa993b 4685 case 0x5: /*adc */
c906108c
SS
4686 result = operand1 + operand2 + c;
4687 break;
4688
c5aa993b 4689 case 0x6: /*sbc */
c906108c
SS
4690 result = operand1 - operand2 + c;
4691 break;
4692
c5aa993b 4693 case 0x7: /*rsc */
c906108c
SS
4694 result = operand2 - operand1 + c;
4695 break;
4696
c5aa993b
JM
4697 case 0x8:
4698 case 0x9:
4699 case 0xa:
4700 case 0xb: /* tst, teq, cmp, cmn */
c906108c
SS
4701 result = (unsigned long) nextpc;
4702 break;
4703
c5aa993b 4704 case 0xc: /*orr */
c906108c
SS
4705 result = operand1 | operand2;
4706 break;
4707
c5aa993b 4708 case 0xd: /*mov */
c906108c
SS
4709 /* Always step into a function. */
4710 result = operand2;
c5aa993b 4711 break;
c906108c 4712
c5aa993b 4713 case 0xe: /*bic */
c906108c
SS
4714 result = operand1 & ~operand2;
4715 break;
4716
c5aa993b 4717 case 0xf: /*mvn */
c906108c
SS
4718 result = ~operand2;
4719 break;
4720 }
c906108c 4721
50e98be4
DJ
4722 /* In 26-bit APCS the bottom two bits of the result are
4723 ignored, and we always end up in ARM state. */
4724 if (!arm_apcs_32)
4725 nextpc = arm_addr_bits_remove (gdbarch, result);
4726 else
4727 nextpc = result;
4728
c906108c
SS
4729 break;
4730 }
c5aa993b
JM
4731
4732 case 0x4:
4733 case 0x5: /* data transfer */
4734 case 0x6:
4735 case 0x7:
c906108c
SS
4736 if (bit (this_instr, 20))
4737 {
4738 /* load */
4739 if (bits (this_instr, 12, 15) == 15)
4740 {
4741 /* rd == pc */
c5aa993b 4742 unsigned long rn;
c906108c 4743 unsigned long base;
c5aa993b 4744
c906108c 4745 if (bit (this_instr, 22))
edefbb7c 4746 error (_("Invalid update to pc in instruction"));
c906108c
SS
4747
4748 /* byte write to PC */
4749 rn = bits (this_instr, 16, 19);
bf9f652a
YQ
4750 base = ((rn == ARM_PC_REGNUM)
4751 ? (pc_val + 8)
4752 : get_frame_register_unsigned (frame, rn));
4753
c906108c
SS
4754 if (bit (this_instr, 24))
4755 {
4756 /* pre-indexed */
4757 int c = (status & FLAG_C) ? 1 : 0;
4758 unsigned long offset =
c5aa993b 4759 (bit (this_instr, 25)
0b1b3e42 4760 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
c5aa993b 4761 : bits (this_instr, 0, 11));
c906108c
SS
4762
4763 if (bit (this_instr, 23))
4764 base += offset;
4765 else
4766 base -= offset;
4767 }
51370a33
YQ
4768 nextpc =
4769 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4770 4, byte_order);
c906108c
SS
4771 }
4772 }
4773 break;
c5aa993b
JM
4774
4775 case 0x8:
4776 case 0x9: /* block transfer */
c906108c
SS
4777 if (bit (this_instr, 20))
4778 {
4779 /* LDM */
4780 if (bit (this_instr, 15))
4781 {
4782 /* loading pc */
4783 int offset = 0;
51370a33
YQ
4784 unsigned long rn_val
4785 = get_frame_register_unsigned (frame,
4786 bits (this_instr, 16, 19));
c906108c
SS
4787
4788 if (bit (this_instr, 23))
4789 {
4790 /* up */
4791 unsigned long reglist = bits (this_instr, 0, 14);
4792 offset = bitcount (reglist) * 4;
c5aa993b 4793 if (bit (this_instr, 24)) /* pre */
c906108c
SS
4794 offset += 4;
4795 }
4796 else if (bit (this_instr, 24))
4797 offset = -4;
c5aa993b 4798
51370a33
YQ
4799 nextpc =
4800 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4801 (rn_val + offset),
4802 4, byte_order);
c906108c
SS
4803 }
4804 }
4805 break;
c5aa993b
JM
4806
4807 case 0xb: /* branch & link */
4808 case 0xa: /* branch */
c906108c
SS
4809 {
4810 nextpc = BranchDest (pc, this_instr);
c906108c
SS
4811 break;
4812 }
c5aa993b
JM
4813
4814 case 0xc:
4815 case 0xd:
4816 case 0xe: /* coproc ops */
25b41d01 4817 break;
c5aa993b 4818 case 0xf: /* SWI */
25b41d01
YQ
4819 {
4820 struct gdbarch_tdep *tdep;
4821 tdep = gdbarch_tdep (gdbarch);
4822
4823 if (tdep->syscall_next_pc != NULL)
4824 nextpc = tdep->syscall_next_pc (frame);
4825
4826 }
c906108c
SS
4827 break;
4828
4829 default:
edefbb7c 4830 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
c906108c
SS
4831 return (pc);
4832 }
4833 }
4834
4835 return nextpc;
4836}
4837
18819fa6
UW
4838/* Determine next PC after current instruction executes. Will call either
4839 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
4840 loop is detected. */
4841
50e98be4
DJ
4842CORE_ADDR
4843arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
4844{
18819fa6
UW
4845 CORE_ADDR nextpc;
4846
4847 if (arm_frame_is_thumb (frame))
4848 {
4849 nextpc = thumb_get_next_pc_raw (frame, pc);
4850 if (nextpc == MAKE_THUMB_ADDR (pc))
4851 error (_("Infinite loop detected"));
4852 }
4853 else
4854 {
4855 nextpc = arm_get_next_pc_raw (frame, pc);
4856 if (nextpc == pc)
4857 error (_("Infinite loop detected"));
4858 }
4859
50e98be4
DJ
4860 return nextpc;
4861}
4862
18819fa6
UW
4863/* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4864 of the appropriate mode (as encoded in the PC value), even if this
4865 differs from what would be expected according to the symbol tables. */
4866
4867void
4868arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4869 struct address_space *aspace,
4870 CORE_ADDR pc)
4871{
4872 struct cleanup *old_chain
4873 = make_cleanup_restore_integer (&arm_override_mode);
4874
4875 arm_override_mode = IS_THUMB_ADDR (pc);
4876 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4877
4878 insert_single_step_breakpoint (gdbarch, aspace, pc);
4879
4880 do_cleanups (old_chain);
4881}
4882
35f73cfc
UW
4883/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
4884 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
4885 is found, attempt to step through it. A breakpoint is placed at the end of
4886 the sequence. */
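/* A typical sequence of this kind (an illustrative example in GNU
   assembler syntax, e.g. the expansion of an atomic increment) is:

       1: ldrex r1, [r0]
          adds r1, r1, #1
          strex r2, r1, [r0]
          cmp r2, #0
          bne 1b

   Stopping inside it would likely clear the exclusive monitor, so the
   STREX would keep failing under naive single-stepping; stepping over
   the whole sequence avoids that. */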
4887
4888static int
4889thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
4890{
4891 struct gdbarch *gdbarch = get_frame_arch (frame);
4892 struct address_space *aspace = get_frame_address_space (frame);
4893 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4894 CORE_ADDR pc = get_frame_pc (frame);
4895 CORE_ADDR breaks[2] = {-1, -1};
4896 CORE_ADDR loc = pc;
4897 unsigned short insn1, insn2;
4898 int insn_count;
4899 int index;
4900 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
4901 const int atomic_sequence_length = 16; /* Instruction sequence length. */
4902 ULONGEST status, itstate;
4903
4904 /* We currently do not support atomic sequences within an IT block. */
4905 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4906 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4907 if (itstate & 0x0f)
4908 return 0;
4909
4910 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
4911 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4912 loc += 2;
4913 if (thumb_insn_size (insn1) != 4)
4914 return 0;
4915
4916 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4917 loc += 2;
4918 if (!((insn1 & 0xfff0) == 0xe850
4919 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
4920 return 0;
4921
4922 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
4923 instructions. */
4924 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
4925 {
4926 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4927 loc += 2;
4928
4929 if (thumb_insn_size (insn1) != 4)
4930 {
4931 /* Assume that there is at most one conditional branch in the
4932 atomic sequence. If a conditional branch is found, put a
4933 breakpoint in its destination address. */
4934 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
4935 {
4936 if (last_breakpoint > 0)
4937 return 0; /* More than one conditional branch found,
4938 fallback to the standard code. */
4939
4940 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
4941 last_breakpoint++;
4942 }
4943
4944 /* We do not support atomic sequences that use any *other*
4945 instructions but conditional branches to change the PC.
4946 Fall back to standard code to avoid losing control of
4947 execution. */
4948 else if (thumb_instruction_changes_pc (insn1))
4949 return 0;
4950 }
4951 else
4952 {
4953 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
4954 loc += 2;
4955
4956 /* Assume that there is at most one conditional branch in the
4957 atomic sequence. If a conditional branch is found, put a
4958 breakpoint in its destination address. */
4959 if ((insn1 & 0xf800) == 0xf000
4960 && (insn2 & 0xd000) == 0x8000
4961 && (insn1 & 0x0380) != 0x0380)
4962 {
4963 int sign, j1, j2, imm1, imm2;
4964 unsigned int offset;
4965
4966 sign = sbits (insn1, 10, 10);
4967 imm1 = bits (insn1, 0, 5);
4968 imm2 = bits (insn2, 0, 10);
4969 j1 = bit (insn2, 13);
4970 j2 = bit (insn2, 11);
4971
4972 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4973 offset += (imm1 << 12) + (imm2 << 1);
4974
4975 if (last_breakpoint > 0)
4976 return 0; /* More than one conditional branch found,
4977 fallback to the standard code. */
4978
4979 breaks[1] = loc + offset;
4980 last_breakpoint++;
4981 }
4982
4983 /* We do not support atomic sequences that use any *other*
4984 instructions but conditional branches to change the PC.
4985 Fall back to standard code to avoid losing control of
4986 execution. */
4987 else if (thumb2_instruction_changes_pc (insn1, insn2))
4988 return 0;
4989
4990 /* If we find a strex{,b,h,d}, we're done. */
4991 if ((insn1 & 0xfff0) == 0xe840
4992 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
4993 break;
4994 }
4995 }
4996
4997 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
4998 if (insn_count == atomic_sequence_length)
4999 return 0;
5000
5001 /* Insert a breakpoint right after the end of the atomic sequence. */
5002 breaks[0] = loc;
5003
5004 /* Check for duplicated breakpoints. Check also for a breakpoint
5005 placed (branch instruction's destination) anywhere in sequence. */
5006 if (last_breakpoint
5007 && (breaks[1] == breaks[0]
5008 || (breaks[1] >= pc && breaks[1] < loc)))
5009 last_breakpoint = 0;
5010
5011 /* Effectively inserts the breakpoints. */
5012 for (index = 0; index <= last_breakpoint; index++)
5013 arm_insert_single_step_breakpoint (gdbarch, aspace,
5014 MAKE_THUMB_ADDR (breaks[index]));
5015
5016 return 1;
5017}
5018
5019static int
5020arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5021{
5022 struct gdbarch *gdbarch = get_frame_arch (frame);
5023 struct address_space *aspace = get_frame_address_space (frame);
5024 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5025 CORE_ADDR pc = get_frame_pc (frame);
5026 CORE_ADDR breaks[2] = {-1, -1};
5027 CORE_ADDR loc = pc;
5028 unsigned int insn;
5029 int insn_count;
5030 int index;
5031 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5032 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5033
5034 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5035 Note that we do not currently support conditionally executed atomic
5036 instructions. */
5037 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5038 loc += 4;
5039 if ((insn & 0xff9000f0) != 0xe1900090)
5040 return 0;
5041
5042 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5043 instructions. */
5044 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5045 {
5046 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5047 loc += 4;
5048
5049 /* Assume that there is at most one conditional branch in the atomic
5050 sequence. If a conditional branch is found, put a breakpoint in
5051 its destination address. */
5052 if (bits (insn, 24, 27) == 0xa)
5053 {
5054 if (last_breakpoint > 0)
5055 return 0; /* More than one conditional branch found, fallback
5056 to the standard single-step code. */
5057
5058 breaks[1] = BranchDest (loc - 4, insn);
5059 last_breakpoint++;
5060 }
5061
5062 /* We do not support atomic sequences that use any *other* instructions
5063 but conditional branches to change the PC. Fall back to standard
5064 code to avoid losing control of execution. */
5065 else if (arm_instruction_changes_pc (insn))
5066 return 0;
5067
5068 /* If we find a strex{,b,h,d}, we're done. */
5069 if ((insn & 0xff9000f0) == 0xe1800090)
5070 break;
5071 }
5072
5073 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5074 if (insn_count == atomic_sequence_length)
5075 return 0;
5076
5077 /* Insert a breakpoint right after the end of the atomic sequence. */
5078 breaks[0] = loc;
5079
5080 /* Check for duplicated breakpoints. Check also for a breakpoint
5081 placed (branch instruction's destination) anywhere in sequence. */
5082 if (last_breakpoint
5083 && (breaks[1] == breaks[0]
5084 || (breaks[1] >= pc && breaks[1] < loc)))
5085 last_breakpoint = 0;
5086
5087 /* Effectively inserts the breakpoints. */
5088 for (index = 0; index <= last_breakpoint; index++)
5089 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5090
5091 return 1;
5092}
5093
5094int
5095arm_deal_with_atomic_sequence (struct frame_info *frame)
5096{
5097 if (arm_frame_is_thumb (frame))
5098 return thumb_deal_with_atomic_sequence_raw (frame);
5099 else
5100 return arm_deal_with_atomic_sequence_raw (frame);
5101}
5102
9512d7fd
FN
5103/* single_step() is called just before we want to resume the inferior,
5104 if we want to single-step it but there is no hardware or kernel
5105 single-step support. We find the target of the coming instruction
e0cd558a 5106 and breakpoint it. */
9512d7fd 5107
190dce09 5108int
0b1b3e42 5109arm_software_single_step (struct frame_info *frame)
9512d7fd 5110{
a6d9a66e 5111 struct gdbarch *gdbarch = get_frame_arch (frame);
6c95b8df 5112 struct address_space *aspace = get_frame_address_space (frame);
35f73cfc
UW
5113 CORE_ADDR next_pc;
5114
5115 if (arm_deal_with_atomic_sequence (frame))
5116 return 1;
18819fa6 5117
35f73cfc 5118 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
18819fa6 5119 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
e6590a1b
UW
5120
5121 return 1;
9512d7fd 5122}
9512d7fd 5123
f9d67f43
DJ
5124/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5125 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5126 NULL if an error occurs. BUF is freed. */
5127
5128static gdb_byte *
5129extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5130 int old_len, int new_len)
5131{
5132 gdb_byte *new_buf, *middle;
5133 int bytes_to_read = new_len - old_len;
5134
5135 new_buf = xmalloc (new_len);
5136 memcpy (new_buf + bytes_to_read, buf, old_len);
5137 xfree (buf);
5138 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5139 {
5140 xfree (new_buf);
5141 return NULL;
5142 }
5143 return new_buf;
5144}
5145
5146/* An IT block is at most the 2-byte IT instruction followed by
5147 four 4-byte instructions. The furthest back we must search to
5148 find an IT block that affects the current instruction is thus
5149 2 + 3 * 4 == 14 bytes. */
5150#define MAX_IT_BLOCK_PREFIX 14
5151
5152/* Use a quick scan if there are more than this many bytes of
5153 code. */
5154#define IT_SCAN_THRESHOLD 32
5155
5156/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5157 A breakpoint in an IT block may not be hit, depending on the
5158 condition flags. */
5159static CORE_ADDR
5160arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5161{
5162 gdb_byte *buf;
5163 char map_type;
5164 CORE_ADDR boundary, func_start;
5165 int buf_len, buf2_len;
5166 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5167 int i, any, last_it, last_it_count;
5168
5169 /* If we are using BKPT breakpoints, none of this is necessary. */
5170 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5171 return bpaddr;
5172
5173 /* ARM mode does not have this problem. */
9779414d 5174 if (!arm_pc_is_thumb (gdbarch, bpaddr))
f9d67f43
DJ
5175 return bpaddr;
5176
5177 /* We are setting a breakpoint in Thumb code that could potentially
5178 contain an IT block. The first step is to find how much Thumb
5179 code there is; we do not need to read outside of known Thumb
5180 sequences. */
5181 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5182 if (map_type == 0)
5183 /* Thumb-2 code must have mapping symbols to have a chance. */
5184 return bpaddr;
5185
5186 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5187
5188 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5189 && func_start > boundary)
5190 boundary = func_start;
5191
5192 /* Search for a candidate IT instruction. We have to do some fancy
5193 footwork to distinguish a real IT instruction from the second
5194 half of a 32-bit instruction, but there is no need for that if
5195 there's no candidate. */
5196 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5197 if (buf_len == 0)
5198 /* No room for an IT instruction. */
5199 return bpaddr;
5200
5201 buf = xmalloc (buf_len);
5202 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5203 return bpaddr;
5204 any = 0;
5205 for (i = 0; i < buf_len; i += 2)
5206 {
5207 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5208 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5209 {
5210 any = 1;
5211 break;
5212 }
5213 }
5214 if (any == 0)
5215 {
5216 xfree (buf);
5217 return bpaddr;
5218 }
5219
5220 /* OK, the code bytes before this instruction contain at least one
5221 halfword which resembles an IT instruction. We know that it's
5222 Thumb code, but there are still two possibilities. Either the
5223 halfword really is an IT instruction, or it is the second half of
5224 a 32-bit Thumb instruction. The only way we can tell is to
5225 scan forwards from a known instruction boundary. */
5226 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5227 {
5228 int definite;
5229
5230 /* There's a lot of code before this instruction. Start with an
5231 optimistic search; it's easy to recognize halfwords that can
5232 not be the start of a 32-bit instruction, and use that to
5233 lock on to the instruction boundaries. */
5234 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5235 if (buf == NULL)
5236 return bpaddr;
5237 buf_len = IT_SCAN_THRESHOLD;
5238
5239 definite = 0;
5240 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5241 {
5242 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5243 if (thumb_insn_size (inst1) == 2)
5244 {
5245 definite = 1;
5246 break;
5247 }
5248 }
5249
5250 /* At this point, if DEFINITE, BUF[I] is the first place we
5251 are sure that we know the instruction boundaries, and it is far
5252 enough from BPADDR that we could not miss an IT instruction
5253 affecting BPADDR. If ! DEFINITE, give up - start from a
5254 known boundary. */
5255 if (! definite)
5256 {
0963b4bd
MS
5257 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5258 bpaddr - boundary);
f9d67f43
DJ
5259 if (buf == NULL)
5260 return bpaddr;
5261 buf_len = bpaddr - boundary;
5262 i = 0;
5263 }
5264 }
5265 else
5266 {
5267 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5268 if (buf == NULL)
5269 return bpaddr;
5270 buf_len = bpaddr - boundary;
5271 i = 0;
5272 }
5273
5274 /* Scan forwards. Find the last IT instruction before BPADDR. */
5275 last_it = -1;
5276 last_it_count = 0;
5277 while (i < buf_len)
5278 {
5279 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5280 last_it_count--;
5281 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5282 {
5283 last_it = i;
5284 if (inst1 & 0x0001)
5285 last_it_count = 4;
5286 else if (inst1 & 0x0002)
5287 last_it_count = 3;
5288 else if (inst1 & 0x0004)
5289 last_it_count = 2;
5290 else
5291 last_it_count = 1;
5292 }
5293 i += thumb_insn_size (inst1);
5294 }
5295
5296 xfree (buf);
5297
5298 if (last_it == -1)
5299 /* There wasn't really an IT instruction after all. */
5300 return bpaddr;
5301
5302 if (last_it_count < 1)
5303 /* It was too far away. */
5304 return bpaddr;
5305
5306 /* This really is a trouble spot. Move the breakpoint to the IT
5307 instruction. */
5308 return bpaddr - buf_len + last_it;
5309}
5310
cca44b1b 5311/* ARM displaced stepping support.
c906108c 5312
cca44b1b 5313 Generally ARM displaced stepping works as follows:
c906108c 5314
cca44b1b
JB
5315 1. When an instruction is to be single-stepped, it is first decoded by
5316 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5317 Depending on the type of instruction, it is then copied to a scratch
5318 location, possibly in a modified form. The copy_* set of functions
0963b4bd 5319 performs such modification, as necessary. A breakpoint is placed after
cca44b1b
JB
5320 the modified instruction in the scratch space to return control to GDB.
5321 Note in particular that instructions which modify the PC will no longer
5322 do so after modification.
c5aa993b 5323
cca44b1b
JB
5324 2. The instruction is single-stepped, by setting the PC to the scratch
5325 location address, and resuming. Control returns to GDB when the
5326 breakpoint is hit.
c5aa993b 5327
cca44b1b
JB
5328 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5329 function used for the current instruction. This function's job is to
5330 put the CPU/memory state back to what it would have been if the
5331 instruction had been executed unmodified in its original location. */
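/* Illustrative sketch (not part of the original source) of the three
   phases described above, in pseudo-code; the scratch-buffer handling
   shown here is an assumption for the example only:

	// 1. Decode and copy, possibly in modified form.
	dsc = gdbarch_displaced_step_copy_insn (gdbarch, from, scratch, regs);

	// 2. Point the PC at the scratch copy and resume; GDB regains
	//    control at the breakpoint placed after the copied insn.
	regcache_write_pc (regs, scratch);
	... resume the inferior and wait for the breakpoint ...

	// 3. Undo the effects of executing out of line.
	if (dsc->cleanup != NULL)
	  dsc->cleanup (gdbarch, regs, dsc);
*/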
c5aa993b 5332
cca44b1b
JB
5333/* NOP instruction (mov r0, r0). */
5334#define ARM_NOP 0xe1a00000
34518530 5335#define THUMB_NOP 0x4600
cca44b1b
JB
5336
5337/* Helper for register reads for displaced stepping. In particular, this
5338 returns the PC as it would be seen by the instruction at its original
5339 location. */
5340
5341ULONGEST
36073a92
YQ
5342displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5343 int regno)
cca44b1b
JB
5344{
5345 ULONGEST ret;
36073a92 5346 CORE_ADDR from = dsc->insn_addr;
cca44b1b 5347
bf9f652a 5348 if (regno == ARM_PC_REGNUM)
cca44b1b 5349 {
4db71c0b
YQ
5350 /* Compute pipeline offset:
5351 - When executing an ARM instruction, PC reads as the address of the
5352 current instruction plus 8.
5353 - When executing a Thumb instruction, PC reads as the address of the
5354 current instruction plus 4. */
5355
36073a92 5356 if (!dsc->is_thumb)
4db71c0b
YQ
5357 from += 8;
5358 else
5359 from += 4;
5360
cca44b1b
JB
5361 if (debug_displaced)
5362 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
5363 (unsigned long) from);
5364 return (ULONGEST) from;
cca44b1b 5365 }
c906108c 5366 else
cca44b1b
JB
5367 {
5368 regcache_cooked_read_unsigned (regs, regno, &ret);
5369 if (debug_displaced)
5370 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5371 regno, (unsigned long) ret);
5372 return ret;
5373 }
c906108c
SS
5374}
5375
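/* Illustrative example (not part of the original source): for an ARM
   instruction at 0x8000, displaced_read_reg (regs, dsc, ARM_PC_REGNUM)
   returns 0x8008; for a Thumb instruction at the same address it returns
   0x8004.  Reads of any other register come straight from the register
   cache.  */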
cca44b1b
JB
5376static int
5377displaced_in_arm_mode (struct regcache *regs)
5378{
5379 ULONGEST ps;
9779414d 5380 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 5381
cca44b1b 5382 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 5383
9779414d 5384 return (ps & t_bit) == 0;
cca44b1b 5385}
66e810cd 5386
cca44b1b 5387/* Write to the PC as from a branch instruction. */
c906108c 5388
cca44b1b 5389static void
36073a92
YQ
5390branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5391 ULONGEST val)
c906108c 5392{
36073a92 5393 if (!dsc->is_thumb)
cca44b1b
JB
5394 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5395 architecture versions < 6. */
0963b4bd
MS
5396 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5397 val & ~(ULONGEST) 0x3);
cca44b1b 5398 else
0963b4bd
MS
5399 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5400 val & ~(ULONGEST) 0x1);
cca44b1b 5401}
66e810cd 5402
cca44b1b
JB
5403/* Write to the PC as from a branch-exchange instruction. */
5404
5405static void
5406bx_write_pc (struct regcache *regs, ULONGEST val)
5407{
5408 ULONGEST ps;
9779414d 5409 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
5410
5411 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5412
5413 if ((val & 1) == 1)
c906108c 5414 {
9779414d 5415 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
5416 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5417 }
5418 else if ((val & 2) == 0)
5419 {
9779414d 5420 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 5421 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
5422 }
5423 else
5424 {
cca44b1b
JB
5425 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5426 mode, align dest to 4 bytes). */
5427 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 5428 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 5429 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
5430 }
5431}
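/* Illustrative example (not part of the original source): with the logic
   above, bx_write_pc (regs, 0x8001) selects Thumb state and sets the PC
   to 0x8000; bx_write_pc (regs, 0x8000) selects ARM state with the PC at
   0x8000; and bx_write_pc (regs, 0x8002) hits the unpredictable case, so
   a warning is issued and execution continues in ARM state at 0x8000.  */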
ed9a39eb 5432
cca44b1b 5433/* Write to the PC as if from a load instruction. */
ed9a39eb 5434
34e8f22d 5435static void
36073a92
YQ
5436load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5437 ULONGEST val)
ed9a39eb 5438{
cca44b1b
JB
5439 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5440 bx_write_pc (regs, val);
5441 else
36073a92 5442 branch_write_pc (regs, dsc, val);
cca44b1b 5443}
be8626e0 5444
cca44b1b
JB
5445/* Write to the PC as if from an ALU instruction. */
5446
5447static void
36073a92
YQ
5448alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5449 ULONGEST val)
cca44b1b 5450{
36073a92 5451 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
5452 bx_write_pc (regs, val);
5453 else
36073a92 5454 branch_write_pc (regs, dsc, val);
cca44b1b
JB
5455}
5456
5457/* Helper for writing to registers for displaced stepping. Writing to the PC
5458 has varying effects depending on the instruction which does the write:
5459 this is controlled by the WRITE_PC argument. */
5460
5461void
5462displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5463 int regno, ULONGEST val, enum pc_write_style write_pc)
5464{
bf9f652a 5465 if (regno == ARM_PC_REGNUM)
08216dd7 5466 {
cca44b1b
JB
5467 if (debug_displaced)
5468 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5469 (unsigned long) val);
5470 switch (write_pc)
08216dd7 5471 {
cca44b1b 5472 case BRANCH_WRITE_PC:
36073a92 5473 branch_write_pc (regs, dsc, val);
08216dd7
RE
5474 break;
5475
cca44b1b
JB
5476 case BX_WRITE_PC:
5477 bx_write_pc (regs, val);
5478 break;
5479
5480 case LOAD_WRITE_PC:
36073a92 5481 load_write_pc (regs, dsc, val);
cca44b1b
JB
5482 break;
5483
5484 case ALU_WRITE_PC:
36073a92 5485 alu_write_pc (regs, dsc, val);
cca44b1b
JB
5486 break;
5487
5488 case CANNOT_WRITE_PC:
5489 warning (_("Instruction wrote to PC in an unexpected way when "
5490 "single-stepping"));
08216dd7
RE
5491 break;
5492
5493 default:
97b9747c
JB
5494 internal_error (__FILE__, __LINE__,
5495 _("Invalid argument to displaced_write_reg"));
08216dd7 5496 }
b508a996 5497
cca44b1b 5498 dsc->wrote_to_pc = 1;
b508a996 5499 }
ed9a39eb 5500 else
b508a996 5501 {
cca44b1b
JB
5502 if (debug_displaced)
5503 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5504 regno, (unsigned long) val);
5505 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 5506 }
34e8f22d
RE
5507}
5508
cca44b1b
JB
5509/* This function is used to concisely determine if an instruction INSN
5510 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
5511 corresponding fields of BITMASK set to 0b1111. The function
5512 returns 1 if any of these fields in INSN reference the PC
5513 (also 0b1111, r15), else it returns 0. */
67255d04
RE
5514
5515static int
cca44b1b 5516insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 5517{
cca44b1b 5518 uint32_t lowbit = 1;
67255d04 5519
cca44b1b
JB
5520 while (bitmask != 0)
5521 {
5522 uint32_t mask;
44e1a9eb 5523
cca44b1b
JB
5524 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5525 ;
67255d04 5526
cca44b1b
JB
5527 if (!lowbit)
5528 break;
67255d04 5529
cca44b1b 5530 mask = lowbit * 0xf;
67255d04 5531
cca44b1b
JB
5532 if ((insn & mask) == mask)
5533 return 1;
5534
5535 bitmask &= ~mask;
67255d04
RE
5536 }
5537
cca44b1b
JB
5538 return 0;
5539}
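/* Illustrative usage (not part of the original source): callers below
   pass a mask with 0xf in each register field they care about, e.g.

	insn_references_pc (insn, 0x000f0000ul)   -- does Rn (bits 16-19) name r15?
	insn_references_pc (insn, 0x000ff00ful)   -- do Rn, Rt/Rd or Rm name r15?

   so an instruction is copied unmodified whenever none of the checked
   fields is the PC.  */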
2af48f68 5540
cca44b1b
JB
5541/* The simplest copy function. Many instructions have the same effect no
5542 matter what address they are executed at: in those cases, use this. */
67255d04 5543
cca44b1b 5544static int
7ff120b4
YQ
5545arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5546 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
5547{
5548 if (debug_displaced)
5549 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5550 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5551 iname);
67255d04 5552
cca44b1b 5553 dsc->modinsn[0] = insn;
67255d04 5554
cca44b1b
JB
5555 return 0;
5556}
5557
34518530
YQ
5558static int
5559thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5560 uint16_t insn2, const char *iname,
5561 struct displaced_step_closure *dsc)
5562{
5563 if (debug_displaced)
5564 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5565 "opcode/class '%s' unmodified\n", insn1, insn2,
5566 iname);
5567
5568 dsc->modinsn[0] = insn1;
5569 dsc->modinsn[1] = insn2;
5570 dsc->numinsns = 2;
5571
5572 return 0;
5573}
5574
5575/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
5576 modification. */
5577static int
5578thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5579 const char *iname,
5580 struct displaced_step_closure *dsc)
5581{
5582 if (debug_displaced)
5583 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5584 "opcode/class '%s' unmodified\n", insn,
5585 iname);
5586
5587 dsc->modinsn[0] = insn;
5588
5589 return 0;
5590}
5591
cca44b1b
JB
5592/* Preload instructions with immediate offset. */
5593
5594static void
6e39997a 5595cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
5596 struct regcache *regs, struct displaced_step_closure *dsc)
5597{
5598 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5599 if (!dsc->u.preload.immed)
5600 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5601}
5602
7ff120b4
YQ
5603static void
5604install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5605 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 5606{
cca44b1b 5607 ULONGEST rn_val;
cca44b1b
JB
5608 /* Preload instructions:
5609
5610 {pli/pld} [rn, #+/-imm]
5611 ->
5612 {pli/pld} [r0, #+/-imm]. */
5613
36073a92
YQ
5614 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5615 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5616 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
5617 dsc->u.preload.immed = 1;
5618
cca44b1b 5619 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
5620}
5621
cca44b1b 5622static int
7ff120b4 5623arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
5624 struct displaced_step_closure *dsc)
5625{
5626 unsigned int rn = bits (insn, 16, 19);
cca44b1b 5627
7ff120b4
YQ
5628 if (!insn_references_pc (insn, 0x000f0000ul))
5629 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
5630
5631 if (debug_displaced)
5632 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5633 (unsigned long) insn);
5634
7ff120b4
YQ
5635 dsc->modinsn[0] = insn & 0xfff0ffff;
5636
5637 install_preload (gdbarch, regs, dsc, rn);
5638
5639 return 0;
5640}
5641
34518530
YQ
5642static int
5643thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5644 struct regcache *regs, struct displaced_step_closure *dsc)
5645{
5646 unsigned int rn = bits (insn1, 0, 3);
5647 unsigned int u_bit = bit (insn1, 7);
5648 int imm12 = bits (insn2, 0, 11);
5649 ULONGEST pc_val;
5650
5651 if (rn != ARM_PC_REGNUM)
5652 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5653
5654 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3
5655 and PLD (literal) Encoding T1. */
5656 if (debug_displaced)
5657 fprintf_unfiltered (gdb_stdlog,
5658 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5659 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5660 imm12);
5661
5662 if (!u_bit)
5663 imm12 = -1 * imm12;
5664
5665 /* Rewrite instruction {pli/pld} PC imm12 into:
5666 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5667
5668 {pli/pld} [r0, r1]
5669
5670 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5671
5672 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5673 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5674
5675 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5676
5677 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5678 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5679 dsc->u.preload.immed = 0;
5680
5681 /* {pli/pld} [r0, r1] */
5682 dsc->modinsn[0] = insn1 & 0xfff0;
5683 dsc->modinsn[1] = 0xf001;
5684 dsc->numinsns = 2;
5685
5686 dsc->cleanup = &cleanup_preload;
5687 return 0;
5688}
5689
7ff120b4
YQ
5690/* Preload instructions with register offset. */
5691
5692static void
5693install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5694 struct displaced_step_closure *dsc, unsigned int rn,
5695 unsigned int rm)
5696{
5697 ULONGEST rn_val, rm_val;
5698
cca44b1b
JB
5699 /* Preload register-offset instructions:
5700
5701 {pli/pld} [rn, rm {, shift}]
5702 ->
5703 {pli/pld} [r0, r1 {, shift}]. */
5704
36073a92
YQ
5705 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5706 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5707 rn_val = displaced_read_reg (regs, dsc, rn);
5708 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5709 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5710 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
5711 dsc->u.preload.immed = 0;
5712
cca44b1b 5713 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
5714}
5715
5716static int
5717arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5718 struct regcache *regs,
5719 struct displaced_step_closure *dsc)
5720{
5721 unsigned int rn = bits (insn, 16, 19);
5722 unsigned int rm = bits (insn, 0, 3);
5723
5724
5725 if (!insn_references_pc (insn, 0x000f000ful))
5726 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5727
5728 if (debug_displaced)
5729 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5730 (unsigned long) insn);
5731
5732 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 5733
7ff120b4 5734 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
5735 return 0;
5736}
5737
5738/* Copy/cleanup coprocessor load and store instructions. */
5739
5740static void
6e39997a 5741cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
5742 struct regcache *regs,
5743 struct displaced_step_closure *dsc)
5744{
36073a92 5745 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5746
5747 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5748
5749 if (dsc->u.ldst.writeback)
5750 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5751}
5752
7ff120b4
YQ
5753static void
5754install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5755 struct displaced_step_closure *dsc,
5756 int writeback, unsigned int rn)
cca44b1b 5757{
cca44b1b 5758 ULONGEST rn_val;
cca44b1b 5759
cca44b1b
JB
5760 /* Coprocessor load/store instructions:
5761
5762 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5763 ->
5764 {stc/stc2} [r0, #+/-imm].
5765
5766 ldc/ldc2 are handled identically. */
5767
36073a92
YQ
5768 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5769 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
5770 /* PC should be 4-byte aligned. */
5771 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
5772 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5773
7ff120b4 5774 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5775 dsc->u.ldst.rn = rn;
5776
7ff120b4
YQ
5777 dsc->cleanup = &cleanup_copro_load_store;
5778}
5779
5780static int
5781arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5782 struct regcache *regs,
5783 struct displaced_step_closure *dsc)
5784{
5785 unsigned int rn = bits (insn, 16, 19);
5786
5787 if (!insn_references_pc (insn, 0x000f0000ul))
5788 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5789
5790 if (debug_displaced)
5791 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5792 "load/store insn %.8lx\n", (unsigned long) insn);
5793
cca44b1b
JB
5794 dsc->modinsn[0] = insn & 0xfff0ffff;
5795
7ff120b4 5796 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
5797
5798 return 0;
5799}
5800
34518530
YQ
5801static int
5802thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5803 uint16_t insn2, struct regcache *regs,
5804 struct displaced_step_closure *dsc)
5805{
5806 unsigned int rn = bits (insn1, 0, 3);
5807
5808 if (rn != ARM_PC_REGNUM)
5809 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5810 "copro load/store", dsc);
5811
5812 if (debug_displaced)
5813 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5814 "load/store insn %.4x%.4x\n", insn1, insn2);
5815
5816 dsc->modinsn[0] = insn1 & 0xfff0;
5817 dsc->modinsn[1] = insn2;
5818 dsc->numinsns = 2;
5819
5820 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5821 doesn't support writeback, so pass 0. */
5822 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5823
5824 return 0;
5825}
5826
cca44b1b
JB
5827/* Clean up branch instructions (actually perform the branch, by setting
5828 PC). */
5829
5830static void
6e39997a 5831cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5832 struct displaced_step_closure *dsc)
5833{
36073a92 5834 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5835 int branch_taken = condition_true (dsc->u.branch.cond, status);
5836 enum pc_write_style write_pc = dsc->u.branch.exchange
5837 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5838
5839 if (!branch_taken)
5840 return;
5841
5842 if (dsc->u.branch.link)
5843 {
8c8dba6d
YQ
5844 /* The value of LR should be the address of the insn after the current
5845 one. In order not to confuse logic handling a later `bx lr' insn, if the
5846 current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
5847 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5848
5849 if (dsc->is_thumb)
5850 next_insn_addr |= 0x1;
5851
5852 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5853 CANNOT_WRITE_PC);
cca44b1b
JB
5854 }
5855
bf9f652a 5856 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
5857}
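/* Illustrative example (not part of the original source): for a taken
   `bl' at 0x8000 in ARM state, dsc->insn_size is 4 and LR becomes 0x8004;
   for a 32-bit Thumb `bl' at the same address LR becomes 0x8005, with
   bit 0 set so that a later `bx lr' returns to Thumb state.  */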
5858
5859/* Copy B/BL/BLX instructions with immediate destinations. */
5860
7ff120b4
YQ
5861static void
5862install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5863 struct displaced_step_closure *dsc,
5864 unsigned int cond, int exchange, int link, long offset)
5865{
5866 /* Implement "BL<cond> <label>" as:
5867
5868 Preparation: cond <- instruction condition
5869 Insn: mov r0, r0 (nop)
5870 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5871
5872 B<cond> similar, but don't set r14 in cleanup. */
5873
5874 dsc->u.branch.cond = cond;
5875 dsc->u.branch.link = link;
5876 dsc->u.branch.exchange = exchange;
5877
2b16b2e3
YQ
5878 dsc->u.branch.dest = dsc->insn_addr;
5879 if (link && exchange)
5880 /* For BLX, offset is computed from the Align (PC, 4). */
5881 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5882
7ff120b4 5883 if (dsc->is_thumb)
2b16b2e3 5884 dsc->u.branch.dest += 4 + offset;
7ff120b4 5885 else
2b16b2e3 5886 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
5887
5888 dsc->cleanup = &cleanup_branch;
5889}
cca44b1b 5890static int
7ff120b4
YQ
5891arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5892 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5893{
5894 unsigned int cond = bits (insn, 28, 31);
5895 int exchange = (cond == 0xf);
5896 int link = exchange || bit (insn, 24);
cca44b1b
JB
5897 long offset;
5898
5899 if (debug_displaced)
5900 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5901 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5902 (unsigned long) insn);
cca44b1b
JB
5903 if (exchange)
5904 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5905 then arrange the switch into Thumb mode. */
5906 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5907 else
5908 offset = bits (insn, 0, 23) << 2;
5909
5910 if (bit (offset, 25))
5911 offset = offset | ~0x3ffffff;
5912
cca44b1b
JB
5913 dsc->modinsn[0] = ARM_NOP;
5914
7ff120b4 5915 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5916 return 0;
5917}
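/* Illustrative example (not part of the original source): a `bl' whose
   24-bit immediate field is 0xfffffe gives offset = 0x3fffff8 before sign
   extension; bit 25 is set, so offset becomes -8 and the cleanup routine
   writes insn_addr + 8 - 8 = insn_addr to the PC, i.e. this particular bl
   branches back to itself.  */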
5918
34518530
YQ
5919static int
5920thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5921 uint16_t insn2, struct regcache *regs,
5922 struct displaced_step_closure *dsc)
5923{
5924 int link = bit (insn2, 14);
5925 int exchange = link && !bit (insn2, 12);
5926 int cond = INST_AL;
5927 long offset = 0;
5928 int j1 = bit (insn2, 13);
5929 int j2 = bit (insn2, 11);
5930 int s = sbits (insn1, 10, 10);
5931 int i1 = !(j1 ^ bit (insn1, 10));
5932 int i2 = !(j2 ^ bit (insn1, 10));
5933
5934 if (!link && !exchange) /* B */
5935 {
5936 offset = (bits (insn2, 0, 10) << 1);
5937 if (bit (insn2, 12)) /* Encoding T4 */
5938 {
5939 offset |= (bits (insn1, 0, 9) << 12)
5940 | (i2 << 22)
5941 | (i1 << 23)
5942 | (s << 24);
5943 cond = INST_AL;
5944 }
5945 else /* Encoding T3 */
5946 {
5947 offset |= (bits (insn1, 0, 5) << 12)
5948 | (j1 << 18)
5949 | (j2 << 19)
5950 | (s << 20);
5951 cond = bits (insn1, 6, 9);
5952 }
5953 }
5954 else
5955 {
5956 offset = (bits (insn1, 0, 9) << 12);
5957 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5958 offset |= exchange ?
5959 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5960 }
5961
5962 if (debug_displaced)
5963 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5964 "%.4x %.4x with offset %.8lx\n",
5965 link ? (exchange) ? "blx" : "bl" : "b",
5966 insn1, insn2, offset);
5967
5968 dsc->modinsn[0] = THUMB_NOP;
5969
5970 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5971 return 0;
5972}
5973
5974/* Copy B Thumb instructions. */
5975static int
5976thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
5977 struct displaced_step_closure *dsc)
5978{
5979 unsigned int cond = 0;
5980 int offset = 0;
5981 unsigned short bit_12_15 = bits (insn, 12, 15);
5982 CORE_ADDR from = dsc->insn_addr;
5983
5984 if (bit_12_15 == 0xd)
5985 {
5986 /* offset = SignExtend (imm8:0, 32) */
5987 offset = sbits ((insn << 1), 0, 8);
5988 cond = bits (insn, 8, 11);
5989 }
5990 else if (bit_12_15 == 0xe) /* Encoding T2 */
5991 {
5992 offset = sbits ((insn << 1), 0, 11);
5993 cond = INST_AL;
5994 }
5995
5996 if (debug_displaced)
5997 fprintf_unfiltered (gdb_stdlog,
5998 "displaced: copying b immediate insn %.4x "
5999 "with offset %d\n", insn, offset);
6000
6001 dsc->u.branch.cond = cond;
6002 dsc->u.branch.link = 0;
6003 dsc->u.branch.exchange = 0;
6004 dsc->u.branch.dest = from + 4 + offset;
6005
6006 dsc->modinsn[0] = THUMB_NOP;
6007
6008 dsc->cleanup = &cleanup_branch;
6009
6010 return 0;
6011}
6012
cca44b1b
JB
6013/* Copy BX/BLX with register-specified destinations. */
6014
7ff120b4
YQ
6015static void
6016install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6017 struct displaced_step_closure *dsc, int link,
6018 unsigned int cond, unsigned int rm)
cca44b1b 6019{
cca44b1b
JB
6020 /* Implement "{BX,BLX}<cond> <reg>" as:
6021
6022 Preparation: cond <- instruction condition
6023 Insn: mov r0, r0 (nop)
6024 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6025
6026 Don't set r14 in cleanup for BX. */
6027
36073a92 6028 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
6029
6030 dsc->u.branch.cond = cond;
6031 dsc->u.branch.link = link;
cca44b1b 6032
7ff120b4 6033 dsc->u.branch.exchange = 1;
cca44b1b
JB
6034
6035 dsc->cleanup = &cleanup_branch;
7ff120b4 6036}
cca44b1b 6037
7ff120b4
YQ
6038static int
6039arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6040 struct regcache *regs, struct displaced_step_closure *dsc)
6041{
6042 unsigned int cond = bits (insn, 28, 31);
6043 /* BX: x12xxx1x
6044 BLX: x12xxx3x. */
6045 int link = bit (insn, 5);
6046 unsigned int rm = bits (insn, 0, 3);
6047
6048 if (debug_displaced)
6049 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6050 (unsigned long) insn);
6051
6052 dsc->modinsn[0] = ARM_NOP;
6053
6054 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
6055 return 0;
6056}
6057
34518530
YQ
6058static int
6059thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6060 struct regcache *regs,
6061 struct displaced_step_closure *dsc)
6062{
6063 int link = bit (insn, 7);
6064 unsigned int rm = bits (insn, 3, 6);
6065
6066 if (debug_displaced)
6067 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6068 (unsigned short) insn);
6069
6070 dsc->modinsn[0] = THUMB_NOP;
6071
6072 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6073
6074 return 0;
6075}
6076
6077
0963b4bd 6078/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
6079
6080static void
6e39997a 6081cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
6082 struct regcache *regs, struct displaced_step_closure *dsc)
6083{
36073a92 6084 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
6085 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6086 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6087 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6088}
6089
6090static int
7ff120b4
YQ
6091arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6092 struct displaced_step_closure *dsc)
cca44b1b
JB
6093{
6094 unsigned int rn = bits (insn, 16, 19);
6095 unsigned int rd = bits (insn, 12, 15);
6096 unsigned int op = bits (insn, 21, 24);
6097 int is_mov = (op == 0xd);
6098 ULONGEST rd_val, rn_val;
cca44b1b
JB
6099
6100 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 6101 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
6102
6103 if (debug_displaced)
6104 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6105 "%.8lx\n", is_mov ? "move" : "ALU",
6106 (unsigned long) insn);
6107
6108 /* Instruction is of form:
6109
6110 <op><cond> rd, [rn,] #imm
6111
6112 Rewrite as:
6113
6114 Preparation: tmp1, tmp2 <- r0, r1;
6115 r0, r1 <- rd, rn
6116 Insn: <op><cond> r0, r1, #imm
6117 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6118 */
6119
36073a92
YQ
6120 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6121 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6122 rn_val = displaced_read_reg (regs, dsc, rn);
6123 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
6124 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6125 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6126 dsc->rd = rd;
6127
6128 if (is_mov)
6129 dsc->modinsn[0] = insn & 0xfff00fff;
6130 else
6131 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6132
6133 dsc->cleanup = &cleanup_alu_imm;
6134
6135 return 0;
6136}
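/* Illustrative example (not part of the original source): for
   `add r2, pc, #4' at address 0x1000, the copied instruction becomes
   `add r0, r1, #4' with r1 preloaded to 0x1008 (the pipelined PC value);
   after the step, cleanup_alu_imm copies r0 back into r2 and restores
   the saved r0 and r1.  */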
6137
34518530
YQ
6138static int
6139thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6140 uint16_t insn2, struct regcache *regs,
6141 struct displaced_step_closure *dsc)
6142{
6143 unsigned int op = bits (insn1, 5, 8);
6144 unsigned int rn, rm, rd;
6145 ULONGEST rd_val, rn_val;
6146
6147 rn = bits (insn1, 0, 3); /* Rn */
6148 rm = bits (insn2, 0, 3); /* Rm */
6149 rd = bits (insn2, 8, 11); /* Rd */
6150
6151 /* This routine is only called for instruction MOV. */
6152 gdb_assert (op == 0x2 && rn == 0xf);
6153
6154 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6155 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6156
6157 if (debug_displaced)
6158 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6159 "ALU", insn1, insn2);
6160
6161 /* Instruction is of form:
6162
6163 <op><cond> rd, [rn,] #imm
6164
6165 Rewrite as:
6166
6167 Preparation: tmp1, tmp2 <- r0, r1;
6168 r0, r1 <- rd, rn
6169 Insn: <op><cond> r0, r1, #imm
6170 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6171 */
6172
6173 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6174 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6175 rn_val = displaced_read_reg (regs, dsc, rn);
6176 rd_val = displaced_read_reg (regs, dsc, rd);
6177 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6178 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6179 dsc->rd = rd;
6180
6181 dsc->modinsn[0] = insn1;
6182 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6183 dsc->numinsns = 2;
6184
6185 dsc->cleanup = &cleanup_alu_imm;
6186
6187 return 0;
6188}
6189
cca44b1b
JB
6190/* Copy/cleanup arithmetic/logic insns with register RHS. */
6191
6192static void
6e39997a 6193cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
6194 struct regcache *regs, struct displaced_step_closure *dsc)
6195{
6196 ULONGEST rd_val;
6197 int i;
6198
36073a92 6199 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
6200
6201 for (i = 0; i < 3; i++)
6202 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6203
6204 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6205}
6206
7ff120b4
YQ
6207static void
6208install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6209 struct displaced_step_closure *dsc,
6210 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 6211{
cca44b1b 6212 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 6213
cca44b1b
JB
6214 /* Instruction is of form:
6215
6216 <op><cond> rd, [rn,] rm [, <shift>]
6217
6218 Rewrite as:
6219
6220 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6221 r0, r1, r2 <- rd, rn, rm
6222 Insn: <op><cond> r0, r1, r2 [, <shift>]
6223 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6224 */
6225
36073a92
YQ
6226 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6227 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6228 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6229 rd_val = displaced_read_reg (regs, dsc, rd);
6230 rn_val = displaced_read_reg (regs, dsc, rn);
6231 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
6232 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6233 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6234 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6235 dsc->rd = rd;
6236
7ff120b4
YQ
6237 dsc->cleanup = &cleanup_alu_reg;
6238}
6239
6240static int
6241arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6242 struct displaced_step_closure *dsc)
6243{
6244 unsigned int op = bits (insn, 21, 24);
6245 int is_mov = (op == 0xd);
6246
6247 if (!insn_references_pc (insn, 0x000ff00ful))
6248 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6249
6250 if (debug_displaced)
6251 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6252 is_mov ? "move" : "ALU", (unsigned long) insn);
6253
cca44b1b
JB
6254 if (is_mov)
6255 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6256 else
6257 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6258
7ff120b4
YQ
6259 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6260 bits (insn, 0, 3));
cca44b1b
JB
6261 return 0;
6262}
6263
34518530
YQ
6264static int
6265thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6266 struct regcache *regs,
6267 struct displaced_step_closure *dsc)
6268{
6269 unsigned rn, rm, rd;
6270
6271 rd = bits (insn, 3, 6);
6272 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6273 rm = 2;
6274
6275 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6276 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6277
6278 if (debug_displaced)
6279 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6280 "ALU", (unsigned short) insn);
6281
6282 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6283
6284 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6285
6286 return 0;
6287}
6288
cca44b1b
JB
6289/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6290
6291static void
6e39997a 6292cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
6293 struct regcache *regs,
6294 struct displaced_step_closure *dsc)
6295{
36073a92 6296 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
6297 int i;
6298
6299 for (i = 0; i < 4; i++)
6300 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6301
6302 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6303}
6304
7ff120b4
YQ
6305static void
6306install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6307 struct displaced_step_closure *dsc,
6308 unsigned int rd, unsigned int rn, unsigned int rm,
6309 unsigned rs)
cca44b1b 6310{
7ff120b4 6311 int i;
cca44b1b 6312 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 6313
cca44b1b
JB
6314 /* Instruction is of form:
6315
6316 <op><cond> rd, [rn,] rm, <shift> rs
6317
6318 Rewrite as:
6319
6320 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6321 r0, r1, r2, r3 <- rd, rn, rm, rs
6322 Insn: <op><cond> r0, r1, r2, <shift> r3
6323 Cleanup: tmp5 <- r0
6324 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6325 rd <- tmp5
6326 */
6327
6328 for (i = 0; i < 4; i++)
36073a92 6329 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 6330
36073a92
YQ
6331 rd_val = displaced_read_reg (regs, dsc, rd);
6332 rn_val = displaced_read_reg (regs, dsc, rn);
6333 rm_val = displaced_read_reg (regs, dsc, rm);
6334 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
6335 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6336 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6337 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6338 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6339 dsc->rd = rd;
7ff120b4
YQ
6340 dsc->cleanup = &cleanup_alu_shifted_reg;
6341}
6342
6343static int
6344arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6345 struct regcache *regs,
6346 struct displaced_step_closure *dsc)
6347{
6348 unsigned int op = bits (insn, 21, 24);
6349 int is_mov = (op == 0xd);
6350 unsigned int rd, rn, rm, rs;
6351
6352 if (!insn_references_pc (insn, 0x000fff0ful))
6353 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6354
6355 if (debug_displaced)
6356 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6357 "%.8lx\n", is_mov ? "move" : "ALU",
6358 (unsigned long) insn);
6359
6360 rn = bits (insn, 16, 19);
6361 rm = bits (insn, 0, 3);
6362 rs = bits (insn, 8, 11);
6363 rd = bits (insn, 12, 15);
cca44b1b
JB
6364
6365 if (is_mov)
6366 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6367 else
6368 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6369
7ff120b4 6370 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
6371
6372 return 0;
6373}
6374
6375/* Clean up load instructions. */
6376
6377static void
6e39997a 6378cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
6379 struct displaced_step_closure *dsc)
6380{
6381 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 6382
36073a92 6383 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 6384 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
6385 rt_val2 = displaced_read_reg (regs, dsc, 1);
6386 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
6387
6388 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6389 if (dsc->u.ldst.xfersize > 4)
6390 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6391 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6392 if (!dsc->u.ldst.immed)
6393 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6394
6395 /* Handle register writeback. */
6396 if (dsc->u.ldst.writeback)
6397 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6398 /* Put result in right place. */
6399 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6400 if (dsc->u.ldst.xfersize == 8)
6401 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6402}
6403
6404/* Clean up store instructions. */
6405
6406static void
6e39997a 6407cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
6408 struct displaced_step_closure *dsc)
6409{
36073a92 6410 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
6411
6412 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6413 if (dsc->u.ldst.xfersize > 4)
6414 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6415 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6416 if (!dsc->u.ldst.immed)
6417 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6418 if (!dsc->u.ldst.restore_r4)
6419 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6420
6421 /* Writeback. */
6422 if (dsc->u.ldst.writeback)
6423 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6424}
6425
6426/* Copy "extra" load/store instructions. These are halfword/doubleword
6427 transfers, which have a different encoding from byte/word transfers. */
6428
6429static int
7ff120b4
YQ
6430arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6431 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6432{
6433 unsigned int op1 = bits (insn, 20, 24);
6434 unsigned int op2 = bits (insn, 5, 6);
6435 unsigned int rt = bits (insn, 12, 15);
6436 unsigned int rn = bits (insn, 16, 19);
6437 unsigned int rm = bits (insn, 0, 3);
6438 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6439 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6440 int immed = (op1 & 0x4) != 0;
6441 int opcode;
6442 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
6443
6444 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 6445 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
6446
6447 if (debug_displaced)
6448 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6449 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6450 (unsigned long) insn);
6451
6452 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6453
6454 if (opcode < 0)
6455 internal_error (__FILE__, __LINE__,
6456 _("copy_extra_ld_st: instruction decode error"));
6457
36073a92
YQ
6458 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6459 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6460 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 6461 if (!immed)
36073a92 6462 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 6463
36073a92 6464 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 6465 if (bytesize[opcode] == 8)
36073a92
YQ
6466 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6467 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 6468 if (!immed)
36073a92 6469 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
6470
6471 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6472 if (bytesize[opcode] == 8)
6473 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6474 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6475 if (!immed)
6476 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6477
6478 dsc->rd = rt;
6479 dsc->u.ldst.xfersize = bytesize[opcode];
6480 dsc->u.ldst.rn = rn;
6481 dsc->u.ldst.immed = immed;
6482 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6483 dsc->u.ldst.restore_r4 = 0;
6484
6485 if (immed)
6486 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6487 ->
6488 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6489 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6490 else
6491 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6492 ->
6493 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6494 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6495
6496 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6497
6498 return 0;
6499}
6500
0f6f04ba 6501/* Copy byte/half word/word loads and stores. */
cca44b1b 6502
7ff120b4 6503static void
0f6f04ba
YQ
6504install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6505 struct displaced_step_closure *dsc, int load,
6506 int immed, int writeback, int size, int usermode,
6507 int rt, int rm, int rn)
cca44b1b 6508{
cca44b1b 6509 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 6510
36073a92
YQ
6511 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6512 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 6513 if (!immed)
36073a92 6514 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 6515 if (!load)
36073a92 6516 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 6517
36073a92
YQ
6518 rt_val = displaced_read_reg (regs, dsc, rt);
6519 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 6520 if (!immed)
36073a92 6521 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
6522
6523 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6524 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6525 if (!immed)
6526 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 6527 dsc->rd = rt;
0f6f04ba 6528 dsc->u.ldst.xfersize = size;
cca44b1b
JB
6529 dsc->u.ldst.rn = rn;
6530 dsc->u.ldst.immed = immed;
7ff120b4 6531 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
6532
6533 /* To write PC we can do:
6534
494e194e
YQ
6535 Before this sequence of instructions:
6536 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
6537 r2 is the Rn value obtained from displaced_read_reg.
6538
6539 Insn1: push {pc} Write address of STR instruction + offset on stack
6540 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6541 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6542 = addr(Insn1) + offset - addr(Insn3) - 8
6543 = offset - 16
6544 Insn4: add r4, r4, #8 r4 = offset - 8
6545 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6546 = from + offset
6547 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
6548
6549 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
6550 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6551 of this can be found in Section "Saving from r15" in
6552 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 6553
7ff120b4
YQ
6554 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6555}
6556
34518530
YQ
6557
6558static int
6559thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6560 uint16_t insn2, struct regcache *regs,
6561 struct displaced_step_closure *dsc, int size)
6562{
6563 unsigned int u_bit = bit (insn1, 7);
6564 unsigned int rt = bits (insn2, 12, 15);
6565 int imm12 = bits (insn2, 0, 11);
6566 ULONGEST pc_val;
6567
6568 if (debug_displaced)
6569 fprintf_unfiltered (gdb_stdlog,
6570 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6571 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6572 imm12);
6573
6574 if (!u_bit)
6575 imm12 = -1 * imm12;
6576
6577 /* Rewrite instruction LDR Rt imm12 into:
6578
6579 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
6580
6581 ldr r0, [r2, r3]
6582
6583 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
6584
6585
6586 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6587 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6588 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6589
6590 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6591
6592 pc_val = pc_val & 0xfffffffc;
6593
6594 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6595 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6596
6597 dsc->rd = rt;
6598
6599 dsc->u.ldst.xfersize = size;
6600 dsc->u.ldst.immed = 0;
6601 dsc->u.ldst.writeback = 0;
6602 dsc->u.ldst.restore_r4 = 0;
6603
6604 /* ldr r0, [r2, r3] */
6605 dsc->modinsn[0] = 0xf852;
6606 dsc->modinsn[1] = 0x3;
6607 dsc->numinsns = 2;
6608
6609 dsc->cleanup = &cleanup_load;
6610
6611 return 0;
6612}
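/* Illustrative example (not part of the original source): for
   `ldr.w r5, [pc, #-20]' at 0x2000, the Thumb PC value is 0x2004 (already
   word-aligned), so r2 is preloaded with 0x2004 and r3 with -20; the
   copied `ldr r0, [r2, r3]' then reads from 0x1ff0, and cleanup_load
   moves r0 into r5 and restores r0, r2 and r3.  */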
6613
6614static int
6615thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6616 uint16_t insn2, struct regcache *regs,
6617 struct displaced_step_closure *dsc,
6618 int writeback, int immed)
6619{
6620 unsigned int rt = bits (insn2, 12, 15);
6621 unsigned int rn = bits (insn1, 0, 3);
6622 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6623 /* In LDR (register), there is also a register Rm, which is not allowed to
6624 be PC, so we don't have to check it. */
6625
6626 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6627 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6628 dsc);
6629
6630 if (debug_displaced)
6631 fprintf_unfiltered (gdb_stdlog,
6632 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6633 rt, rn, insn1, insn2);
6634
6635 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6636 0, rt, rm, rn);
6637
6638 dsc->u.ldst.restore_r4 = 0;
6639
6640 if (immed)
6641 /* ldr[b]<cond> rt, [rn, #imm], etc.
6642 ->
6643 ldr[b]<cond> r0, [r2, #imm]. */
6644 {
6645 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6646 dsc->modinsn[1] = insn2 & 0x0fff;
6647 }
6648 else
6649 /* ldr[b]<cond> rt, [rn, rm], etc.
6650 ->
6651 ldr[b]<cond> r0, [r2, r3]. */
6652 {
6653 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6654 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6655 }
6656
6657 dsc->numinsns = 2;
6658
6659 return 0;
6660}
6661
6662
7ff120b4
YQ
6663static int
6664arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6665 struct regcache *regs,
6666 struct displaced_step_closure *dsc,
0f6f04ba 6667 int load, int size, int usermode)
7ff120b4
YQ
6668{
6669 int immed = !bit (insn, 25);
6670 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6671 unsigned int rt = bits (insn, 12, 15);
6672 unsigned int rn = bits (insn, 16, 19);
6673 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6674
6675 if (!insn_references_pc (insn, 0x000ff00ful))
6676 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6677
6678 if (debug_displaced)
6679 fprintf_unfiltered (gdb_stdlog,
6680 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
6681 load ? (size == 1 ? "ldrb" : "ldr")
6682 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
6683 rt, rn,
6684 (unsigned long) insn);
6685
0f6f04ba
YQ
6686 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6687 usermode, rt, rm, rn);
7ff120b4 6688
bf9f652a 6689 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
6690 {
6691 dsc->u.ldst.restore_r4 = 0;
6692
6693 if (immed)
6694 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6695 ->
6696 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6697 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6698 else
6699 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6700 ->
6701 {ldr,str}[b]<cond> r0, [r2, r3]. */
6702 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6703 }
6704 else
6705 {
6706 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6707 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
6708 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6709 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
6710 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6711 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6712 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6713
6714 /* As above. */
6715 if (immed)
6716 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6717 else
6718 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6719
cca44b1b
JB
6720 dsc->numinsns = 6;
6721 }
6722
6723 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6724
6725 return 0;
6726}
6727
6728/* Cleanup LDM instructions with fully-populated register list. This is an
6729 unfortunate corner case: it's impossible to implement correctly by modifying
6730 the instruction. The issue is as follows: we have an instruction,
6731
6732 ldm rN, {r0-r15}
6733
6734 which we must rewrite to avoid loading PC. A possible solution would be to
6735 do the load in two halves, something like (with suitable cleanup
6736 afterwards):
6737
6738 mov r8, rN
6739 ldm[id][ab] r8!, {r0-r7}
6740 str r7, <temp>
6741 ldm[id][ab] r8, {r7-r14}
6742 <bkpt>
6743
6744 but at present there's no suitable place for <temp>, since the scratch space
6745 is overwritten before the cleanup routine is called. For now, we simply
6746 emulate the instruction. */
6747
6748static void
6749cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6750 struct displaced_step_closure *dsc)
6751{
cca44b1b
JB
6752 int inc = dsc->u.block.increment;
6753 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6754 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6755 uint32_t regmask = dsc->u.block.regmask;
6756 int regno = inc ? 0 : 15;
6757 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6758 int exception_return = dsc->u.block.load && dsc->u.block.user
6759 && (regmask & 0x8000) != 0;
36073a92 6760 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
6761 int do_transfer = condition_true (dsc->u.block.cond, status);
6762 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6763
6764 if (!do_transfer)
6765 return;
6766
6767 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6768 sensible we can do here. Complain loudly. */
6769 if (exception_return)
6770 error (_("Cannot single-step exception return"));
6771
6772 /* We don't handle any stores here for now. */
6773 gdb_assert (dsc->u.block.load != 0);
6774
6775 if (debug_displaced)
6776 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6777 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6778 dsc->u.block.increment ? "inc" : "dec",
6779 dsc->u.block.before ? "before" : "after");
6780
6781 while (regmask)
6782 {
6783 uint32_t memword;
6784
6785 if (inc)
bf9f652a 6786 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
6787 regno++;
6788 else
6789 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6790 regno--;
6791
6792 xfer_addr += bump_before;
6793
6794 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6795 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6796
6797 xfer_addr += bump_after;
6798
6799 regmask &= ~(1 << regno);
6800 }
6801
6802 if (dsc->u.block.writeback)
6803 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6804 CANNOT_WRITE_PC);
6805}
6806
6807/* Clean up an STM which included the PC in the register list. */
6808
6809static void
6810cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6811 struct displaced_step_closure *dsc)
6812{
36073a92 6813 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
6814 int store_executed = condition_true (dsc->u.block.cond, status);
6815 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6816 CORE_ADDR stm_insn_addr;
6817 uint32_t pc_val;
6818 long offset;
6819 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6820
6821 /* If condition code fails, there's nothing else to do. */
6822 if (!store_executed)
6823 return;
6824
6825 if (dsc->u.block.increment)
6826 {
6827 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6828
6829 if (dsc->u.block.before)
6830 pc_stored_at += 4;
6831 }
6832 else
6833 {
6834 pc_stored_at = dsc->u.block.xfer_addr;
6835
6836 if (dsc->u.block.before)
6837 pc_stored_at -= 4;
6838 }
6839
6840 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6841 stm_insn_addr = dsc->scratch_base;
6842 offset = pc_val - stm_insn_addr;
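 /* On most implementations an ARM-state STM that includes the PC stores
    either the instruction address + 8 or + 12 (which one is implementation
    defined), so OFFSET is typically 8 or 12; reading the stored value back
    avoids hard-coding either choice. */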
6843
6844 if (debug_displaced)
6845 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
6846 "STM instruction\n", offset);
6847
6848 /* Rewrite the stored PC to the proper value for the non-displaced original
6849 instruction. */
6850 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6851 dsc->insn_addr + offset);
6852}
6853
6854/* Clean up an LDM which includes the PC in the register list. We clumped all
6855 the registers in the transferred list into a contiguous range r0...rX (to
6856 avoid loading PC directly and losing control of the debugged program), so we
6857 must undo that here. */
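/* For example, if the original instruction was ldm r1, {r2, r5, pc}, the
   out-of-line copy loaded the three values into r0, r1 and r2 instead.
   Below we move r2 into the PC, r1 into r5 and r0 into r2, and then restore
   from dsc->tmp[] any low registers we scribbled over that were not in the
   original register list. (Register numbers here are just an illustration.) */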
6858
6859static void
6e39997a 6860cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
6861 struct regcache *regs,
6862 struct displaced_step_closure *dsc)
6863{
36073a92 6864 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b 6865 int load_executed = condition_true (dsc->u.block.cond, status), i;
bf9f652a 6866 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
6867 unsigned int regs_loaded = bitcount (mask);
6868 unsigned int num_to_shuffle = regs_loaded, clobbered;
6869
6870 /* The method employed here will fail if the register list is fully populated
6871 (we need to avoid loading PC directly). */
6872 gdb_assert (num_to_shuffle < 16);
6873
6874 if (!load_executed)
6875 return;
6876
6877 clobbered = (1 << num_to_shuffle) - 1;
6878
6879 while (num_to_shuffle > 0)
6880 {
6881 if ((mask & (1 << write_reg)) != 0)
6882 {
6883 unsigned int read_reg = num_to_shuffle - 1;
6884
6885 if (read_reg != write_reg)
6886 {
36073a92 6887 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
6888 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6889 if (debug_displaced)
6890 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6891 "loaded register r%d to r%d\n"), read_reg,
6892 write_reg);
6893 }
6894 else if (debug_displaced)
6895 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6896 "r%d already in the right place\n"),
6897 write_reg);
6898
6899 clobbered &= ~(1 << write_reg);
6900
6901 num_to_shuffle--;
6902 }
6903
6904 write_reg--;
6905 }
6906
6907 /* Restore any registers we scribbled over. */
6908 for (write_reg = 0; clobbered != 0; write_reg++)
6909 {
6910 if ((clobbered & (1 << write_reg)) != 0)
6911 {
6912 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6913 CANNOT_WRITE_PC);
6914 if (debug_displaced)
6915 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6916 "clobbered register r%d\n"), write_reg);
6917 clobbered &= ~(1 << write_reg);
6918 }
6919 }
6920
6921 /* Perform register writeback manually. */
6922 if (dsc->u.block.writeback)
6923 {
6924 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6925
6926 if (dsc->u.block.increment)
6927 new_rn_val += regs_loaded * 4;
6928 else
6929 new_rn_val -= regs_loaded * 4;
6930
6931 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6932 CANNOT_WRITE_PC);
6933 }
6934}
6935
6936/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6937 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6938
6939static int
7ff120b4
YQ
6940arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6941 struct regcache *regs,
6942 struct displaced_step_closure *dsc)
cca44b1b
JB
6943{
6944 int load = bit (insn, 20);
6945 int user = bit (insn, 22);
6946 int increment = bit (insn, 23);
6947 int before = bit (insn, 24);
6948 int writeback = bit (insn, 21);
6949 int rn = bits (insn, 16, 19);
cca44b1b 6950
0963b4bd
MS
6951 /* Block transfers which don't mention PC can be run directly
6952 out-of-line. */
bf9f652a 6953 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6954 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6955
bf9f652a 6956 if (rn == ARM_PC_REGNUM)
cca44b1b 6957 {
0963b4bd
MS
6958 warning (_("displaced: Unpredictable LDM or STM with "
6959 "base register r15"));
7ff120b4 6960 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6961 }
6962
6963 if (debug_displaced)
6964 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6965 "%.8lx\n", (unsigned long) insn);
6966
36073a92 6967 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6968 dsc->u.block.rn = rn;
6969
6970 dsc->u.block.load = load;
6971 dsc->u.block.user = user;
6972 dsc->u.block.increment = increment;
6973 dsc->u.block.before = before;
6974 dsc->u.block.writeback = writeback;
6975 dsc->u.block.cond = bits (insn, 28, 31);
6976
6977 dsc->u.block.regmask = insn & 0xffff;
6978
6979 if (load)
6980 {
6981 if ((insn & 0xffff) == 0xffff)
6982 {
6983 /* LDM with a fully-populated register list. This case is
6984 particularly tricky. Implement for now by fully emulating the
6985 instruction (which might not behave perfectly in all cases, but
6986 these instructions should be rare enough for that not to matter
6987 too much). */
6988 dsc->modinsn[0] = ARM_NOP;
6989
6990 dsc->cleanup = &cleanup_block_load_all;
6991 }
6992 else
6993 {
6994 /* LDM of a list of registers which includes PC. Implement by
6995 rewriting the list of registers to be transferred into a
6996 contiguous chunk r0...rX before doing the transfer, then shuffling
6997 registers into the correct places in the cleanup routine. */
6998 unsigned int regmask = insn & 0xffff;
6999 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7000 unsigned int to = 0, from = 0, i, new_rn;
7001
7002 for (i = 0; i < num_in_list; i++)
36073a92 7003 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
7004
7005 /* Writeback makes things complicated. We need to avoid clobbering
7006 the base register with one of the registers in our modified
7007 register list, but just using a different register can't work in
7008 all cases, e.g.:
7009
7010 ldm r14!, {r0-r13,pc}
7011
7012 which would need to be rewritten as:
7013
7014 ldm rN!, {r0-r14}
7015
7016 but that can't work, because there's no free register for N.
7017
7018 Solve this by turning off the writeback bit, and emulating
7019 writeback manually in the cleanup routine. */
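 /* For instance, ldm r1!, {r4, pc} becomes ldm r1, {r0, r1} out of line;
    the cleanup moves the loaded values into r4 and the PC, restores the
    scratched low registers, and finally sets r1 to its pre-transfer value
    plus 8 by hand. (Illustrative registers only.) */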
7020
7021 if (writeback)
7022 insn &= ~(1 << 21);
7023
7024 new_regmask = (1 << num_in_list) - 1;
7025
7026 if (debug_displaced)
7027 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7028 "{..., pc}: original reg list %.4x, modified "
7029 "list %.4x\n"), rn, writeback ? "!" : "",
7030 (int) insn & 0xffff, new_regmask);
7031
7032 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7033
7034 dsc->cleanup = &cleanup_block_load_pc;
7035 }
7036 }
7037 else
7038 {
7039 /* STM of a list of registers which includes PC. Run the instruction
7040 as-is, but out of line: this will store the wrong value for the PC,
7041 so we must manually fix up the memory in the cleanup routine.
7042 Doing things this way has the advantage that we can auto-detect
7043 the offset of the PC write (which is architecture-dependent) in
7044 the cleanup routine. */
7045 dsc->modinsn[0] = insn;
7046
7047 dsc->cleanup = &cleanup_block_store_pc;
7048 }
7049
7050 return 0;
7051}
7052
34518530
YQ
7053static int
7054thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7055 struct regcache *regs,
7056 struct displaced_step_closure *dsc)
cca44b1b 7057{
34518530
YQ
7058 int rn = bits (insn1, 0, 3);
7059 int load = bit (insn1, 4);
7060 int writeback = bit (insn1, 5);
cca44b1b 7061
34518530
YQ
7062 /* Block transfers which don't mention PC can be run directly
7063 out-of-line. */
7064 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7065 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 7066
34518530
YQ
7067 if (rn == ARM_PC_REGNUM)
7068 {
7069 warning (_("displaced: Unpredictable LDM or STM with "
7070 "base register r15"));
7071 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7072 "unpredictable ldm/stm", dsc);
7073 }
cca44b1b
JB
7074
7075 if (debug_displaced)
34518530
YQ
7076 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7077 "%.4x%.4x\n", insn1, insn2);
cca44b1b 7078
34518530
YQ
7079 /* Clear bit 13, since it should always be zero. */
7080 dsc->u.block.regmask = (insn2 & 0xdfff);
7081 dsc->u.block.rn = rn;
cca44b1b 7082
34518530
YQ
7083 dsc->u.block.load = load;
7084 dsc->u.block.user = 0;
7085 dsc->u.block.increment = bit (insn1, 7);
7086 dsc->u.block.before = bit (insn1, 8);
7087 dsc->u.block.writeback = writeback;
7088 dsc->u.block.cond = INST_AL;
7089 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 7090
34518530
YQ
7091 if (load)
7092 {
7093 if (dsc->u.block.regmask == 0xffff)
7094 {
7095 /* This case cannot happen: bit 13 was cleared above, so the register mask can never be 0xffff. */
7096 gdb_assert (0);
7097 }
7098 else
7099 {
7100 unsigned int regmask = dsc->u.block.regmask;
7101 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7102 unsigned int to = 0, from = 0, i, new_rn;
7103
7104 for (i = 0; i < num_in_list; i++)
7105 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7106
7107 if (writeback)
7108 insn1 &= ~(1 << 5);
7109
7110 new_regmask = (1 << num_in_list) - 1;
7111
7112 if (debug_displaced)
7113 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7114 "{..., pc}: original reg list %.4x, modified "
7115 "list %.4x\n"), rn, writeback ? "!" : "",
7116 (int) dsc->u.block.regmask, new_regmask);
7117
7118 dsc->modinsn[0] = insn1;
7119 dsc->modinsn[1] = (new_regmask & 0xffff);
7120 dsc->numinsns = 2;
7121
7122 dsc->cleanup = &cleanup_block_load_pc;
7123 }
7124 }
7125 else
7126 {
7127 dsc->modinsn[0] = insn1;
7128 dsc->modinsn[1] = insn2;
7129 dsc->numinsns = 2;
7130 dsc->cleanup = &cleanup_block_store_pc;
7131 }
7132 return 0;
7133}
7134
7135/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7136 for Linux, where some SVC instructions must be treated specially. */
7137
7138static void
7139cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7140 struct displaced_step_closure *dsc)
7141{
7142 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7143
7144 if (debug_displaced)
7145 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7146 "%.8lx\n", (unsigned long) resume_addr);
7147
7148 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7149}
7150
7151
7152/* Common copy routine for svc instruction. */
7153
7154static int
7155install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7156 struct displaced_step_closure *dsc)
7157{
7158 /* Preparation: none.
7159 Insn: unmodified svc.
7160 Cleanup: pc <- insn_addr + insn_size. */
7161
7162 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7163 instruction. */
7164 dsc->wrote_to_pc = 1;
7165
7166 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
7167 if (dsc->u.svc.copy_svc_os)
7168 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7169 else
7170 {
7171 dsc->cleanup = &cleanup_svc;
7172 return 0;
7173 }
34518530
YQ
7174}
7175
7176static int
7177arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7178 struct regcache *regs, struct displaced_step_closure *dsc)
7179{
7180
7181 if (debug_displaced)
7182 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7183 (unsigned long) insn);
7184
7185 dsc->modinsn[0] = insn;
7186
7187 return install_svc (gdbarch, regs, dsc);
7188}
7189
7190static int
7191thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7192 struct regcache *regs, struct displaced_step_closure *dsc)
7193{
7194
7195 if (debug_displaced)
7196 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7197 insn);
bd18283a 7198
34518530
YQ
7199 dsc->modinsn[0] = insn;
7200
7201 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
7202}
7203
7204/* Copy undefined instructions. */
7205
7206static int
7ff120b4
YQ
7207arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7208 struct displaced_step_closure *dsc)
cca44b1b
JB
7209{
7210 if (debug_displaced)
0963b4bd
MS
7211 fprintf_unfiltered (gdb_stdlog,
7212 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
7213 (unsigned long) insn);
7214
7215 dsc->modinsn[0] = insn;
7216
7217 return 0;
7218}
7219
34518530
YQ
7220static int
7221thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7222 struct displaced_step_closure *dsc)
7223{
7224
7225 if (debug_displaced)
7226 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7227 "%.4x %.4x\n", (unsigned short) insn1,
7228 (unsigned short) insn2);
7229
7230 dsc->modinsn[0] = insn1;
7231 dsc->modinsn[1] = insn2;
7232 dsc->numinsns = 2;
7233
7234 return 0;
7235}
7236
cca44b1b
JB
7237/* Copy unpredictable instructions. */
7238
7239static int
7ff120b4
YQ
7240arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7241 struct displaced_step_closure *dsc)
cca44b1b
JB
7242{
7243 if (debug_displaced)
7244 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7245 "%.8lx\n", (unsigned long) insn);
7246
7247 dsc->modinsn[0] = insn;
7248
7249 return 0;
7250}
7251
7252/* The decode_* functions are instruction decoding helpers. They mostly follow
7253 the presentation in the ARM ARM. */
7254
7255static int
7ff120b4
YQ
7256arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7257 struct regcache *regs,
7258 struct displaced_step_closure *dsc)
cca44b1b
JB
7259{
7260 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7261 unsigned int rn = bits (insn, 16, 19);
7262
7263 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 7264 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 7265 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 7266 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 7267 else if ((op1 & 0x60) == 0x20)
7ff120b4 7268 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 7269 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
7270 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7271 dsc);
cca44b1b 7272 else if ((op1 & 0x77) == 0x41)
7ff120b4 7273 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 7274 else if ((op1 & 0x77) == 0x45)
7ff120b4 7275 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
7276 else if ((op1 & 0x77) == 0x51)
7277 {
7278 if (rn != 0xf)
7ff120b4 7279 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 7280 else
7ff120b4 7281 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
7282 }
7283 else if ((op1 & 0x77) == 0x55)
7ff120b4 7284 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
7285 else if (op1 == 0x57)
7286 switch (op2)
7287 {
7ff120b4
YQ
7288 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7289 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7290 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7291 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7292 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
7293 }
7294 else if ((op1 & 0x63) == 0x43)
7ff120b4 7295 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
7296 else if ((op2 & 0x1) == 0x0)
7297 switch (op1 & ~0x80)
7298 {
7299 case 0x61:
7ff120b4 7300 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 7301 case 0x65:
7ff120b4 7302 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
7303 case 0x71: case 0x75:
7304 /* pld/pldw reg. */
7ff120b4 7305 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 7306 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 7307 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 7308 default:
7ff120b4 7309 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7310 }
7311 else
7ff120b4 7312 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
7313}
7314
7315static int
7ff120b4
YQ
7316arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7317 struct regcache *regs,
7318 struct displaced_step_closure *dsc)
cca44b1b
JB
7319{
7320 if (bit (insn, 27) == 0)
7ff120b4 7321 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
7322 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7323 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7324 {
7325 case 0x0: case 0x2:
7ff120b4 7326 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
7327
7328 case 0x1: case 0x3:
7ff120b4 7329 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
7330
7331 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 7332 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
7333
7334 case 0x8:
7335 switch ((insn & 0xe00000) >> 21)
7336 {
7337 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7338 /* stc/stc2. */
7ff120b4 7339 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
7340
7341 case 0x2:
7ff120b4 7342 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
7343
7344 default:
7ff120b4 7345 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7346 }
7347
7348 case 0x9:
7349 {
7350 int rn_f = (bits (insn, 16, 19) == 0xf);
7351 switch ((insn & 0xe00000) >> 21)
7352 {
7353 case 0x1: case 0x3:
7354 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
7355 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7356 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
7357
7358 case 0x2:
7ff120b4 7359 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
7360
7361 case 0x4: case 0x5: case 0x6: case 0x7:
7362 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
7363 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7364 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7365
7366 default:
7ff120b4 7367 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7368 }
7369 }
7370
7371 case 0xa:
7ff120b4 7372 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
7373
7374 case 0xb:
7375 if (bits (insn, 16, 19) == 0xf)
7376 /* ldc/ldc2 lit. */
7ff120b4 7377 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 7378 else
7ff120b4 7379 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7380
7381 case 0xc:
7382 if (bit (insn, 4))
7ff120b4 7383 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 7384 else
7ff120b4 7385 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
7386
7387 case 0xd:
7388 if (bit (insn, 4))
7ff120b4 7389 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 7390 else
7ff120b4 7391 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
7392
7393 default:
7ff120b4 7394 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7395 }
7396}
7397
7398/* Decode miscellaneous instructions in dp/misc encoding space. */
7399
7400static int
7ff120b4
YQ
7401arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7402 struct regcache *regs,
7403 struct displaced_step_closure *dsc)
cca44b1b
JB
7404{
7405 unsigned int op2 = bits (insn, 4, 6);
7406 unsigned int op = bits (insn, 21, 22);
7407 unsigned int op1 = bits (insn, 16, 19);
7408
7409 switch (op2)
7410 {
7411 case 0x0:
7ff120b4 7412 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
7413
7414 case 0x1:
7415 if (op == 0x1) /* bx. */
7ff120b4 7416 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 7417 else if (op == 0x3)
7ff120b4 7418 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 7419 else
7ff120b4 7420 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7421
7422 case 0x2:
7423 if (op == 0x1)
7424 /* Not really supported. */
7ff120b4 7425 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 7426 else
7ff120b4 7427 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7428
7429 case 0x3:
7430 if (op == 0x1)
7ff120b4 7431 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 7432 regs, dsc); /* blx register. */
cca44b1b 7433 else
7ff120b4 7434 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7435
7436 case 0x5:
7ff120b4 7437 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
7438
7439 case 0x7:
7440 if (op == 0x1)
7ff120b4 7441 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
7442 else if (op == 0x3)
7443 /* Not really supported. */
7ff120b4 7444 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
7445
7446 default:
7ff120b4 7447 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7448 }
7449}
7450
7451static int
7ff120b4
YQ
7452arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7453 struct regcache *regs,
7454 struct displaced_step_closure *dsc)
cca44b1b
JB
7455{
7456 if (bit (insn, 25))
7457 switch (bits (insn, 20, 24))
7458 {
7459 case 0x10:
7ff120b4 7460 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
7461
7462 case 0x14:
7ff120b4 7463 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
7464
7465 case 0x12: case 0x16:
7ff120b4 7466 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
7467
7468 default:
7ff120b4 7469 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7470 }
7471 else
7472 {
7473 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7474
7475 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 7476 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 7477 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 7478 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 7479 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 7480 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 7481 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 7482 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 7483 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 7484 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 7485 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 7486 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b
JB
7487 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7488 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
7489 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7490 dsc);
cca44b1b
JB
7491 }
7492
7493 /* Should be unreachable. */
7494 return 1;
7495}
7496
7497static int
7ff120b4
YQ
7498arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7499 struct regcache *regs,
7500 struct displaced_step_closure *dsc)
cca44b1b
JB
7501{
7502 int a = bit (insn, 25), b = bit (insn, 4);
7503 uint32_t op1 = bits (insn, 20, 24);
7504 int rn_f = bits (insn, 16, 19) == 0xf;
7505
7506 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7507 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 7508 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
7509 else if ((!a && (op1 & 0x17) == 0x02)
7510 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 7511 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
7512 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7513 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 7514 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
7515 else if ((!a && (op1 & 0x17) == 0x03)
7516 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 7517 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
7518 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7519 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 7520 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
7521 else if ((!a && (op1 & 0x17) == 0x06)
7522 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 7523 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
7524 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7525 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 7526 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
7527 else if ((!a && (op1 & 0x17) == 0x07)
7528 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 7529 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
7530
7531 /* Should be unreachable. */
7532 return 1;
7533}
7534
7535static int
7ff120b4
YQ
7536arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7537 struct displaced_step_closure *dsc)
cca44b1b
JB
7538{
7539 switch (bits (insn, 20, 24))
7540 {
7541 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 7542 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
7543
7544 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 7545 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
7546
7547 case 0x08: case 0x09: case 0x0a: case 0x0b:
7548 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 7549 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
7550 "decode/pack/unpack/saturate/reverse", dsc);
7551
7552 case 0x18:
7553 if (bits (insn, 5, 7) == 0) /* op2. */
7554 {
7555 if (bits (insn, 12, 15) == 0xf)
7ff120b4 7556 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 7557 else
7ff120b4 7558 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
7559 }
7560 else
7ff120b4 7561 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7562
7563 case 0x1a: case 0x1b:
7564 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 7565 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 7566 else
7ff120b4 7567 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7568
7569 case 0x1c: case 0x1d:
7570 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7571 {
7572 if (bits (insn, 0, 3) == 0xf)
7ff120b4 7573 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 7574 else
7ff120b4 7575 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
7576 }
7577 else
7ff120b4 7578 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7579
7580 case 0x1e: case 0x1f:
7581 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 7582 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 7583 else
7ff120b4 7584 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
7585 }
7586
7587 /* Should be unreachable. */
7588 return 1;
7589}
7590
7591static int
7ff120b4
YQ
7592arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7593 struct regcache *regs,
7594 struct displaced_step_closure *dsc)
cca44b1b
JB
7595{
7596 if (bit (insn, 25))
7ff120b4 7597 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 7598 else
7ff120b4 7599 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
7600}
7601
7602static int
7ff120b4
YQ
7603arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7604 struct regcache *regs,
7605 struct displaced_step_closure *dsc)
cca44b1b
JB
7606{
7607 unsigned int opcode = bits (insn, 20, 24);
7608
7609 switch (opcode)
7610 {
7611 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 7612 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
7613
7614 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7615 case 0x12: case 0x16:
7ff120b4 7616 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
7617
7618 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7619 case 0x13: case 0x17:
7ff120b4 7620 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
7621
7622 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7623 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7624 /* Note: no writeback for these instructions. Bit 25 will always be
7625 zero though (via caller), so the following works OK. */
7ff120b4 7626 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
7627 }
7628
7629 /* Should be unreachable. */
7630 return 1;
7631}
7632
34518530
YQ
7633/* Decode shifted register instructions. */
7634
7635static int
7636thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7637 uint16_t insn2, struct regcache *regs,
7638 struct displaced_step_closure *dsc)
7639{
7640 /* The PC is only allowed to be used in the MOV instruction. */
7641
7642 unsigned int op = bits (insn1, 5, 8);
7643 unsigned int rn = bits (insn1, 0, 3);
7644
7645 if (op == 0x2 && rn == 0xf) /* MOV */
7646 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7647 else
7648 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7649 "dp (shift reg)", dsc);
7650}
7651
7652
7653/* Decode extension register load/store. Exactly the same as
7654 arm_decode_ext_reg_ld_st. */
7655
7656static int
7657thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7658 uint16_t insn2, struct regcache *regs,
7659 struct displaced_step_closure *dsc)
7660{
7661 unsigned int opcode = bits (insn1, 4, 8);
7662
7663 switch (opcode)
7664 {
7665 case 0x04: case 0x05:
7666 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7667 "vfp/neon vmov", dsc);
7668
7669 case 0x08: case 0x0c: /* 01x00 */
7670 case 0x0a: case 0x0e: /* 01x10 */
7671 case 0x12: case 0x16: /* 10x10 */
7672 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7673 "vfp/neon vstm/vpush", dsc);
7674
7675 case 0x09: case 0x0d: /* 01x01 */
7676 case 0x0b: case 0x0f: /* 01x11 */
7677 case 0x13: case 0x17: /* 10x11 */
7678 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7679 "vfp/neon vldm/vpop", dsc);
7680
7681 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7682 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7683 "vstr", dsc);
7684 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7685 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7686 }
7687
7688 /* Should be unreachable. */
7689 return 1;
7690}
7691
cca44b1b 7692static int
7ff120b4
YQ
7693arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7694 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
7695{
7696 unsigned int op1 = bits (insn, 20, 25);
7697 int op = bit (insn, 4);
7698 unsigned int coproc = bits (insn, 8, 11);
7699 unsigned int rn = bits (insn, 16, 19);
7700
7701 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 7702 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
7703 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7704 && (coproc & 0xe) != 0xa)
7705 /* stc/stc2. */
7ff120b4 7706 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
7707 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7708 && (coproc & 0xe) != 0xa)
7709 /* ldc/ldc2 imm/lit. */
7ff120b4 7710 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 7711 else if ((op1 & 0x3e) == 0x00)
7ff120b4 7712 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 7713 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 7714 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 7715 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 7716 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 7717 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 7718 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
7719 else if ((op1 & 0x30) == 0x20 && !op)
7720 {
7721 if ((coproc & 0xe) == 0xa)
7ff120b4 7722 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 7723 else
7ff120b4 7724 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
7725 }
7726 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 7727 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 7728 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 7729 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 7730 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 7731 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 7732 else if ((op1 & 0x30) == 0x30)
7ff120b4 7733 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 7734 else
7ff120b4 7735 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
7736}
7737
34518530
YQ
7738static int
7739thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7740 uint16_t insn2, struct regcache *regs,
7741 struct displaced_step_closure *dsc)
7742{
7743 unsigned int coproc = bits (insn2, 8, 11);
7744 unsigned int op1 = bits (insn1, 4, 9);
7745 unsigned int bit_5_8 = bits (insn1, 5, 8);
7746 unsigned int bit_9 = bit (insn1, 9);
7747 unsigned int bit_4 = bit (insn1, 4);
7748 unsigned int rn = bits (insn1, 0, 3);
7749
7750 if (bit_9 == 0)
7751 {
7752 if (bit_5_8 == 2)
7753 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7754 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7755 dsc);
7756 else if (bit_5_8 == 0) /* UNDEFINED. */
7757 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7758 else
7759 {
7760 /* coproc is 101x: SIMD/VFP extension register load/store. */
7761 if ((coproc & 0xe) == 0xa)
7762 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7763 dsc);
7764 else /* coproc is not 101x. */
7765 {
7766 if (bit_4 == 0) /* STC/STC2. */
7767 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7768 "stc/stc2", dsc);
7769 else /* LDC/LDC2 {literal, immediate}. */
7770 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7771 regs, dsc);
7772 }
7773 }
7774 }
7775 else
7776 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7777
7778 return 0;
7779}
7780
7781static void
7782install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7783 struct displaced_step_closure *dsc, int rd)
7784{
7785 /* ADR Rd, #imm
7786
7787 Rewrite as:
7788
7789 Preparation: Rd <- PC
7790 Insn: ADD Rd, #imm
7791 Cleanup: Null.
7792 */
7793
7794 /* Rd <- PC */
7795 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7796 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7797}
7798
7799static int
7800thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7801 struct displaced_step_closure *dsc,
7802 int rd, unsigned int imm)
7803{
7804
7805 /* Encoding T2: ADDS Rd, #imm */
7806 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7807
7808 install_pc_relative (gdbarch, regs, dsc, rd);
7809
7810 return 0;
7811}
7812
7813static int
7814thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7815 struct regcache *regs,
7816 struct displaced_step_closure *dsc)
7817{
7818 unsigned int rd = bits (insn, 8, 10);
7819 unsigned int imm8 = bits (insn, 0, 7);
7820
7821 if (debug_displaced)
7822 fprintf_unfiltered (gdb_stdlog,
7823 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7824 rd, imm8, insn);
7825
7826 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7827}
7828
7829static int
7830thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7831 uint16_t insn2, struct regcache *regs,
7832 struct displaced_step_closure *dsc)
7833{
7834 unsigned int rd = bits (insn2, 8, 11);
7835 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
7836 extract the raw immediate encoding rather than computing its value. When
7837 generating the ADD or SUB instruction, we can simply OR the immediate
7838 into the encoding. */
7839 unsigned int imm_3_8 = insn2 & 0x70ff;
7840 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
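 /* The 12-bit immediate is encoded as i:imm3:imm8, with i in bit 10 of the
    first halfword and imm3/imm8 in bits 14-12 and 7-0 of the second halfword;
    the masks above keep those fields in place so they can be ORed straight
    into the replacement ADD/SUB encoding. */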
7841
7842 if (debug_displaced)
7843 fprintf_unfiltered (gdb_stdlog,
7844 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7845 rd, imm_i, imm_3_8, insn1, insn2);
7846
7847 if (bit (insn1, 7)) /* ADR encoding T2 (the SUB form). */
7848 {
7849 /* Rewrite as SUB (immediate, encoding T3): SUB Rd, Rd, #imm. */
7850 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7851 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7852 }
7853 else /* ADR encoding T3 (the ADD form). */
7854 {
7855 /* Rewrite as ADD (immediate, encoding T3): ADD Rd, Rd, #imm. */
7856 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7857 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7858 }
7859 dsc->numinsns = 2;
7860
7861 install_pc_relative (gdbarch, regs, dsc, rd);
7862
7863 return 0;
7864}
7865
7866static int
7867thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
7868 struct regcache *regs,
7869 struct displaced_step_closure *dsc)
7870{
7871 unsigned int rt = bits (insn1, 8, 10);
7872 unsigned int pc;
7873 int imm8 = (bits (insn1, 0, 7) << 2);
7874 CORE_ADDR from = dsc->insn_addr;
7875
7876 /* LDR Rd, #imm8
7877
7878 Rewrite as:
7879
7880 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7881
7882 Insn: LDR R0, [R2, R3];
7883 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
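 /* For example, ldr r5, [pc, #20] executes out of line as ldr r0, [r2, r3]
    with r2 holding Align(PC,4) of the original instruction and r3 holding 20;
    the cleanup then copies r0 into r5 and restores r0, r2 and r3 from the
    saved temporaries. (The register and offset are illustrative.) */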
7884
7885 if (debug_displaced)
7886 fprintf_unfiltered (gdb_stdlog,
7887 "displaced: copying thumb ldr r%d [pc #%d]\n"
7888 , rt, imm8);
7889
7890 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7891 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7892 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7893 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7894 /* The assembler calculates the required value of the offset from the
7895 Align(PC,4) value of this instruction to the label. */
7896 pc = pc & 0xfffffffc;
7897
7898 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7899 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7900
7901 dsc->rd = rt;
7902 dsc->u.ldst.xfersize = 4;
7903 dsc->u.ldst.rn = 0;
7904 dsc->u.ldst.immed = 0;
7905 dsc->u.ldst.writeback = 0;
7906 dsc->u.ldst.restore_r4 = 0;
7907
7908 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7909
7910 dsc->cleanup = &cleanup_load;
7911
7912 return 0;
7913}
7914
7915/* Copy Thumb cbnz/cbz instruction. */
7916
7917static int
7918thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7919 struct regcache *regs,
7920 struct displaced_step_closure *dsc)
7921{
7922 int non_zero = bit (insn1, 11);
7923 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
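 /* Despite the name, imm5 now holds the full byte offset i:imm5:'0' from the
    CBNZ/CBZ encoding (a value in the range 0-126). */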
7924 CORE_ADDR from = dsc->insn_addr;
7925 int rn = bits (insn1, 0, 2);
7926 int rn_val = displaced_read_reg (regs, dsc, rn);
7927
7928 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7929 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7930 true, set it to INST_AL so that cleanup_branch knows the branch is taken;
7931 otherwise leave it alone and cleanup_branch will do nothing. */
7932 if (dsc->u.branch.cond)
7933 {
7934 dsc->u.branch.cond = INST_AL;
7935 dsc->u.branch.dest = from + 4 + imm5;
7936 }
7937 else
7938 dsc->u.branch.dest = from + 2;
7939
7940 dsc->u.branch.link = 0;
7941 dsc->u.branch.exchange = 0;
7942
7943 if (debug_displaced)
7944 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7945 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7946 rn, rn_val, insn1, dsc->u.branch.dest);
7947
7948 dsc->modinsn[0] = THUMB_NOP;
7949
7950 dsc->cleanup = &cleanup_branch;
7951 return 0;
7952}
7953
7954/* Copy Table Branch Byte/Halfword (TBB/TBH). */
7955static int
7956thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7957 uint16_t insn2, struct regcache *regs,
7958 struct displaced_step_closure *dsc)
7959{
7960 ULONGEST rn_val, rm_val;
7961 int is_tbh = bit (insn2, 4);
7962 CORE_ADDR halfwords = 0;
7963 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7964
7965 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7966 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7967
7968 if (is_tbh)
7969 {
7970 gdb_byte buf[2];
7971
7972 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7973 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7974 }
7975 else
7976 {
7977 gdb_byte buf[1];
7978
7979 target_read_memory (rn_val + rm_val, buf, 1);
7980 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7981 }
7982
7983 if (debug_displaced)
7984 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7985 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7986 (unsigned int) rn_val, (unsigned int) rm_val,
7987 (unsigned int) halfwords);
7988
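 /* The branch target is the Thumb PC of the TBB/TBH instruction (its address
    plus 4) plus twice the zero-extended table entry read above; the branch
    itself is performed by cleanup_branch. */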
7989 dsc->u.branch.cond = INST_AL;
7990 dsc->u.branch.link = 0;
7991 dsc->u.branch.exchange = 0;
7992 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7993
7994 dsc->cleanup = &cleanup_branch;
7995
7996 return 0;
7997}
7998
7999static void
8000cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8001 struct displaced_step_closure *dsc)
8002{
8003 /* PC <- r7 */
8004 int val = displaced_read_reg (regs, dsc, 7);
8005 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8006
8007 /* r7 <- r8 */
8008 val = displaced_read_reg (regs, dsc, 8);
8009 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8010
8011 /* r8 <- tmp[0] */
8012 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8013
8014}
8015
8016static int
8017thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8018 struct regcache *regs,
8019 struct displaced_step_closure *dsc)
8020{
8021 dsc->u.block.regmask = insn1 & 0x00ff;
8022
8023 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8024 to :
8025
8026 (1) register list is full, that is, r0-r7 are used.
8027 Prepare: tmp[0] <- r8
8028
8029 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8030 MOV r8, r7; Move value of r7 to r8;
8031 POP {r7}; Store PC value into r7.
8032
8033 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8034
8035 (2) register list is not full, supposing there are N registers in
8036 register list (except PC, 0 <= N <= 7).
8037 Prepare: for each i, 0 - N, tmp[i] <- ri.
8038
8039 POP {r0, r1, ...., rN};
8040
8041 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8042 from tmp[] properly.
8043 */
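 /* For example, pop {r1, r4, pc} (case 2, N == 2) is rewritten as
    pop {r0, r1, r2}; cleanup_block_load_pc then moves r2 into the PC, r1 into
    r4 and r0 into r1, and restores the original r0 from dsc->tmp[0]. SP ends
    up correct automatically because the modified POP transfers the same
    number of words. (Registers are illustrative.) */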
8044 if (debug_displaced)
8045 fprintf_unfiltered (gdb_stdlog,
8046 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8047 dsc->u.block.regmask, insn1);
8048
8049 if (dsc->u.block.regmask == 0xff)
8050 {
8051 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8052
8053 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8054 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8055 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8056
8057 dsc->numinsns = 3;
8058 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8059 }
8060 else
8061 {
8062 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8063 unsigned int new_regmask, bit = 1;
8064 unsigned int to = 0, from = 0, i, new_rn;
8065
8066 for (i = 0; i < num_in_list + 1; i++)
8067 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8068
8069 new_regmask = (1 << (num_in_list + 1)) - 1;
8070
8071 if (debug_displaced)
8072 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8073 "{..., pc}: original reg list %.4x,"
8074 " modified list %.4x\n"),
8075 (int) dsc->u.block.regmask, new_regmask);
8076
8077 dsc->u.block.regmask |= 0x8000;
8078 dsc->u.block.writeback = 0;
8079 dsc->u.block.cond = INST_AL;
8080
8081 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8082
8083 dsc->cleanup = &cleanup_block_load_pc;
8084 }
8085
8086 return 0;
8087}
8088
8089static void
8090thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8091 struct regcache *regs,
8092 struct displaced_step_closure *dsc)
8093{
8094 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8095 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8096 int err = 0;
8097
8098 /* 16-bit thumb instructions. */
8099 switch (op_bit_12_15)
8100 {
8101 /* Shift (immediate), add, subtract, move and compare. */
8102 case 0: case 1: case 2: case 3:
8103 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8104 "shift/add/sub/mov/cmp",
8105 dsc);
8106 break;
8107 case 4:
8108 switch (op_bit_10_11)
8109 {
8110 case 0: /* Data-processing */
8111 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8112 "data-processing",
8113 dsc);
8114 break;
8115 case 1: /* Special data instructions and branch and exchange. */
8116 {
8117 unsigned short op = bits (insn1, 7, 9);
8118 if (op == 6 || op == 7) /* BX or BLX */
8119 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8120 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8121 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8122 else
8123 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8124 dsc);
8125 }
8126 break;
8127 default: /* LDR (literal) */
8128 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8129 }
8130 break;
8131 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8132 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8133 break;
8134 case 10:
8135 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8136 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8137 else /* Generate SP-relative address */
8138 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8139 break;
8140 case 11: /* Misc 16-bit instructions */
8141 {
8142 switch (bits (insn1, 8, 11))
8143 {
8144 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8145 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8146 break;
8147 case 12: case 13: /* POP */
8148 if (bit (insn1, 8)) /* PC is in register list. */
8149 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8150 else
8151 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8152 break;
8153 case 15: /* If-Then, and hints */
8154 if (bits (insn1, 0, 3))
8155 /* If-Then makes up to four following instructions conditional.
8156 The IT instruction itself is not conditional, so handle it as an
8157 ordinary unmodified instruction. */
8158 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8159 dsc);
8160 else
8161 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8162 break;
8163 default:
8164 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8165 }
8166 }
8167 break;
8168 case 12:
8169 if (op_bit_10_11 < 2) /* Store multiple registers */
8170 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8171 else /* Load multiple registers */
8172 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8173 break;
8174 case 13: /* Conditional branch and supervisor call */
8175 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8176 err = thumb_copy_b (gdbarch, insn1, dsc);
8177 else
8178 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8179 break;
8180 case 14: /* Unconditional branch */
8181 err = thumb_copy_b (gdbarch, insn1, dsc);
8182 break;
8183 default:
8184 err = 1;
8185 }
8186
8187 if (err)
8188 internal_error (__FILE__, __LINE__,
8189 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8190}
8191
8192static int
8193decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8194 uint16_t insn1, uint16_t insn2,
8195 struct regcache *regs,
8196 struct displaced_step_closure *dsc)
8197{
8198 int rt = bits (insn2, 12, 15);
8199 int rn = bits (insn1, 0, 3);
8200 int op1 = bits (insn1, 7, 8);
8201 int err = 0;
8202
8203 switch (bits (insn1, 5, 6))
8204 {
8205 case 0: /* Load byte and memory hints */
8206 if (rt == 0xf) /* PLD/PLI */
8207 {
8208 if (rn == 0xf)
8209 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
8210 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8211 else
8212 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8213 "pli/pld", dsc);
8214 }
8215 else
8216 {
8217 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8218 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8219 1);
8220 else
8221 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8222 "ldrb{reg, immediate}/ldrbt",
8223 dsc);
8224 }
8225
8226 break;
8227 case 1: /* Load halfword and memory hints. */
8228 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8229 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8230 "pld/unalloc memhint", dsc);
8231 else
8232 {
8233 if (rn == 0xf)
8234 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8235 2);
8236 else
8237 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8238 "ldrh/ldrht", dsc);
8239 }
8240 break;
8241 case 2: /* Load word */
8242 {
8243 int insn2_bit_8_11 = bits (insn2, 8, 11);
8244
8245 if (rn == 0xf)
8246 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8247 else if (op1 == 0x1) /* Encoding T3 */
8248 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8249 0, 1);
8250 else /* op1 == 0x0 */
8251 {
8252 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8253 /* LDR (immediate) */
8254 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8255 dsc, bit (insn2, 8), 1);
8256 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8257 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8258 "ldrt", dsc);
8259 else
8260 /* LDR (register) */
8261 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8262 dsc, 0, 0);
8263 }
8264 break;
8265 }
8266 default:
8267 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8268 break;
8269 }
8270 return 0;
8271}
8272
8273static void
8274thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8275 uint16_t insn2, struct regcache *regs,
8276 struct displaced_step_closure *dsc)
8277{
8278 int err = 0;
8279 unsigned short op = bit (insn2, 15);
8280 unsigned int op1 = bits (insn1, 11, 12);
8281
8282 switch (op1)
8283 {
8284 case 1:
8285 {
8286 switch (bits (insn1, 9, 10))
8287 {
8288 case 0:
8289 if (bit (insn1, 6))
8290 {
8291 /* Load/store {dual, exclusive}, table branch. */
8292 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8293 && bits (insn2, 5, 7) == 0)
8294 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8295 dsc);
8296 else
8297 /* PC is not allowed to be used in load/store {dual, exclusive}
8298 instructions. */
8299 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8300 "load/store dual/ex", dsc);
8301 }
8302 else /* load/store multiple */
8303 {
8304 switch (bits (insn1, 7, 8))
8305 {
8306 case 0: case 3: /* SRS, RFE */
8307 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8308 "srs/rfe", dsc);
8309 break;
8310 case 1: case 2: /* LDM/STM/PUSH/POP */
8311 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8312 break;
8313 }
8314 }
8315 break;
8316
8317 case 1:
8318 /* Data-processing (shift register). */
8319 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8320 dsc);
8321 break;
8322 default: /* Coprocessor instructions. */
8323 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8324 break;
8325 }
8326 break;
8327 }
8328 case 2: /* op1 = 2 */
8329 if (op) /* Branch and misc control. */
8330 {
8331 if (bit (insn2, 14) /* BLX/BL */
8332 || bit (insn2, 12) /* Unconditional branch */
8333 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8334 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8335 else
8336 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8337 "misc ctrl", dsc);
8338 }
8339 else
8340 {
8341 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8342 {
8343 int op = bits (insn1, 4, 8);
8344 int rn = bits (insn1, 0, 3);
8345 if ((op == 0 || op == 0xa) && rn == 0xf)
8346 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8347 regs, dsc);
8348 else
8349 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8350 "dp/pb", dsc);
8351 }
8352 else /* Data processing (modified immediate). */
8353 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8354 "dp/mi", dsc);
8355 }
8356 break;
8357 case 3: /* op1 = 3 */
8358 switch (bits (insn1, 9, 10))
8359 {
8360 case 0:
8361 if (bit (insn1, 4))
8362 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8363 regs, dsc);
8364 else /* NEON Load/Store and Store single data item */
8365 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8366 "neon elt/struct load/store",
8367 dsc);
8368 break;
8369 case 1: /* op1 = 3, bits (9, 10) == 1 */
8370 switch (bits (insn1, 7, 8))
8371 {
8372 case 0: case 1: /* Data processing (register) */
8373 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8374 "dp(reg)", dsc);
8375 break;
8376 case 2: /* Multiply and absolute difference */
8377 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8378 "mul/mua/diff", dsc);
8379 break;
8380 case 3: /* Long multiply and divide */
8381 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8382 "lmul/lmua", dsc);
8383 break;
8384 }
8385 break;
8386 default: /* Coprocessor instructions */
8387 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8388 break;
8389 }
8390 break;
8391 default:
8392 err = 1;
8393 }
8394
8395 if (err)
8396 internal_error (__FILE__, __LINE__,
8397 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8398
8399}
8400
b434a28f
YQ
8401static void
8402thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8403 CORE_ADDR to, struct regcache *regs,
8404 struct displaced_step_closure *dsc)
8405{
34518530
YQ
8406 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8407 uint16_t insn1
8408 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8409
8410 if (debug_displaced)
8411 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8412 "at %.8lx\n", insn1, (unsigned long) from);
8413
8414 dsc->is_thumb = 1;
8415 dsc->insn_size = thumb_insn_size (insn1);
8416 if (thumb_insn_size (insn1) == 4)
8417 {
8418 uint16_t insn2
8419 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8420 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8421 }
8422 else
8423 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
8424}
8425
cca44b1b 8426void
b434a28f
YQ
8427arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8428 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
8429 struct displaced_step_closure *dsc)
8430{
8431 int err = 0;
b434a28f
YQ
8432 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8433 uint32_t insn;
cca44b1b
JB
8434
8435 /* Most displaced instructions use a 1-instruction scratch space, so set this
8436 here and override below if/when necessary. */
8437 dsc->numinsns = 1;
8438 dsc->insn_addr = from;
8439 dsc->scratch_base = to;
8440 dsc->cleanup = NULL;
8441 dsc->wrote_to_pc = 0;
8442
b434a28f
YQ
8443 if (!displaced_in_arm_mode (regs))
8444 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8445
4db71c0b
YQ
8446 dsc->is_thumb = 0;
8447 dsc->insn_size = 4;
b434a28f
YQ
8448 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8449 if (debug_displaced)
8450 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8451 "at %.8lx\n", (unsigned long) insn,
8452 (unsigned long) from);
8453
cca44b1b 8454 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 8455 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
8456 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8457 {
8458 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 8459 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
8460 break;
8461
8462 case 0x4: case 0x5: case 0x6:
7ff120b4 8463 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
8464 break;
8465
8466 case 0x7:
7ff120b4 8467 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
8468 break;
8469
8470 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 8471 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
8472 break;
8473
8474 case 0xc: case 0xd: case 0xe: case 0xf:
7ff120b4 8475 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
cca44b1b
JB
8476 break;
8477 }
8478
8479 if (err)
8480 internal_error (__FILE__, __LINE__,
8481 _("arm_process_displaced_insn: Instruction decode error"));
8482}
8483
8484/* Actually set up the scratch space for a displaced instruction. */
8485
8486void
8487arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8488 CORE_ADDR to, struct displaced_step_closure *dsc)
8489{
8490 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 8491 unsigned int i, len, offset;
cca44b1b 8492 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b
YQ
8493 int size = dsc->is_thumb? 2 : 4;
8494 const unsigned char *bkp_insn;
cca44b1b 8495
4db71c0b 8496 offset = 0;
cca44b1b
JB
8497 /* Poke modified instruction(s). */
8498 for (i = 0; i < dsc->numinsns; i++)
8499 {
8500 if (debug_displaced)
4db71c0b
YQ
8501 {
8502 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8503 if (size == 4)
8504 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8505 dsc->modinsn[i]);
8506 else if (size == 2)
8507 fprintf_unfiltered (gdb_stdlog, "%.4x",
8508 (unsigned short)dsc->modinsn[i]);
8509
8510 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8511 (unsigned long) to + offset);
8512
8513 }
8514 write_memory_unsigned_integer (to + offset, size,
8515 byte_order_for_code,
cca44b1b 8516 dsc->modinsn[i]);
4db71c0b
YQ
8517 offset += size;
8518 }
8519
8520 /* Choose the correct breakpoint instruction. */
8521 if (dsc->is_thumb)
8522 {
8523 bkp_insn = tdep->thumb_breakpoint;
8524 len = tdep->thumb_breakpoint_size;
8525 }
8526 else
8527 {
8528 bkp_insn = tdep->arm_breakpoint;
8529 len = tdep->arm_breakpoint_size;
cca44b1b
JB
8530 }
8531
8532 /* Put breakpoint afterwards. */
4db71c0b 8533 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
8534
8535 if (debug_displaced)
8536 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8537 paddress (gdbarch, from), paddress (gdbarch, to));
8538}
8539
8540/* Entry point for copying an instruction into scratch space for displaced
8541 stepping. */
8542
8543struct displaced_step_closure *
8544arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8545 CORE_ADDR from, CORE_ADDR to,
8546 struct regcache *regs)
8547{
8548 struct displaced_step_closure *dsc
8549 = xmalloc (sizeof (struct displaced_step_closure));
b434a28f 8550 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
cca44b1b
JB
8551 arm_displaced_init_closure (gdbarch, from, to, dsc);
8552
8553 return dsc;
8554}
8555
8556/* Entry point for cleaning things up after a displaced instruction has been
8557 single-stepped. */
8558
8559void
8560arm_displaced_step_fixup (struct gdbarch *gdbarch,
8561 struct displaced_step_closure *dsc,
8562 CORE_ADDR from, CORE_ADDR to,
8563 struct regcache *regs)
8564{
8565 if (dsc->cleanup)
8566 dsc->cleanup (gdbarch, regs, dsc);
8567
8568 if (!dsc->wrote_to_pc)
4db71c0b
YQ
8569 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8570 dsc->insn_addr + dsc->insn_size);
8571
cca44b1b
JB
8572}
8573
8574#include "bfd-in2.h"
8575#include "libcoff.h"
8576
8577static int
8578gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8579{
9779414d
DJ
8580 struct gdbarch *gdbarch = info->application_data;
8581
8582 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
8583 {
8584 static asymbol *asym;
8585 static combined_entry_type ce;
8586 static struct coff_symbol_struct csym;
8587 static struct bfd fake_bfd;
8588 static bfd_target fake_target;
8589
8590 if (csym.native == NULL)
8591 {
8592 /* Create a fake symbol vector containing a Thumb symbol.
8593 This is solely so that the code in print_insn_little_arm()
8594 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8595 the presence of a Thumb symbol and switch to decoding
8596 Thumb instructions. */
8597
8598 fake_target.flavour = bfd_target_coff_flavour;
8599 fake_bfd.xvec = &fake_target;
8600 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8601 csym.native = &ce;
8602 csym.symbol.the_bfd = &fake_bfd;
8603 csym.symbol.name = "fake";
8604 asym = (asymbol *) & csym;
8605 }
8606
8607 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8608 info->symbols = &asym;
8609 }
8610 else
8611 info->symbols = NULL;
8612
8613 if (info->endian == BFD_ENDIAN_BIG)
8614 return print_insn_big_arm (memaddr, info);
8615 else
8616 return print_insn_little_arm (memaddr, info);
8617}
8618
8619/* The following define instruction sequences that will cause ARM
8620 CPUs to take an undefined instruction trap. These are used to
8621 signal a breakpoint to GDB.
8622
8623 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8624 modes. A different instruction is required for each mode. The ARM
8625 CPUs can also be big or little endian. Thus four different
8626 instructions are needed to support all cases.
8627
8628 Note: ARMv4 defines several new instructions that will take the
8629 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8630 not in fact add the new instructions. The new undefined
8631 instructions in ARMv4 are all instructions that had no defined
8632 behaviour in earlier chips. There is no guarantee that they will
8633 raise an exception; they may be treated as NOPs. In practice, it
8634 may only be safe to rely on instructions matching:
8635
8636 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8637 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8638 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8639
0963b4bd 8640 Even this may only be true if the condition predicate is true. The
cca44b1b
JB
8641 following use a condition predicate of ALWAYS so it is always TRUE.
8642
8643 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8644 and NetBSD all use a software interrupt rather than an undefined
8645 instruction to force a trap. This can be handled by the
8646 abi-specific code during establishment of the gdbarch vector. */
8647
8648#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8649#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8650#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8651#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8652
8653static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8654static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8655static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8656static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
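
/* A worked example (illustrative aside, not part of GDB itself): the ARM
   breakpoint bytes above form the word 0xe7ffdefe, whose condition field
   is 0b1110 (ALWAYS), whose bits 27-25 are 0b011 and whose bit 4 is 1, so
   it matches the reserved pattern shown in the comment above.  A sketch of
   that check, under those assumptions:  */
#if 0
static int
arm_insn_matches_undef_pattern (uint32_t insn)
{
  /* cccc 011x xxxx xxxx xxxx xxxx xxx1 xxxx, with the condition field
     forced to ALWAYS as the default breakpoints above use.  */
  return (insn & 0xfe000010) == 0xe6000010;
}
#endif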
8657
8658/* Determine the type and size of breakpoint to insert at PCPTR. Uses
8659 the program counter value to determine whether a 16-bit or 32-bit
8660 breakpoint should be used. It returns a pointer to a string of
8661 bytes that encode a breakpoint instruction, stores the length of
8662 the string to *lenptr, and adjusts the program counter (if
8663 necessary) to point to the actual memory location where the
8664 breakpoint should be inserted. */
8665
8666static const unsigned char *
8667arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8668{
8669 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 8670 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 8671
9779414d 8672 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
8673 {
8674 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
8675
8676 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8677 check whether we are replacing a 32-bit instruction. */
8678 if (tdep->thumb2_breakpoint != NULL)
8679 {
8680 gdb_byte buf[2];
8681 if (target_read_memory (*pcptr, buf, 2) == 0)
8682 {
8683 unsigned short inst1;
8684 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 8685 if (thumb_insn_size (inst1) == 4)
177321bd
DJ
8686 {
8687 *lenptr = tdep->thumb2_breakpoint_size;
8688 return tdep->thumb2_breakpoint;
8689 }
8690 }
8691 }
8692
cca44b1b
JB
8693 *lenptr = tdep->thumb_breakpoint_size;
8694 return tdep->thumb_breakpoint;
8695 }
8696 else
8697 {
8698 *lenptr = tdep->arm_breakpoint_size;
8699 return tdep->arm_breakpoint;
8700 }
8701}
8702
177321bd
DJ
8703static void
8704arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8705 int *kindptr)
8706{
8707 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8708
8709 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8710
9779414d 8711 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
177321bd
DJ
8712 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8713 that this is not confused with a 32-bit ARM breakpoint. */
8714 *kindptr = 3;
8715}
8716
cca44b1b
JB
8717/* Extract from the register cache REGS a function return value
8718 of type TYPE, and copy that, in virtual format, into
8719 VALBUF. */
8720
8721static void
8722arm_extract_return_value (struct type *type, struct regcache *regs,
8723 gdb_byte *valbuf)
8724{
8725 struct gdbarch *gdbarch = get_regcache_arch (regs);
8726 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8727
8728 if (TYPE_CODE_FLT == TYPE_CODE (type))
8729 {
8730 switch (gdbarch_tdep (gdbarch)->fp_model)
8731 {
8732 case ARM_FLOAT_FPA:
8733 {
8734 /* The value is in register F0 in internal format. We need to
8735 extract the raw value and then convert it to the desired
8736 internal type. */
8737 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8738
8739 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8740 convert_from_extended (floatformat_from_type (type), tmpbuf,
8741 valbuf, gdbarch_byte_order (gdbarch));
8742 }
8743 break;
8744
8745 case ARM_FLOAT_SOFT_FPA:
8746 case ARM_FLOAT_SOFT_VFP:
8747 /* ARM_FLOAT_VFP can arise if this is a variadic function, and so
8748 is not using the VFP ABI code. */
8749 case ARM_FLOAT_VFP:
8750 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8751 if (TYPE_LENGTH (type) > 4)
8752 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8753 valbuf + INT_REGISTER_SIZE);
8754 break;
8755
8756 default:
0963b4bd
MS
8757 internal_error (__FILE__, __LINE__,
8758 _("arm_extract_return_value: "
8759 "Floating point model not supported"));
cca44b1b
JB
8760 break;
8761 }
8762 }
8763 else if (TYPE_CODE (type) == TYPE_CODE_INT
8764 || TYPE_CODE (type) == TYPE_CODE_CHAR
8765 || TYPE_CODE (type) == TYPE_CODE_BOOL
8766 || TYPE_CODE (type) == TYPE_CODE_PTR
8767 || TYPE_CODE (type) == TYPE_CODE_REF
8768 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8769 {
b021a221
MS
8770 /* If the type is a plain integer, then the access is
8771 straightforward. Otherwise we have to play around a bit
8772 more. */
cca44b1b
JB
8773 int len = TYPE_LENGTH (type);
8774 int regno = ARM_A1_REGNUM;
8775 ULONGEST tmp;
8776
8777 while (len > 0)
8778 {
8779 /* By using store_unsigned_integer we avoid having to do
8780 anything special for small big-endian values. */
8781 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8782 store_unsigned_integer (valbuf,
8783 (len > INT_REGISTER_SIZE
8784 ? INT_REGISTER_SIZE : len),
8785 byte_order, tmp);
8786 len -= INT_REGISTER_SIZE;
8787 valbuf += INT_REGISTER_SIZE;
8788 }
8789 }
8790 else
8791 {
8792 /* For a structure or union the behaviour is as if the value had
8793 been stored to word-aligned memory and then loaded into
8794 registers with 32-bit load instruction(s). */
8795 int len = TYPE_LENGTH (type);
8796 int regno = ARM_A1_REGNUM;
8797 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8798
8799 while (len > 0)
8800 {
8801 regcache_cooked_read (regs, regno++, tmpbuf);
8802 memcpy (valbuf, tmpbuf,
8803 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8804 len -= INT_REGISTER_SIZE;
8805 valbuf += INT_REGISTER_SIZE;
8806 }
8807 }
8808}
8809
8810
8811/* Will a function return an aggregate type in memory or in a
8812 register? Return 0 if an aggregate type can be returned in a
8813 register, 1 if it must be returned in memory. */
8814
8815static int
8816arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8817{
8818 int nRc;
8819 enum type_code code;
8820
8821 CHECK_TYPEDEF (type);
8822
8823 /* In the ARM ABI, "integer" like aggregate types are returned in
8824 registers. For an aggregate type to be integer like, its size
8825 must be less than or equal to INT_REGISTER_SIZE and the
8826 offset of each addressable subfield must be zero. Note that bit
8827 fields are not addressable, and all addressable subfields of
8828 unions always start at offset zero.
8829
8830 This function is based on the behaviour of GCC 2.95.1.
8831 See: gcc/arm.c: arm_return_in_memory() for details.
8832
8833 Note: All versions of GCC before GCC 2.95.2 do not set up the
8834 parameters correctly for a function returning the following
8835 structure: struct { float f;}; This should be returned in memory,
8836 not a register. Richard Earnshaw sent me a patch, but I do not
8837 know of any way to detect if a function like the above has been
8838 compiled with the correct calling convention. */
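
  /* Illustrative examples only (an aside, not GDB code): with
     INT_REGISTER_SIZE == 4, the first two types below are "integer like"
     under the APCS checks that follow and are returned in r0, while the
     last three must be returned in memory.  */
#if 0
  struct in_reg { char c; };                  /* Single field at offset 0.  */
  union in_reg_u { char c; short s; int i; }; /* Union members all at offset 0.  */
  struct in_mem1 { float f; };                /* Contains a floating-point field.  */
  struct in_mem2 { short a; short b; };       /* Second field at a non-zero offset.  */
  struct in_mem3 { char a[8]; };              /* Larger than INT_REGISTER_SIZE.  */
#endif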
8839
8840 /* All aggregate types that won't fit in a register must be returned
8841 in memory. */
8842 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8843 {
8844 return 1;
8845 }
8846
8847 /* The AAPCS says all aggregates not larger than a word are returned
8848 in a register. */
8849 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8850 return 0;
8851
8852 /* The only aggregate types that can be returned in a register are
8853 structs and unions. Arrays must be returned in memory. */
8854 code = TYPE_CODE (type);
8855 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
8856 {
8857 return 1;
8858 }
8859
8860 /* Assume all other aggregate types can be returned in a register.
8861 Run a check for structures, unions and arrays. */
8862 nRc = 0;
8863
8864 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8865 {
8866 int i;
8867 /* Need to check if this struct/union is "integer" like. For
8868 this to be true, its size must be less than or equal to
8869 INT_REGISTER_SIZE and the offset of each addressable
8870 subfield must be zero. Note that bit fields are not
8871 addressable, and unions always start at offset zero. If any
8872 of the subfields is a floating point type, the struct/union
8873 cannot be an integer type. */
8874
8875 /* For each field in the object, check:
8876 1) Is it FP? --> yes, nRc = 1;
67255d04
RE
8877 2) Is it addressable (bitpos != 0) and
8878 not packed (bitsize == 0)?
8879 --> yes, nRc = 1
8880 */
8881
8882 for (i = 0; i < TYPE_NFIELDS (type); i++)
8883 {
8884 enum type_code field_type_code;
0963b4bd
MS
8885 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8886 i)));
67255d04
RE
8887
8888 /* Is it a floating point type field? */
8889 if (field_type_code == TYPE_CODE_FLT)
8890 {
8891 nRc = 1;
8892 break;
8893 }
8894
8895 /* If bitpos != 0, then we have to care about it. */
8896 if (TYPE_FIELD_BITPOS (type, i) != 0)
8897 {
8898 /* Bitfields are not addressable. If the field bitsize is
8899 zero, then the field is not packed. Hence it cannot be
8900 a bitfield or any other packed type. */
8901 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8902 {
8903 nRc = 1;
8904 break;
8905 }
8906 }
8907 }
8908 }
8909
8910 return nRc;
8911}
8912
34e8f22d
RE
8913/* Write into appropriate registers a function return value of type
8914 TYPE, given in virtual format. */
8915
8916static void
b508a996 8917arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8918 const gdb_byte *valbuf)
34e8f22d 8919{
be8626e0 8920 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8921 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8922
34e8f22d
RE
8923 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8924 {
7a5ea0d4 8925 char buf[MAX_REGISTER_SIZE];
34e8f22d 8926
be8626e0 8927 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8928 {
8929 case ARM_FLOAT_FPA:
8930
be8626e0
MD
8931 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8932 gdbarch_byte_order (gdbarch));
b508a996 8933 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8934 break;
8935
fd50bc42 8936 case ARM_FLOAT_SOFT_FPA:
08216dd7 8937 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8938 /* ARM_FLOAT_VFP can arise if this is a variadic function, and so
8939 is not using the VFP ABI code. */
8940 case ARM_FLOAT_VFP:
b508a996
RE
8941 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8942 if (TYPE_LENGTH (type) > 4)
8943 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8944 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8945 break;
8946
8947 default:
9b20d036
MS
8948 internal_error (__FILE__, __LINE__,
8949 _("arm_store_return_value: Floating "
8950 "point model not supported"));
08216dd7
RE
8951 break;
8952 }
34e8f22d 8953 }
b508a996
RE
8954 else if (TYPE_CODE (type) == TYPE_CODE_INT
8955 || TYPE_CODE (type) == TYPE_CODE_CHAR
8956 || TYPE_CODE (type) == TYPE_CODE_BOOL
8957 || TYPE_CODE (type) == TYPE_CODE_PTR
8958 || TYPE_CODE (type) == TYPE_CODE_REF
8959 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8960 {
8961 if (TYPE_LENGTH (type) <= 4)
8962 {
8963 /* Values of one word or less are zero/sign-extended and
8964 returned in r0. */
7a5ea0d4 8965 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8966 LONGEST val = unpack_long (type, valbuf);
8967
e17a4113 8968 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8969 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8970 }
8971 else
8972 {
8973 /* Integral values greater than one word are stored in consecutive
8974 registers starting with r0. This will always be a multiple of
8975 the register size. */
8976 int len = TYPE_LENGTH (type);
8977 int regno = ARM_A1_REGNUM;
8978
8979 while (len > 0)
8980 {
8981 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8982 len -= INT_REGISTER_SIZE;
8983 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8984 }
8985 }
8986 }
34e8f22d 8987 else
b508a996
RE
8988 {
8989 /* For a structure or union the behaviour is as if the value had
8990 been stored to word-aligned memory and then loaded into
8991 registers with 32-bit load instruction(s). */
8992 int len = TYPE_LENGTH (type);
8993 int regno = ARM_A1_REGNUM;
7a5ea0d4 8994 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8995
8996 while (len > 0)
8997 {
8998 memcpy (tmpbuf, valbuf,
7a5ea0d4 8999 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 9000 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
9001 len -= INT_REGISTER_SIZE;
9002 valbuf += INT_REGISTER_SIZE;
b508a996
RE
9003 }
9004 }
34e8f22d
RE
9005}
9006
2af48f68
PB
9007
9008/* Handle function return values. */
9009
9010static enum return_value_convention
c055b101
CV
9011arm_return_value (struct gdbarch *gdbarch, struct type *func_type,
9012 struct type *valtype, struct regcache *regcache,
9013 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 9014{
7c00367c 9015 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
90445bd3
DJ
9016 enum arm_vfp_cprc_base_type vfp_base_type;
9017 int vfp_base_count;
9018
9019 if (arm_vfp_abi_for_function (gdbarch, func_type)
9020 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9021 {
9022 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9023 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9024 int i;
9025 for (i = 0; i < vfp_base_count; i++)
9026 {
58d6951d
DJ
9027 if (reg_char == 'q')
9028 {
9029 if (writebuf)
9030 arm_neon_quad_write (gdbarch, regcache, i,
9031 writebuf + i * unit_length);
9032
9033 if (readbuf)
9034 arm_neon_quad_read (gdbarch, regcache, i,
9035 readbuf + i * unit_length);
9036 }
9037 else
9038 {
9039 char name_buf[4];
9040 int regnum;
9041
9042 sprintf (name_buf, "%c%d", reg_char, i);
9043 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9044 strlen (name_buf));
9045 if (writebuf)
9046 regcache_cooked_write (regcache, regnum,
9047 writebuf + i * unit_length);
9048 if (readbuf)
9049 regcache_cooked_read (regcache, regnum,
9050 readbuf + i * unit_length);
9051 }
90445bd3
DJ
9052 }
9053 return RETURN_VALUE_REGISTER_CONVENTION;
9054 }
7c00367c 9055
2af48f68
PB
9056 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9057 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9058 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9059 {
7c00367c
MK
9060 if (tdep->struct_return == pcc_struct_return
9061 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
9062 return RETURN_VALUE_STRUCT_CONVENTION;
9063 }
9064
7052e42c
UW
9065 /* AAPCS returns complex types longer than a register in memory. */
9066 if (tdep->arm_abi != ARM_ABI_APCS
9067 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9068 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9069 return RETURN_VALUE_STRUCT_CONVENTION;
9070
2af48f68
PB
9071 if (writebuf)
9072 arm_store_return_value (valtype, regcache, writebuf);
9073
9074 if (readbuf)
9075 arm_extract_return_value (valtype, regcache, readbuf);
9076
9077 return RETURN_VALUE_REGISTER_CONVENTION;
9078}
9079
9080
9df628e0 9081static int
60ade65d 9082arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 9083{
e17a4113
UW
9084 struct gdbarch *gdbarch = get_frame_arch (frame);
9085 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9086 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 9087 CORE_ADDR jb_addr;
7a5ea0d4 9088 char buf[INT_REGISTER_SIZE];
9df628e0 9089
60ade65d 9090 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
9091
9092 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 9093 INT_REGISTER_SIZE))
9df628e0
RE
9094 return 0;
9095
e17a4113 9096 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
9097 return 1;
9098}
9099
faa95490
DJ
9100/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9101 return the target PC. Otherwise return 0. */
c906108c
SS
9102
9103CORE_ADDR
52f729a7 9104arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 9105{
2c02bd72 9106 const char *name;
faa95490 9107 int namelen;
c906108c
SS
9108 CORE_ADDR start_addr;
9109
9110 /* Find the starting address and name of the function containing the PC. */
9111 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9112 return 0;
9113
faa95490
DJ
9114 /* If PC is in a Thumb call or return stub, return the address of the
9115 target PC, which is in a register. The thunk functions are called
9116 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
9117 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9118 functions, named __ARM_call_via_r[0-7]. */
9119 if (strncmp (name, "_call_via_", 10) == 0
9120 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
c906108c 9121 {
ed9a39eb
JM
9122 /* Use the name suffix to determine which register contains the
9123 target PC. */
c5aa993b
JM
9124 static char *table[15] =
9125 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9126 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9127 };
c906108c 9128 int regno;
faa95490 9129 int offset = strlen (name) - 2;
c906108c
SS
9130
9131 for (regno = 0; regno <= 14; regno++)
faa95490 9132 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 9133 return get_frame_register_unsigned (frame, regno);
c906108c 9134 }
ed9a39eb 9135
faa95490
DJ
9136 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9137 non-interworking calls to foo. We could decode the stubs
9138 to find the target but it's easier to use the symbol table. */
9139 namelen = strlen (name);
9140 if (name[0] == '_' && name[1] == '_'
9141 && ((namelen > 2 + strlen ("_from_thumb")
9142 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9143 strlen ("_from_thumb")) == 0)
9144 || (namelen > 2 + strlen ("_from_arm")
9145 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9146 strlen ("_from_arm")) == 0)))
9147 {
9148 char *target_name;
9149 int target_len = namelen - 2;
9150 struct minimal_symbol *minsym;
9151 struct objfile *objfile;
9152 struct obj_section *sec;
9153
9154 if (name[namelen - 1] == 'b')
9155 target_len -= strlen ("_from_thumb");
9156 else
9157 target_len -= strlen ("_from_arm");
9158
9159 target_name = alloca (target_len + 1);
9160 memcpy (target_name, name + 2, target_len);
9161 target_name[target_len] = '\0';
9162
9163 sec = find_pc_section (pc);
9164 objfile = (sec == NULL) ? NULL : sec->objfile;
9165 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9166 if (minsym != NULL)
9167 return SYMBOL_VALUE_ADDRESS (minsym);
9168 else
9169 return 0;
9170 }
9171
c5aa993b 9172 return 0; /* not a stub */
c906108c
SS
9173}
9174
afd7eef0
RE
9175static void
9176set_arm_command (char *args, int from_tty)
9177{
edefbb7c
AC
9178 printf_unfiltered (_("\
9179\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
9180 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9181}
9182
9183static void
9184show_arm_command (char *args, int from_tty)
9185{
26304000 9186 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
9187}
9188
28e97307
DJ
9189static void
9190arm_update_current_architecture (void)
fd50bc42 9191{
28e97307 9192 struct gdbarch_info info;
fd50bc42 9193
28e97307 9194 /* If the current architecture is not ARM, we have nothing to do. */
1cf3db46 9195 if (gdbarch_bfd_arch_info (target_gdbarch)->arch != bfd_arch_arm)
28e97307 9196 return;
fd50bc42 9197
28e97307
DJ
9198 /* Update the architecture. */
9199 gdbarch_info_init (&info);
fd50bc42 9200
28e97307 9201 if (!gdbarch_update_p (info))
9b20d036 9202 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
9203}
9204
9205static void
9206set_fp_model_sfunc (char *args, int from_tty,
9207 struct cmd_list_element *c)
9208{
9209 enum arm_float_model fp_model;
9210
9211 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9212 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9213 {
9214 arm_fp_model = fp_model;
9215 break;
9216 }
9217
9218 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 9219 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
9220 current_fp_model);
9221
28e97307 9222 arm_update_current_architecture ();
fd50bc42
RE
9223}
9224
9225static void
08546159
AC
9226show_fp_model (struct ui_file *file, int from_tty,
9227 struct cmd_list_element *c, const char *value)
fd50bc42 9228{
1cf3db46 9229 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
fd50bc42 9230
28e97307 9231 if (arm_fp_model == ARM_FLOAT_AUTO
1cf3db46 9232 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
28e97307
DJ
9233 fprintf_filtered (file, _("\
9234The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9235 fp_model_strings[tdep->fp_model]);
9236 else
9237 fprintf_filtered (file, _("\
9238The current ARM floating point model is \"%s\".\n"),
9239 fp_model_strings[arm_fp_model]);
9240}
9241
9242static void
9243arm_set_abi (char *args, int from_tty,
9244 struct cmd_list_element *c)
9245{
9246 enum arm_abi_kind arm_abi;
9247
9248 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9249 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9250 {
9251 arm_abi_global = arm_abi;
9252 break;
9253 }
9254
9255 if (arm_abi == ARM_ABI_LAST)
9256 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9257 arm_abi_string);
9258
9259 arm_update_current_architecture ();
9260}
9261
9262static void
9263arm_show_abi (struct ui_file *file, int from_tty,
9264 struct cmd_list_element *c, const char *value)
9265{
1cf3db46 9266 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
28e97307
DJ
9267
9268 if (arm_abi_global == ARM_ABI_AUTO
1cf3db46 9269 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
28e97307
DJ
9270 fprintf_filtered (file, _("\
9271The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9272 arm_abi_strings[tdep->arm_abi]);
9273 else
9274 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9275 arm_abi_string);
fd50bc42
RE
9276}
9277
0428b8f5
DJ
9278static void
9279arm_show_fallback_mode (struct ui_file *file, int from_tty,
9280 struct cmd_list_element *c, const char *value)
9281{
1cf3db46 9282 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
0428b8f5 9283
0963b4bd
MS
9284 fprintf_filtered (file,
9285 _("The current execution mode assumed "
9286 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
9287 arm_fallback_mode_string);
9288}
9289
9290static void
9291arm_show_force_mode (struct ui_file *file, int from_tty,
9292 struct cmd_list_element *c, const char *value)
9293{
1cf3db46 9294 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
0428b8f5 9295
0963b4bd
MS
9296 fprintf_filtered (file,
9297 _("The current execution mode assumed "
9298 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
9299 arm_force_mode_string);
9300}
9301
afd7eef0
RE
9302/* If the user changes the register disassembly style used for info
9303 register and other commands, we also have to switch the style used
9304 in opcodes for disassembly output. This function is run by the "set
9305 arm disassembly" command, and does that. */
bc90b915
FN
9306
9307static void
afd7eef0 9308set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
9309 struct cmd_list_element *c)
9310{
afd7eef0 9311 set_disassembly_style ();
bc90b915
FN
9312}
9313\f
966fbf70 9314/* Return the ARM register name corresponding to register I. */
a208b0cb 9315static const char *
d93859e2 9316arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 9317{
58d6951d
DJ
9318 const int num_regs = gdbarch_num_regs (gdbarch);
9319
9320 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9321 && i >= num_regs && i < num_regs + 32)
9322 {
9323 static const char *const vfp_pseudo_names[] = {
9324 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9325 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9326 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9327 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9328 };
9329
9330 return vfp_pseudo_names[i - num_regs];
9331 }
9332
9333 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9334 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9335 {
9336 static const char *const neon_pseudo_names[] = {
9337 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9338 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9339 };
9340
9341 return neon_pseudo_names[i - num_regs - 32];
9342 }
9343
ff6f572f
DJ
9344 if (i >= ARRAY_SIZE (arm_register_names))
9345 /* These registers are only supported on targets which supply
9346 an XML description. */
9347 return "";
9348
966fbf70
RE
9349 return arm_register_names[i];
9350}
9351
bc90b915 9352static void
afd7eef0 9353set_disassembly_style (void)
bc90b915 9354{
123dc839 9355 int current;
bc90b915 9356
123dc839
DJ
9357 /* Find the style that the user wants. */
9358 for (current = 0; current < num_disassembly_options; current++)
9359 if (disassembly_style == valid_disassembly_styles[current])
9360 break;
9361 gdb_assert (current < num_disassembly_options);
bc90b915 9362
94c30b78 9363 /* Synchronize the disassembler. */
bc90b915
FN
9364 set_arm_regname_option (current);
9365}
9366
082fc60d
RE
9367/* Test whether the COFF symbol-specific value corresponds to a Thumb
9368 function. */
9369
9370static int
9371coff_sym_is_thumb (int val)
9372{
f8bf5763
PM
9373 return (val == C_THUMBEXT
9374 || val == C_THUMBSTAT
9375 || val == C_THUMBEXTFUNC
9376 || val == C_THUMBSTATFUNC
9377 || val == C_THUMBLABEL);
082fc60d
RE
9378}
9379
9380/* arm_coff_make_msymbol_special()
9381 arm_elf_make_msymbol_special()
9382
9383 These functions test whether the COFF or ELF symbol corresponds to
9384 an address in thumb code, and set a "special" bit in a minimal
9385 symbol to indicate that it does. */
9386
34e8f22d 9387static void
082fc60d
RE
9388arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9389{
467d42c4
UW
9390 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9391 == ST_BRANCH_TO_THUMB)
082fc60d
RE
9392 MSYMBOL_SET_SPECIAL (msym);
9393}
9394
34e8f22d 9395static void
082fc60d
RE
9396arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9397{
9398 if (coff_sym_is_thumb (val))
9399 MSYMBOL_SET_SPECIAL (msym);
9400}
9401
60c5725c 9402static void
c1bd65d0 9403arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c
DJ
9404{
9405 struct arm_per_objfile *data = arg;
9406 unsigned int i;
9407
9408 for (i = 0; i < objfile->obfd->section_count; i++)
9409 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9410}
9411
9412static void
9413arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9414 asymbol *sym)
9415{
9416 const char *name = bfd_asymbol_name (sym);
9417 struct arm_per_objfile *data;
9418 VEC(arm_mapping_symbol_s) **map_p;
9419 struct arm_mapping_symbol new_map_sym;
9420
9421 gdb_assert (name[0] == '$');
9422 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9423 return;
9424
9425 data = objfile_data (objfile, arm_objfile_data_key);
9426 if (data == NULL)
9427 {
9428 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9429 struct arm_per_objfile);
9430 set_objfile_data (objfile, arm_objfile_data_key, data);
9431 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9432 objfile->obfd->section_count,
9433 VEC(arm_mapping_symbol_s) *);
9434 }
9435 map_p = &data->section_maps[bfd_get_section (sym)->index];
9436
9437 new_map_sym.value = sym->value;
9438 new_map_sym.type = name[1];
9439
9440 /* Assume that most mapping symbols appear in order of increasing
9441 value. If they were randomly distributed, it would be faster to
9442 always push here and then sort at first use. */
9443 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9444 {
9445 struct arm_mapping_symbol *prev_map_sym;
9446
9447 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9448 if (prev_map_sym->value >= sym->value)
9449 {
9450 unsigned int idx;
9451 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9452 arm_compare_mapping_symbols);
9453 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9454 return;
9455 }
9456 }
9457
9458 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9459}
9460
756fe439 9461static void
61a1198a 9462arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 9463{
9779414d 9464 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 9465 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
9466
9467 /* If necessary, set the T bit. */
9468 if (arm_apcs_32)
9469 {
9779414d 9470 ULONGEST val, t_bit;
61a1198a 9471 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
9472 t_bit = arm_psr_thumb_bit (gdbarch);
9473 if (arm_pc_is_thumb (gdbarch, pc))
9474 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9475 val | t_bit);
756fe439 9476 else
61a1198a 9477 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 9478 val & ~t_bit);
756fe439
DJ
9479 }
9480}
123dc839 9481
58d6951d
DJ
9482/* Read the contents of a NEON quad register, by reading from two
9483 double registers. This is used to implement the quad pseudo
9484 registers, and for argument passing in case the quad registers are
9485 missing; vectors are passed in quad registers when using the VFP
9486 ABI, even if a NEON unit is not present. REGNUM is the index of
9487 the quad register, in [0, 15]. */
9488
05d1431c 9489static enum register_status
58d6951d
DJ
9490arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9491 int regnum, gdb_byte *buf)
9492{
9493 char name_buf[4];
9494 gdb_byte reg_buf[8];
9495 int offset, double_regnum;
05d1431c 9496 enum register_status status;
58d6951d
DJ
9497
9498 sprintf (name_buf, "d%d", regnum << 1);
9499 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9500 strlen (name_buf));
9501
9502 /* d0 is always the least significant half of q0. */
9503 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9504 offset = 8;
9505 else
9506 offset = 0;
9507
05d1431c
PA
9508 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9509 if (status != REG_VALID)
9510 return status;
58d6951d
DJ
9511 memcpy (buf + offset, reg_buf, 8);
9512
9513 offset = 8 - offset;
05d1431c
PA
9514 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9515 if (status != REG_VALID)
9516 return status;
58d6951d 9517 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
9518
9519 return REG_VALID;
58d6951d
DJ
9520}
9521
05d1431c 9522static enum register_status
58d6951d
DJ
9523arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9524 int regnum, gdb_byte *buf)
9525{
9526 const int num_regs = gdbarch_num_regs (gdbarch);
9527 char name_buf[4];
9528 gdb_byte reg_buf[8];
9529 int offset, double_regnum;
9530
9531 gdb_assert (regnum >= num_regs);
9532 regnum -= num_regs;
9533
9534 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9535 /* Quad-precision register. */
05d1431c 9536 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
9537 else
9538 {
05d1431c
PA
9539 enum register_status status;
9540
58d6951d
DJ
9541 /* Single-precision register. */
9542 gdb_assert (regnum < 32);
9543
9544 /* s0 is always the least significant half of d0. */
9545 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9546 offset = (regnum & 1) ? 0 : 4;
9547 else
9548 offset = (regnum & 1) ? 4 : 0;
9549
9550 sprintf (name_buf, "d%d", regnum >> 1);
9551 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9552 strlen (name_buf));
9553
05d1431c
PA
9554 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9555 if (status == REG_VALID)
9556 memcpy (buf, reg_buf + offset, 4);
9557 return status;
58d6951d
DJ
9558 }
9559}
9560
9561/* Store the contents of BUF to a NEON quad register, by writing to
9562 two double registers. This is used to implement the quad pseudo
9563 registers, and for argument passing in case the quad registers are
9564 missing; vectors are passed in quad registers when using the VFP
9565 ABI, even if a NEON unit is not present. REGNUM is the index
9566 of the quad register, in [0, 15]. */
9567
9568static void
9569arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9570 int regnum, const gdb_byte *buf)
9571{
9572 char name_buf[4];
9573 gdb_byte reg_buf[8];
9574 int offset, double_regnum;
9575
9576 sprintf (name_buf, "d%d", regnum << 1);
9577 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9578 strlen (name_buf));
9579
9580 /* d0 is always the least significant half of q0. */
9581 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9582 offset = 8;
9583 else
9584 offset = 0;
9585
9586 regcache_raw_write (regcache, double_regnum, buf + offset);
9587 offset = 8 - offset;
9588 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9589}
9590
9591static void
9592arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9593 int regnum, const gdb_byte *buf)
9594{
9595 const int num_regs = gdbarch_num_regs (gdbarch);
9596 char name_buf[4];
9597 gdb_byte reg_buf[8];
9598 int offset, double_regnum;
9599
9600 gdb_assert (regnum >= num_regs);
9601 regnum -= num_regs;
9602
9603 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9604 /* Quad-precision register. */
9605 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9606 else
9607 {
9608 /* Single-precision register. */
9609 gdb_assert (regnum < 32);
9610
9611 /* s0 is always the least significant half of d0. */
9612 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9613 offset = (regnum & 1) ? 0 : 4;
9614 else
9615 offset = (regnum & 1) ? 4 : 0;
9616
9617 sprintf (name_buf, "d%d", regnum >> 1);
9618 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9619 strlen (name_buf));
9620
9621 regcache_raw_read (regcache, double_regnum, reg_buf);
9622 memcpy (reg_buf + offset, buf, 4);
9623 regcache_raw_write (regcache, double_regnum, reg_buf);
9624 }
9625}
9626
123dc839
DJ
9627static struct value *
9628value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9629{
9630 const int *reg_p = baton;
9631 return value_of_register (*reg_p, frame);
9632}
97e03143 9633\f
70f80edf
JT
9634static enum gdb_osabi
9635arm_elf_osabi_sniffer (bfd *abfd)
97e03143 9636{
2af48f68 9637 unsigned int elfosabi;
70f80edf 9638 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 9639
70f80edf 9640 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 9641
28e97307
DJ
9642 if (elfosabi == ELFOSABI_ARM)
9643 /* GNU tools use this value. Check note sections in this case,
9644 as well. */
9645 bfd_map_over_sections (abfd,
9646 generic_elf_osabi_sniff_abi_tag_sections,
9647 &osabi);
97e03143 9648
28e97307 9649 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 9650 return osabi;
97e03143
RE
9651}
9652
54483882
YQ
9653static int
9654arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9655 struct reggroup *group)
9656{
2c291032
YQ
9657 /* The FPS register's type is INT, but it belongs to float_reggroup.
9658 Besides this, the FPS register belongs to save_reggroup,
9659 restore_reggroup, and all_reggroup, of course. */
54483882 9660 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
9661 return (group == float_reggroup
9662 || group == save_reggroup
9663 || group == restore_reggroup
9664 || group == all_reggroup);
54483882
YQ
9665 else
9666 return default_register_reggroup_p (gdbarch, regnum, group);
9667}
9668
25f8c692
JL
9669\f
9670/* For backward-compatibility we allow two 'g' packet lengths with
9671 the remote protocol depending on whether FPA registers are
9672 supplied. M-profile targets do not have FPA registers, but some
9673 stubs already exist in the wild which use a 'g' packet which
9674 supplies them albeit with dummy values. The packet format which
9675 includes FPA registers should be considered deprecated for
9676 M-profile targets. */
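
/* As a concrete check of the guesses registered below (illustrative
   arithmetic, assuming the usual INT_REGISTER_SIZE of 4 and
   FP_REGISTER_SIZE of 12 from arm-tdep.h): the FPA-layout packet is
   16 * 4 + 8 * 12 + 2 * 4 = 168 bytes, while the plain M-profile packet
   is 16 * 4 + 4 = 68 bytes.  */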
9677
9678static void
9679arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9680{
9681 if (gdbarch_tdep (gdbarch)->is_m)
9682 {
9683 /* If we know from the executable this is an M-profile target,
9684 cater for remote targets whose register set layout is the
9685 same as the FPA layout. */
9686 register_remote_g_packet_guess (gdbarch,
9687 /* r0-r12,sp,lr,pc; f0-f7; fps,cpsr */
9688 (16 * INT_REGISTER_SIZE)
9689 + (8 * FP_REGISTER_SIZE)
9690 + (2 * INT_REGISTER_SIZE),
9691 tdesc_arm_with_m_fpa_layout);
9692
9693 /* The regular M-profile layout. */
9694 register_remote_g_packet_guess (gdbarch,
9695 /* r0-r12,sp,lr,pc; xpsr */
9696 (16 * INT_REGISTER_SIZE)
9697 + INT_REGISTER_SIZE,
9698 tdesc_arm_with_m);
9699 }
9700
9701 /* Otherwise we don't have a useful guess. */
9702}
9703
70f80edf 9704\f
da3c6d4a
MS
9705/* Initialize the current architecture based on INFO. If possible,
9706 re-use an architecture from ARCHES, which is a list of
9707 architectures already created during this debugging session.
97e03143 9708
da3c6d4a
MS
9709 Called e.g. at program startup, when reading a core file, and when
9710 reading a binary file. */
97e03143 9711
39bbf761
RE
9712static struct gdbarch *
9713arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9714{
97e03143 9715 struct gdbarch_tdep *tdep;
39bbf761 9716 struct gdbarch *gdbarch;
28e97307
DJ
9717 struct gdbarch_list *best_arch;
9718 enum arm_abi_kind arm_abi = arm_abi_global;
9719 enum arm_float_model fp_model = arm_fp_model;
123dc839 9720 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 9721 int i, is_m = 0;
58d6951d
DJ
9722 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9723 int have_neon = 0;
ff6f572f 9724 int have_fpa_registers = 1;
9779414d
DJ
9725 const struct target_desc *tdesc = info.target_desc;
9726
9727 /* If we have an object to base this architecture on, try to determine
9728 its ABI. */
9729
9730 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9731 {
9732 int ei_osabi, e_flags;
9733
9734 switch (bfd_get_flavour (info.abfd))
9735 {
9736 case bfd_target_aout_flavour:
9737 /* Assume it's an old APCS-style ABI. */
9738 arm_abi = ARM_ABI_APCS;
9739 break;
9740
9741 case bfd_target_coff_flavour:
9742 /* Assume it's an old APCS-style ABI. */
9743 /* XXX WinCE? */
9744 arm_abi = ARM_ABI_APCS;
9745 break;
9746
9747 case bfd_target_elf_flavour:
9748 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9749 e_flags = elf_elfheader (info.abfd)->e_flags;
9750
9751 if (ei_osabi == ELFOSABI_ARM)
9752 {
9753 /* GNU tools used to use this value, but do not for EABI
9754 objects. There's nowhere to tag an EABI version
9755 anyway, so assume APCS. */
9756 arm_abi = ARM_ABI_APCS;
9757 }
9758 else if (ei_osabi == ELFOSABI_NONE)
9759 {
9760 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9761 int attr_arch, attr_profile;
9762
9763 switch (eabi_ver)
9764 {
9765 case EF_ARM_EABI_UNKNOWN:
9766 /* Assume GNU tools. */
9767 arm_abi = ARM_ABI_APCS;
9768 break;
9769
9770 case EF_ARM_EABI_VER4:
9771 case EF_ARM_EABI_VER5:
9772 arm_abi = ARM_ABI_AAPCS;
9773 /* EABI binaries default to VFP float ordering.
9774 They may also contain build attributes that can
9775 be used to identify if the VFP argument-passing
9776 ABI is in use. */
9777 if (fp_model == ARM_FLOAT_AUTO)
9778 {
9779#ifdef HAVE_ELF
9780 switch (bfd_elf_get_obj_attr_int (info.abfd,
9781 OBJ_ATTR_PROC,
9782 Tag_ABI_VFP_args))
9783 {
9784 case 0:
9785 /* "The user intended FP parameter/result
9786 passing to conform to AAPCS, base
9787 variant". */
9788 fp_model = ARM_FLOAT_SOFT_VFP;
9789 break;
9790 case 1:
9791 /* "The user intended FP parameter/result
9792 passing to conform to AAPCS, VFP
9793 variant". */
9794 fp_model = ARM_FLOAT_VFP;
9795 break;
9796 case 2:
9797 /* "The user intended FP parameter/result
9798 passing to conform to tool chain-specific
9799 conventions" - we don't know any such
9800 conventions, so leave it as "auto". */
9801 break;
9802 default:
9803 /* Attribute value not mentioned in the
9804 October 2008 ABI, so leave it as
9805 "auto". */
9806 break;
9807 }
9808#else
9809 fp_model = ARM_FLOAT_SOFT_VFP;
9810#endif
9811 }
9812 break;
9813
9814 default:
9815 /* Leave it as "auto". */
9816 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9817 break;
9818 }
9819
9820#ifdef HAVE_ELF
9821 /* Detect M-profile programs. This only works if the
9822 executable file includes build attributes; GCC does
9823 copy them to the executable, but e.g. RealView does
9824 not. */
9825 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9826 Tag_CPU_arch);
0963b4bd
MS
9827 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9828 OBJ_ATTR_PROC,
9779414d
DJ
9829 Tag_CPU_arch_profile);
9830 /* GCC specifies the profile for v6-M; RealView only
9831 specifies the profile for architectures starting with
9832 V7 (as opposed to architectures with a tag
9833 numerically greater than TAG_CPU_ARCH_V7). */
9834 if (!tdesc_has_registers (tdesc)
9835 && (attr_arch == TAG_CPU_ARCH_V6_M
9836 || attr_arch == TAG_CPU_ARCH_V6S_M
9837 || attr_profile == 'M'))
25f8c692 9838 is_m = 1;
9779414d
DJ
9839#endif
9840 }
9841
9842 if (fp_model == ARM_FLOAT_AUTO)
9843 {
9844 int e_flags = elf_elfheader (info.abfd)->e_flags;
9845
9846 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9847 {
9848 case 0:
9849 /* Leave it as "auto". Strictly speaking this case
9850 means FPA, but almost nobody uses that now, and
9851 many toolchains fail to set the appropriate bits
9852 for the floating-point model they use. */
9853 break;
9854 case EF_ARM_SOFT_FLOAT:
9855 fp_model = ARM_FLOAT_SOFT_FPA;
9856 break;
9857 case EF_ARM_VFP_FLOAT:
9858 fp_model = ARM_FLOAT_VFP;
9859 break;
9860 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9861 fp_model = ARM_FLOAT_SOFT_VFP;
9862 break;
9863 }
9864 }
9865
9866 if (e_flags & EF_ARM_BE8)
9867 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9868
9869 break;
9870
9871 default:
9872 /* Leave it as "auto". */
9873 break;
9874 }
9875 }
123dc839
DJ
9876
9877 /* Check any target description for validity. */
9779414d 9878 if (tdesc_has_registers (tdesc))
123dc839
DJ
9879 {
9880 /* For most registers we require GDB's default names; but also allow
9881 the numeric names for sp / lr / pc, as a convenience. */
9882 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9883 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9884 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9885
9886 const struct tdesc_feature *feature;
58d6951d 9887 int valid_p;
123dc839 9888
9779414d 9889 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9890 "org.gnu.gdb.arm.core");
9891 if (feature == NULL)
9779414d
DJ
9892 {
9893 feature = tdesc_find_feature (tdesc,
9894 "org.gnu.gdb.arm.m-profile");
9895 if (feature == NULL)
9896 return NULL;
9897 else
9898 is_m = 1;
9899 }
123dc839
DJ
9900
9901 tdesc_data = tdesc_data_alloc ();
9902
9903 valid_p = 1;
9904 for (i = 0; i < ARM_SP_REGNUM; i++)
9905 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9906 arm_register_names[i]);
9907 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9908 ARM_SP_REGNUM,
9909 arm_sp_names);
9910 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9911 ARM_LR_REGNUM,
9912 arm_lr_names);
9913 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9914 ARM_PC_REGNUM,
9915 arm_pc_names);
9779414d
DJ
9916 if (is_m)
9917 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9918 ARM_PS_REGNUM, "xpsr");
9919 else
9920 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9921 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9922
9923 if (!valid_p)
9924 {
9925 tdesc_data_cleanup (tdesc_data);
9926 return NULL;
9927 }
9928
9779414d 9929 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9930 "org.gnu.gdb.arm.fpa");
9931 if (feature != NULL)
9932 {
9933 valid_p = 1;
9934 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9935 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9936 arm_register_names[i]);
9937 if (!valid_p)
9938 {
9939 tdesc_data_cleanup (tdesc_data);
9940 return NULL;
9941 }
9942 }
ff6f572f
DJ
9943 else
9944 have_fpa_registers = 0;
9945
9779414d 9946 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9947 "org.gnu.gdb.xscale.iwmmxt");
9948 if (feature != NULL)
9949 {
9950 static const char *const iwmmxt_names[] = {
9951 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9952 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9953 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9954 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9955 };
9956
9957 valid_p = 1;
9958 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9959 valid_p
9960 &= tdesc_numbered_register (feature, tdesc_data, i,
9961 iwmmxt_names[i - ARM_WR0_REGNUM]);
9962
9963 /* Check for the control registers, but do not fail if they
9964 are missing. */
9965 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9966 tdesc_numbered_register (feature, tdesc_data, i,
9967 iwmmxt_names[i - ARM_WR0_REGNUM]);
9968
9969 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9970 valid_p
9971 &= tdesc_numbered_register (feature, tdesc_data, i,
9972 iwmmxt_names[i - ARM_WR0_REGNUM]);
9973
9974 if (!valid_p)
9975 {
9976 tdesc_data_cleanup (tdesc_data);
9977 return NULL;
9978 }
9979 }
58d6951d
DJ
9980
9981 /* If we have a VFP unit, check whether the single precision registers
9982 are present. If not, then we will synthesize them as pseudo
9983 registers. */
9779414d 9984 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9985 "org.gnu.gdb.arm.vfp");
9986 if (feature != NULL)
9987 {
9988 static const char *const vfp_double_names[] = {
9989 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9990 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9991 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9992 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9993 };
9994
9995 /* Require the double precision registers. There must be either
9996 16 or 32. */
9997 valid_p = 1;
9998 for (i = 0; i < 32; i++)
9999 {
10000 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10001 ARM_D0_REGNUM + i,
10002 vfp_double_names[i]);
10003 if (!valid_p)
10004 break;
10005 }
2b9e5ea6
UW
10006 if (!valid_p && i == 16)
10007 valid_p = 1;
58d6951d 10008
2b9e5ea6
UW
10009 /* Also require FPSCR. */
10010 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10011 ARM_FPSCR_REGNUM, "fpscr");
10012 if (!valid_p)
58d6951d
DJ
10013 {
10014 tdesc_data_cleanup (tdesc_data);
10015 return NULL;
10016 }
10017
10018 if (tdesc_unnumbered_register (feature, "s0") == 0)
10019 have_vfp_pseudos = 1;
10020
10021 have_vfp_registers = 1;
10022
10023 /* If we have VFP, also check for NEON. The architecture allows
10024 NEON without VFP (integer vector operations only), but GDB
10025 does not support that. */
9779414d 10026 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
10027 "org.gnu.gdb.arm.neon");
10028 if (feature != NULL)
10029 {
10030 /* NEON requires 32 double-precision registers. */
10031 if (i != 32)
10032 {
10033 tdesc_data_cleanup (tdesc_data);
10034 return NULL;
10035 }
10036
10037 /* If there are quad registers defined by the stub, use
10038 their type; otherwise (normally) provide them with
10039 the default type. */
10040 if (tdesc_unnumbered_register (feature, "q0") == 0)
10041 have_neon_pseudos = 1;
10042
10043 have_neon = 1;
10044 }
10045 }
123dc839 10046 }
39bbf761 10047
28e97307
DJ
10048 /* If there is already a candidate, use it. */
10049 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10050 best_arch != NULL;
10051 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10052 {
b8926edc
DJ
10053 if (arm_abi != ARM_ABI_AUTO
10054 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
10055 continue;
10056
b8926edc
DJ
10057 if (fp_model != ARM_FLOAT_AUTO
10058 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
10059 continue;
10060
58d6951d
DJ
10061 /* There are various other properties in tdep that we do not
10062 need to check here: those derived from a target description,
10063 since gdbarches with a different target description are
10064 automatically disqualified. */
10065
9779414d
DJ
10066 /* Do check is_m, though, since it might come from the binary. */
10067 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10068 continue;
10069
28e97307
DJ
10070 /* Found a match. */
10071 break;
10072 }
97e03143 10073
28e97307 10074 if (best_arch != NULL)
123dc839
DJ
10075 {
10076 if (tdesc_data != NULL)
10077 tdesc_data_cleanup (tdesc_data);
10078 return best_arch->gdbarch;
10079 }
28e97307
DJ
10080
10081 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
97e03143
RE
10082 gdbarch = gdbarch_alloc (&info, tdep);
10083
28e97307
DJ
10084 /* Record additional information about the architecture we are defining.
10085 These are gdbarch discriminators, like the OSABI. */
10086 tdep->arm_abi = arm_abi;
10087 tdep->fp_model = fp_model;
9779414d 10088 tdep->is_m = is_m;
ff6f572f 10089 tdep->have_fpa_registers = have_fpa_registers;
58d6951d
DJ
10090 tdep->have_vfp_registers = have_vfp_registers;
10091 tdep->have_vfp_pseudos = have_vfp_pseudos;
10092 tdep->have_neon_pseudos = have_neon_pseudos;
10093 tdep->have_neon = have_neon;
08216dd7 10094
25f8c692
JL
10095 arm_register_g_packet_guesses (gdbarch);
10096
08216dd7 10097 /* Breakpoints. */
9d4fde75 10098 switch (info.byte_order_for_code)
67255d04
RE
10099 {
10100 case BFD_ENDIAN_BIG:
66e810cd
RE
10101 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10102 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10103 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10104 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10105
67255d04
RE
10106 break;
10107
10108 case BFD_ENDIAN_LITTLE:
66e810cd
RE
10109 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10110 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10111 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10112 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10113
67255d04
RE
10114 break;
10115
10116 default:
10117 internal_error (__FILE__, __LINE__,
edefbb7c 10118 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
10119 }
10120
d7b486e7
RE
10121 /* On ARM targets char defaults to unsigned. */
10122 set_gdbarch_char_signed (gdbarch, 0);
10123
cca44b1b
JB
10124 /* Note: for displaced stepping, this includes the breakpoint, and one word
10125 of additional scratch space. This setting isn't used for anything besides
10126 displaced stepping at present. */
10127 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10128
  /* This should be low enough for everything.  */
  tdep->lowest_pc = 0x20;
  tdep->jb_pc = -1;	/* Longjump support not enabled by default.  */

  /* The default, for both APCS and AAPCS, is to return small
     structures in registers.  */
  tdep->struct_return = reg_struct_return;

  set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
  set_gdbarch_frame_align (gdbarch, arm_frame_align);

  set_gdbarch_write_pc (gdbarch, arm_write_pc);

  /* Frame handling.  */
  set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
  set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
  set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);

  frame_base_set_default (gdbarch, &arm_normal_base);

  /* Address manipulation.  */
  set_gdbarch_smash_text_address (gdbarch, arm_smash_text_address);
  set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);

  /* Advance PC across function entry code.  */
  set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);

  /* Detect whether PC is in function epilogue.  */
  set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);

  /* Skip trampolines.  */
  set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);

  /* The stack grows downward.  */
  set_gdbarch_inner_than (gdbarch, core_addr_lessthan);

  /* Breakpoint manipulation.  */
  set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
  set_gdbarch_remote_breakpoint_from_pc (gdbarch,
                                         arm_remote_breakpoint_from_pc);

  /* Information about registers, etc.  */
  set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
  set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
  set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
  set_gdbarch_register_type (gdbarch, arm_register_type);
  set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);

  /* This "info float" is FPA-specific.  Use the generic version if we
     do not have FPA.  */
  if (gdbarch_tdep (gdbarch)->have_fpa_registers)
    set_gdbarch_print_float_info (gdbarch, arm_print_float_info);

  /* Internal <-> external register number maps.  */
  set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
  set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);

  set_gdbarch_register_name (gdbarch, arm_register_name);

  /* Returning results.  */
  set_gdbarch_return_value (gdbarch, arm_return_value);

  /* Disassembly.  */
  set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);

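  /* The hooks below mark Thumb functions via the minimal symbol
     "special" bit, so that GDB knows to treat their addresses as Thumb
     rather than ARM code.  */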
  /* Minsymbol frobbing.  */
  set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
  set_gdbarch_coff_make_msymbol_special (gdbarch,
                                         arm_coff_make_msymbol_special);
  set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);

  /* Thumb-2 IT block support.  */
  set_gdbarch_adjust_breakpoint_address (gdbarch,
                                         arm_adjust_breakpoint_address);

  /* Virtual tables.  */
  set_gdbarch_vbit_in_delta (gdbarch, 1);

  /* Hook in the ABI-specific overrides, if they have been registered.  */
  gdbarch_init_osabi (info, gdbarch);

  dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);

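  /* Frame unwinders are tried in the order they are appended, so the
     stub, DWARF CFI and exception-index (exidx) unwinders all get a
     chance before falling back to prologue analysis.  */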
  /* Add some default predicates.  */
  frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
  dwarf2_append_unwinders (gdbarch);
  frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
  frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);

  /* Now we have tuned the configuration, set a few final things,
     based on what the OS ABI has told us.  */

  /* If the ABI is not otherwise marked, assume the old GNU APCS.  EABI
     binaries are always marked.  */
  if (tdep->arm_abi == ARM_ABI_AUTO)
    tdep->arm_abi = ARM_ABI_APCS;

  /* Watchpoints are not steppable.  */
  set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);

  /* We used to default to FPA for generic ARM, but almost nobody
     uses that now, and we now provide a way for the user to force
     the model.  So default to the most useful variant.  */
  if (tdep->fp_model == ARM_FLOAT_AUTO)
    tdep->fp_model = ARM_FLOAT_SOFT_FPA;

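  /* An OS ABI handler may have set jb_pc to the offset of the saved PC
     within a jmp_buf; only in that case can GDB resolve the target of a
     longjmp call.  */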
  if (tdep->jb_pc >= 0)
    set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);

  /* Floating point sizes and format.  */
  set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
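  /* FPA (and the FPA-compatible software model) stores doubles in the
     mixed-endian "big word, little byte" order on little-endian targets,
     hence the special double format below; VFP uses plain IEEE order.  */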
  if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
    {
      set_gdbarch_double_format
        (gdbarch, floatformats_ieee_double_littlebyte_bigword);
      set_gdbarch_long_double_format
        (gdbarch, floatformats_ieee_double_littlebyte_bigword);
    }
  else
    {
      set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
      set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
    }

  if (have_vfp_pseudos)
    {
      /* NOTE: These are the only pseudo registers used by
         the ARM target at the moment.  If more are added, a
         little more care in numbering will be needed.  */

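      /* 32 pseudos for the single-precision views s0-s31 of the VFP
         D registers, plus 16 more for the q0-q15 quad views when NEON
         is available (descriptive note; see arm_pseudo_read and
         arm_pseudo_write for how they are composed).  */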
      int num_pseudos = 32;
      if (have_neon_pseudos)
        num_pseudos += 16;
      set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
      set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
      set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
    }

  if (tdesc_data)
    {
      set_tdesc_pseudo_register_name (gdbarch, arm_register_name);

      tdesc_use_registers (gdbarch, tdesc, tdesc_data);

      /* Override tdesc_register_type to adjust the types of VFP
         registers for NEON.  */
      set_gdbarch_register_type (gdbarch, arm_register_type);
    }

  /* Add standard register aliases.  We add aliases even for those
     names which are used by the current architecture - it's simpler,
     and does no harm, since nothing ever lists user registers.  */
  for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
    user_reg_add (gdbarch, arm_register_aliases[i].name,
                  value_of_arm_user_reg, &arm_register_aliases[i].regnum);

  return gdbarch;
}

static void
arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep == NULL)
    return;

  fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
                      (unsigned long) tdep->lowest_pc);
}

extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */

void
_initialize_arm_tdep (void)
{
  struct ui_file *stb;
  long length;
  struct cmd_list_element *new_set, *new_show;
  const char *setname;
  const char *setdesc;
  const char *const *regnames;
  int numregs, i, j;
  static char *helptext;
  char regdesc[1024], *rdptr = regdesc;
  size_t rest = sizeof (regdesc);

  gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);

  arm_objfile_data_key
    = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);

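  /* The exidx machinery records, per objfile, the unwind data parsed
     from the ARM exception-table sections (.ARM.exidx); the exidx frame
     unwinder registered in arm_gdbarch_init consumes that data.  */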
  /* Add ourselves to objfile event chain.  */
  observer_attach_new_objfile (arm_exidx_new_objfile);
  arm_exidx_data_key
    = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);

  /* Register an ELF OS ABI sniffer for ARM binaries.  */
  gdbarch_register_osabi_sniffer (bfd_arch_arm,
                                  bfd_target_elf_flavour,
                                  arm_elf_osabi_sniffer);

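  /* These built-in target descriptions are generated from the XML
     feature files under gdb/features (see the features/arm-with-*.c
     includes at the top of this file); they must be initialized before
     they can be looked up.  */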
  /* Initialize the standard target descriptions.  */
  initialize_tdesc_arm_with_m ();
  initialize_tdesc_arm_with_m_fpa_layout ();
  initialize_tdesc_arm_with_iwmmxt ();
  initialize_tdesc_arm_with_vfpv2 ();
  initialize_tdesc_arm_with_vfpv3 ();
  initialize_tdesc_arm_with_neon ();

  /* Get the number of possible sets of register names defined in opcodes.  */
  num_disassembly_options = get_arm_regname_num_options ();

  /* Add root prefix command for all "set arm"/"show arm" commands.  */
  add_prefix_cmd ("arm", no_class, set_arm_command,
                  _("Various ARM-specific commands."),
                  &setarmcmdlist, "set arm ", 0, &setlist);

  add_prefix_cmd ("arm", no_class, show_arm_command,
                  _("Various ARM-specific commands."),
                  &showarmcmdlist, "show arm ", 0, &showlist);

  /* Sync the opcode insn printer with our register viewer.  */
  parse_arm_disassembler_option ("reg-names-std");

  /* Initialize the array that will be passed to
     add_setshow_enum_cmd().  */
  valid_disassembly_styles
    = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
  for (i = 0; i < num_disassembly_options; i++)
    {
      numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
      valid_disassembly_styles[i] = setname;
      length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
      rdptr += length;
      rest -= length;
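      /* snprintf returns the length the entry would have had, so on
         truncation rdptr/rest would overshoot; regdesc is assumed to be
         large enough that this never happens.  */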
      /* When we find the default names, tell the disassembler to use
         them.  */
      if (!strcmp (setname, "std"))
        {
          disassembly_style = setname;
          set_arm_regname_option (i);
        }
    }
  /* Mark the end of valid options.  */
  valid_disassembly_styles[num_disassembly_options] = NULL;

  /* Create the help text.  */
  stb = mem_fileopen ();
  fprintf_unfiltered (stb, "%s%s%s",
                      _("The valid values are:\n"),
                      regdesc,
                      _("The default is \"std\"."));
  helptext = ui_file_xstrdup (stb, NULL);
  ui_file_delete (stb);

  add_setshow_enum_cmd ("disassembler", no_class,
                        valid_disassembly_styles, &disassembly_style,
                        _("Set the disassembly style."),
                        _("Show the disassembly style."),
                        helptext,
                        set_disassembly_style_sfunc,
                        NULL, /* FIXME: i18n: The disassembly style is
                                 \"%s\".  */
                        &setarmcmdlist, &showarmcmdlist);

  add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
                           _("Set usage of ARM 32-bit mode."),
                           _("Show usage of ARM 32-bit mode."),
                           _("When off, a 26-bit PC will be used."),
                           NULL,
                           NULL, /* FIXME: i18n: Usage of ARM 32-bit
                                    mode is %s.  */
                           &setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the FPU model.  */
  add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
                        _("Set the floating point type."),
                        _("Show the floating point type."),
                        _("auto - Determine the FP type from the OS-ABI.\n\
softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
fpa - FPA co-processor (GCC compiled).\n\
softvfp - Software FP with pure-endian doubles.\n\
vfp - VFP co-processor."),
                        set_fp_model_sfunc, show_fp_model,
                        &setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the ABI.  */
  add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
                        _("Set the ABI."),
                        _("Show the ABI."),
                        NULL, arm_set_abi, arm_show_abi,
                        &setarmcmdlist, &showarmcmdlist);

  /* Add two commands to allow the user to force the assumed
     execution mode.  */
  add_setshow_enum_cmd ("fallback-mode", class_support,
                        arm_mode_strings, &arm_fallback_mode_string,
                        _("Set the mode assumed when symbols are unavailable."),
                        _("Show the mode assumed when symbols are unavailable."),
                        NULL, NULL, arm_show_fallback_mode,
                        &setarmcmdlist, &showarmcmdlist);
  add_setshow_enum_cmd ("force-mode", class_support,
                        arm_mode_strings, &arm_force_mode_string,
                        _("Set the mode assumed even when symbols are available."),
                        _("Show the mode assumed even when symbols are available."),
                        NULL, NULL, arm_show_force_mode,
                        &setarmcmdlist, &showarmcmdlist);
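  /* "Mode" here means ARM versus Thumb: fallback-mode is consulted when
     neither symbols nor mapping symbols identify the instruction set at
     an address, while force-mode overrides them unconditionally.  */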

  /* Debugging flag.  */
  add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
                           _("Set ARM debugging."),
                           _("Show ARM debugging."),
                           _("When on, arm-specific debugging is enabled."),
                           NULL,
                           NULL, /* FIXME: i18n: "ARM debugging is %s."  */
                           &setdebuglist, &showdebuglist);
}