/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2015 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"

#include <ctype.h>		/* XXX for isupper ().  */

#include "frame.h"
#include "inferior.h"
#include "infrun.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "dis-asm.h"		/* For register styles.  */
#include "regcache.h"
#include "reggroups.h"
#include "doublest.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2-frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "remote.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observer.h"

#include "arch/arm.h"
#include "arch/arm-get-next-pcs.h"
#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "vec.h"

#include "record.h"
#include "record-full.h"

#include "features/arm-with-m.c"
#include "features/arm-with-m-fpa-layout.c"
#include "features/arm-with-m-vfp-d16.c"
#include "features/arm-with-iwmmxt.c"
#include "features/arm-with-vfpv2.c"
#include "features/arm-with-vfpv3.c"
#include "features/arm-with-neon.c"

static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
  {
    "auto",
    "arm",
    "thumb",
    NULL
  };

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode had been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (char *, int,
					 struct cmd_list_element *);
static void set_disassembly_style (void);

static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb
};

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

int
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}
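
/* CPSR_T is the architectural Thumb (T) bit, bit 5 of the A/R-profile
   CPSR; XPSR_T is bit 24 of the M-profile xPSR, which is where the
   EPSR keeps its T bit.  */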

/* Determine if the processor is currently executing in Thumb mode.  */

int
arm_is_thumb (struct regcache *regcache)
{
  ULONGEST cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Callback for VEC_lower_bound.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */
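/* The types come from the ARM ELF mapping symbols "$a", "$t" and "$d",
   which mark the start of runs of ARM code, Thumb code and data
   respectively, so a non-zero result is one of 'a', 't' or 'd'.  */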

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  return 0;
}

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If we are checking the mode of a displaced instruction in the copy
     area, the mode should be determined by the instruction at the
     original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let them.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}

/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && (val & 0xfffffff0) == 0xfffffff0)
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}

/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16 bits of the instruction, and INSN2 is the second 16 bits of
   the instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)		      \
   | (bits ((insn1), 10, 10) << 11)	      \
   | (bits ((insn2), 12, 14) << 8)	      \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
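
/* For example, "movw r0, #0x1234" is 0xe3010234 in encoding A (imm4 =
   0x1, imm12 = 0x234) and the halfword pair 0xf241/0x2034 in encoding
   T; both macros reassemble the immediate as 0x1234.  */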

/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}
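
/* For example, an IMM of 0x155 takes the "case 1" path and expands to
   0x00550055, while 0x4ff has a count of 9 (a rotation), so it expands
   to 0xff rotated right by 9, i.e. 0x7f800000.  */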

/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		  /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	  /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
}
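
/* For example, 0xb008 ("add sp, #32") and 0xbd10 ("pop {r4, pc}") both
   match, while 0xb5f0 ("push {r4-r7, lr}") does not.  */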

/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

static CORE_ADDR
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int i;
  pv_t regs[16];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR offset;
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  while (start < limit)
    {
      unsigned short insn;

      insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	{
	  int regno;
	  int mask;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);
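	  /* For example, "push {r4-r7, lr}" is 0xb5f0, giving a mask of
	     0x40f0: bits 4-7 for r4-r7 plus bit 14 for LR.  */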

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						       -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	{
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						 -offset);
	}
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
						    (insn & 0xff) << 2);
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
						   bits (insn, 6, 8));
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
						    bits (insn, 0, 7));
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	{
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
	}
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	{
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
	}
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	{
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;
	  pv_t addr;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[regno]);
	}
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	{
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);
	  pv_t addr;

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[rd]);
	}
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
	;
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	;
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
	;
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
	;
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	   on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	{
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;
	  CORE_ADDR loc;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
	}
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	{
	  unsigned short inst2;

	  inst2 = read_memory_unsigned_integer (start + 2, 2,
						byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	    {
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      CORE_ADDR nextpc;
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
		break;
	    }

	  else if ((insn & 0xffd0) == 0xe900    /* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      pv_t addr = regs[bits (insn, 0, 3)];
	      int regno;

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		  {
		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);
		  }

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (insn & 0x0080)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),
			     4, regs[regno2]);

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (inst2 & 0x0200)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);

	      if (inst2 & 0x0100)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr;

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);
	    }

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	    ;

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	    }

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	    }

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	    {
	      unsigned int imm
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	    }

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	    {
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	    }

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 11);
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	    }

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 7) << 2;
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	    }

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	    {
	      /* Don't scan past anything that might change control flow.  */
	      break;
	    }
	  else
	    {
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
	    }

	  start += 2;
	}
      else if (thumb_instruction_changes_pc (insn))
	{
	  /* Don't scan past anything that might change control flow.  */
	  break;
	}
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
	}

      start += 2;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  if (cache == NULL)
    {
      do_cleanups (back_to);
      return unrecognized_pc;
    }

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
    }
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
    }
  else
    {
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
    }

  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;
}

/* Try to analyze the instructions starting from PC, which load the symbol
   __stack_chk_guard.  Return the address of the instruction after the ones
   loading this symbol, set the destination register number in *DESTREG, and
   set the size of the loading instructions in *OFFSET.  Return 0 if the
   instructions are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}

/* Try to skip the sequence of instructions used for the stack protector.
   If PC points to the first instruction of this sequence, return the
   address of the first instruction after the sequence; otherwise, return
   the original PC.

   On ARM, this sequence of instructions is mainly composed of three steps,
     Step 1: load the symbol __stack_chk_guard,
     Step 2: load from the address of __stack_chk_guard,
     Step 3: store it somewhere else.

   Usually, the instructions in step 2 and step 3 are the same on various
   ARM architectures.  In step 2, it is one instruction 'ldr Rx, [Rn, #0]',
   and in step 3, it is also one instruction 'str Rx, [r7, #immd]'.
   However, the instructions in step 1 vary between different ARM
   architectures.  On ARMv7, they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str are very common instructions, we can't use them alone as
   the 'fingerprint' or 'signature' of the stack protector sequence.  Here
   we choose the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		      "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The total size of the two ldr/str instructions is 4 on Thumb-2,
     while it is 8 on ARM.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}

1253
da3c6d4a
MS
1254/* Advance the PC across any function entry prologue instructions to
1255 reach some "real" code.
34e8f22d
RE
1256
1257 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1258 prologue:
c906108c 1259
c5aa993b
JM
1260 mov ip, sp
1261 [stmfd sp!, {a1,a2,a3,a4}]
1262 stmfd sp!, {...,fp,ip,lr,pc}
ed9a39eb
JM
1263 [stfe f7, [sp, #-12]!]
1264 [stfe f6, [sp, #-12]!]
1265 [stfe f5, [sp, #-12]!]
1266 [stfe f4, [sp, #-12]!]
0963b4bd 1267 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1268
34e8f22d 1269static CORE_ADDR
6093d2eb 1270arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1271{
e17a4113 1272 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
c906108c 1273 unsigned long inst;
a89fea3c 1274 CORE_ADDR func_addr, limit_pc;
c906108c 1275
a89fea3c
JL
1276 /* See if we can determine the end of the prologue via the symbol table.
1277 If so, then return either PC, or the PC after the prologue, whichever
1278 is greater. */
1279 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1280 {
d80b854b
UW
1281 CORE_ADDR post_prologue_pc
1282 = skip_prologue_using_sal (gdbarch, func_addr);
43f3e411 1283 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
0d39a070 1284
621c6d5b
YQ
1285 if (post_prologue_pc)
1286 post_prologue_pc
1287 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1288
1289
0d39a070
DJ
1290 /* GCC always emits a line note before the prologue and another
1291 one after, even if the two are at the same address or on the
1292 same line. Take advantage of this so that we do not need to
1293 know every instruction that might appear in the prologue. We
1294 will have producer information for most binaries; if it is
1295 missing (e.g. for -gstabs), assuming the GNU tools. */
1296 if (post_prologue_pc
43f3e411
DE
1297 && (cust == NULL
1298 || COMPUNIT_PRODUCER (cust) == NULL
61012eef
GB
1299 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1300 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
0d39a070
DJ
1301 return post_prologue_pc;
1302
a89fea3c 1303 if (post_prologue_pc != 0)
0d39a070
DJ
1304 {
1305 CORE_ADDR analyzed_limit;
1306
1307 /* For non-GCC compilers, make sure the entire line is an
1308 acceptable prologue; GDB will round this function's
1309 return value up to the end of the following line so we
1310 can not skip just part of a line (and we do not want to).
1311
1312 RealView does not treat the prologue specially, but does
1313 associate prologue code with the opening brace; so this
1314 lets us skip the first line if we think it is the opening
1315 brace. */
9779414d 1316 if (arm_pc_is_thumb (gdbarch, func_addr))
0d39a070
DJ
1317 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1318 post_prologue_pc, NULL);
1319 else
1320 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1321 post_prologue_pc, NULL);
1322
1323 if (analyzed_limit != post_prologue_pc)
1324 return func_addr;
1325
1326 return post_prologue_pc;
1327 }
c906108c
SS
1328 }
1329
a89fea3c
JL
1330 /* Can't determine prologue from the symbol table, need to examine
1331 instructions. */
c906108c 1332
a89fea3c
JL
1333 /* Find an upper limit on the function prologue using the debug
1334 information. If the debug information could not be used to provide
1335 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1336 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1337 limit_pc = skip_prologue_using_sal (gdbarch, pc);
a89fea3c
JL
1338 if (limit_pc == 0)
1339 limit_pc = pc + 64; /* Magic. */
1340
c906108c 1341
29d73ae4 1342 /* Check if this is Thumb code. */
9779414d 1343 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1344 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
21daaaaf
YQ
1345 else
1346 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
c906108c 1347}
94c30b78 1348
c5aa993b 1349/* *INDENT-OFF* */
c906108c
SS
1350/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1351 This function decodes a Thumb function prologue to determine:
1352 1) the size of the stack frame
1353 2) which registers are saved on it
1354 3) the offsets of saved regs
1355 4) the offset from the stack pointer to the frame pointer
c906108c 1356
da59e081
JM
1357 A typical Thumb function prologue would create this stack frame
1358 (offsets relative to FP)
c906108c
SS
1359 old SP -> 24 stack parameters
1360 20 LR
1361 16 R7
1362 R7 -> 0 local variables (16 bytes)
1363 SP -> -12 additional stack space (12 bytes)
1364 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1365 12 bytes. The frame register is R7.
da59e081 1366
da3c6d4a
MS
1367 The comments for thumb_skip_prolog() describe the algorithm we use
1368 to detect the end of the prolog. */
c5aa993b
JM
1369/* *INDENT-ON* */
1370
c906108c 1371static void
be8626e0 1372thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1373 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1374{
1375 CORE_ADDR prologue_start;
1376 CORE_ADDR prologue_end;
c906108c 1377
b39cc962
DJ
1378 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1379 &prologue_end))
c906108c 1380 {
ec3d575a
UW
1381 /* See comment in arm_scan_prologue for an explanation of
1382 this heuristics. */
1383 if (prologue_end > prologue_start + 64)
1384 {
1385 prologue_end = prologue_start + 64;
1386 }
c906108c
SS
1387 }
1388 else
f7060f85
DJ
1389 /* We're in the boondocks: we have no idea where the start of the
1390 function is. */
1391 return;
c906108c 1392
eb5492fa 1393 prologue_end = min (prologue_end, prev_pc);
c906108c 1394
be8626e0 1395 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1396}
1397
f303bc3e
YQ
1398/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1399 otherwise. */
1400
1401static int
1402arm_instruction_restores_sp (unsigned int insn)
1403{
1404 if (bits (insn, 28, 31) != INST_NV)
1405 {
1406 if ((insn & 0x0df0f000) == 0x0080d000
1407 /* ADD SP (register or immediate). */
1408 || (insn & 0x0df0f000) == 0x0040d000
1409 /* SUB SP (register or immediate). */
1410 || (insn & 0x0ffffff0) == 0x01a0d000
1411 /* MOV SP. */
1412 || (insn & 0x0fff0000) == 0x08bd0000
1413 /* POP (LDMIA). */
1414 || (insn & 0x0fff0000) == 0x049d0000)
1415 /* POP of a single register. */
1416 return 1;
1417 }
1418
1419 return 0;
1420}
1421
0d39a070
DJ
1422/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1423 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1424 fill it in. Return the first address not recognized as a prologue
1425 instruction.
eb5492fa 1426
0d39a070
DJ
1427 We recognize all the instructions typically found in ARM prologues,
1428 plus harmless instructions which can be skipped (either for analysis
1429 purposes, or a more restrictive set that can be skipped when finding
1430 the end of the prologue). */
1431
1432static CORE_ADDR
1433arm_analyze_prologue (struct gdbarch *gdbarch,
1434 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1435 struct arm_prologue_cache *cache)
1436{
1437 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1438 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1439 int regno;
1440 CORE_ADDR offset, current_pc;
1441 pv_t regs[ARM_FPS_REGNUM];
1442 struct pv_area *stack;
1443 struct cleanup *back_to;
0d39a070
DJ
1444 CORE_ADDR unrecognized_pc = 0;
1445
1446 /* Search the prologue looking for instructions that set up the
96baa820 1447 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1448
96baa820
JM
1449 Be careful, however, and if it doesn't look like a prologue,
1450 don't try to scan it. If, for instance, a frameless function
1451 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1452 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1453 and other operations that rely on a knowledge of the stack
0d39a070 1454 traceback. */
d4473757 1455
4be43953
DJ
1456 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1457 regs[regno] = pv_register (regno, 0);
55f960e1 1458 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1459 back_to = make_cleanup_free_pv_area (stack);
1460
94c30b78
MS
1461 for (current_pc = prologue_start;
1462 current_pc < prologue_end;
f43845b3 1463 current_pc += 4)
96baa820 1464 {
e17a4113
UW
1465 unsigned int insn
1466 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1467
94c30b78 1468 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1469 {
4be43953 1470 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1471 continue;
1472 }
0d39a070
DJ
1473 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1474 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1475 {
1476 unsigned imm = insn & 0xff; /* immediate value */
1477 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1478 int rd = bits (insn, 12, 15);
28cd8767 1479 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1480 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1481 continue;
1482 }
0d39a070
DJ
1483 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1484 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1485 {
1486 unsigned imm = insn & 0xff; /* immediate value */
1487 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1488 int rd = bits (insn, 12, 15);
28cd8767 1489 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1490 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1491 continue;
1492 }
0963b4bd
MS
1493 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1494 [sp, #-4]! */
f43845b3 1495 {
4be43953
DJ
1496 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1497 break;
1498 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1499 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1500 regs[bits (insn, 12, 15)]);
f43845b3
MS
1501 continue;
1502 }
1503 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1504 /* stmfd sp!, {..., fp, ip, lr, pc}
1505 or
1506 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1507 {
d4473757 1508 int mask = insn & 0xffff;
ed9a39eb 1509
4be43953
DJ
1510 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1511 break;
1512
94c30b78 1513 /* Calculate offsets of saved registers. */
34e8f22d 1514 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1515 if (mask & (1 << regno))
1516 {
0963b4bd
MS
1517 regs[ARM_SP_REGNUM]
1518 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1519 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1520 }
1521 }
0d39a070
DJ
1522 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1523 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1524 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1525 {
1526 /* No need to add this to saved_regs -- it's just an arg reg. */
1527 continue;
1528 }
0d39a070
DJ
1529 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1530 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1531 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1532 {
1533 /* No need to add this to saved_regs -- it's just an arg reg. */
1534 continue;
1535 }
0963b4bd
MS
1536 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1537 { registers } */
0d39a070
DJ
1538 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1539 {
1540 /* No need to add this to saved_regs -- it's just arg regs. */
1541 continue;
1542 }
d4473757
KB
1543 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1544 {
94c30b78
MS
1545 unsigned imm = insn & 0xff; /* immediate value */
1546 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1547 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1548 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1549 }
1550 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1551 {
94c30b78
MS
1552 unsigned imm = insn & 0xff; /* immediate value */
1553 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1554 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1555 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1556 }
0963b4bd
MS
1557 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1558 [sp, -#c]! */
2af46ca0 1559 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1560 {
4be43953
DJ
1561 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1562 break;
1563
1564 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1565 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1566 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1567 }
0963b4bd
MS
1568 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1569 [sp!] */
2af46ca0 1570 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1571 {
1572 int n_saved_fp_regs;
1573 unsigned int fp_start_reg, fp_bound_reg;
1574
4be43953
DJ
1575 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1576 break;
1577
94c30b78 1578 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1579 {
d4473757
KB
1580 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1581 n_saved_fp_regs = 3;
1582 else
1583 n_saved_fp_regs = 1;
96baa820 1584 }
d4473757 1585 else
96baa820 1586 {
d4473757
KB
1587 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1588 n_saved_fp_regs = 2;
1589 else
1590 n_saved_fp_regs = 4;
96baa820 1591 }
d4473757 1592
34e8f22d 1593 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1594 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1595 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1596 {
4be43953
DJ
1597 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1598 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1599 regs[fp_start_reg++]);
96baa820 1600 }
c906108c 1601 }
0d39a070
DJ
1602 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1603 {
1604 /* Allow some special function calls when skipping the
1605 prologue; GCC generates these before storing arguments to
1606 the stack. */
1607 CORE_ADDR dest = BranchDest (current_pc, insn);
1608
e0634ccf 1609 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1610 continue;
1611 else
1612 break;
1613 }
d4473757 1614 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1615 break; /* Condition not true, exit early. */
0d39a070
DJ
1616 else if (arm_instruction_changes_pc (insn))
1617 /* Don't scan past anything that might change control flow. */
1618 break;
f303bc3e
YQ
1619 else if (arm_instruction_restores_sp (insn))
1620 {
1621 /* Don't scan past the epilogue. */
1622 break;
1623 }
d19f7eee
UW
1624 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1625 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1626 /* Ignore block loads from the stack, potentially copying
1627 parameters from memory. */
1628 continue;
1629 else if ((insn & 0xfc500000) == 0xe4100000
1630 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1631 /* Similarly ignore single loads from the stack. */
1632 continue;
0d39a070
DJ
1633 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1634 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1635 register instead of the stack. */
d4473757 1636 continue;
0d39a070
DJ
1637 else
1638 {
21daaaaf
YQ
 1639 /* The optimizer might shove anything into the prologue.  If we
 1640 are building up the cache (cache != NULL) from scanning the
 1641 prologue, we just skip what we don't recognize and scan further
 1642 to make the cache as complete as possible.  However, if we are
 1643 merely skipping the prologue, we stop immediately at the first
 1644 unrecognized instruction. */
0d39a070 1645 unrecognized_pc = current_pc;
21daaaaf
YQ
1646 if (cache != NULL)
1647 continue;
1648 else
1649 break;
0d39a070 1650 }
c906108c
SS
1651 }
1652
0d39a070
DJ
1653 if (unrecognized_pc == 0)
1654 unrecognized_pc = current_pc;
1655
0d39a070
DJ
1656 if (cache)
1657 {
4072f920
YQ
1658 int framereg, framesize;
1659
1660 /* The frame size is just the distance from the frame register
1661 to the original stack pointer. */
1662 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1663 {
1664 /* Frame pointer is fp. */
1665 framereg = ARM_FP_REGNUM;
1666 framesize = -regs[ARM_FP_REGNUM].k;
1667 }
1668 else
1669 {
1670 /* Try the stack pointer... this is a bit desperate. */
1671 framereg = ARM_SP_REGNUM;
1672 framesize = -regs[ARM_SP_REGNUM].k;
1673 }
1674
0d39a070
DJ
1675 cache->framereg = framereg;
1676 cache->framesize = framesize;
1677
1678 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1679 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1680 cache->saved_regs[regno].addr = offset;
1681 }
1682
1683 if (arm_debug)
1684 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1685 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1686
1687 do_cleanups (back_to);
0d39a070
DJ
1688 return unrecognized_pc;
1689}
1690
1691static void
1692arm_scan_prologue (struct frame_info *this_frame,
1693 struct arm_prologue_cache *cache)
1694{
1695 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1696 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1697 int regno;
1698 CORE_ADDR prologue_start, prologue_end, current_pc;
1699 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1700 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1701 pv_t regs[ARM_FPS_REGNUM];
1702 struct pv_area *stack;
1703 struct cleanup *back_to;
1704 CORE_ADDR offset;
1705
1706 /* Assume there is no frame until proven otherwise. */
1707 cache->framereg = ARM_SP_REGNUM;
1708 cache->framesize = 0;
1709
1710 /* Check for Thumb prologue. */
1711 if (arm_frame_is_thumb (this_frame))
1712 {
1713 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1714 return;
1715 }
1716
1717 /* Find the function prologue. If we can't find the function in
1718 the symbol table, peek in the stack frame to find the PC. */
1719 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1720 &prologue_end))
1721 {
1722 /* One way to find the end of the prologue (which works well
1723 for unoptimized code) is to do the following:
1724
1725 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1726
1727 if (sal.line == 0)
1728 prologue_end = prev_pc;
1729 else if (sal.end < prologue_end)
1730 prologue_end = sal.end;
1731
1732 This mechanism is very accurate so long as the optimizer
1733 doesn't move any instructions from the function body into the
1734 prologue. If this happens, sal.end will be the last
1735 instruction in the first hunk of prologue code just before
1736 the first instruction that the scheduler has moved from
1737 the body to the prologue.
1738
1739 In order to make sure that we scan all of the prologue
1740 instructions, we use a slightly less accurate mechanism which
1741 may scan more than necessary. To help compensate for this
1742 lack of accuracy, the prologue scanning loop below contains
 1743 several clauses that will cause the loop to terminate early if
1744 an implausible prologue instruction is encountered.
1745
1746 The expression
1747
1748 prologue_start + 64
1749
1750 is a suitable endpoint since it accounts for the largest
1751 possible prologue plus up to five instructions inserted by
1752 the scheduler. */
1753
1754 if (prologue_end > prologue_start + 64)
1755 {
1756 prologue_end = prologue_start + 64; /* See above. */
1757 }
1758 }
1759 else
1760 {
1761 /* We have no symbol information. Our only option is to assume this
1762 function has a standard stack frame and the normal frame register.
1763 Then, we can find the value of our frame pointer on entrance to
1764 the callee (or at the present moment if this is the innermost frame).
1765 The value stored there should be the address of the stmfd + 8. */
1766 CORE_ADDR frame_loc;
1767 LONGEST return_value;
1768
1769 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1770 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1771 return;
1772 else
1773 {
1774 prologue_start = gdbarch_addr_bits_remove
1775 (gdbarch, return_value) - 8;
1776 prologue_end = prologue_start + 64; /* See above. */
1777 }
1778 }
1779
1780 if (prev_pc < prologue_end)
1781 prologue_end = prev_pc;
1782
1783 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1784}
1785
eb5492fa 1786static struct arm_prologue_cache *
a262aec2 1787arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1788{
eb5492fa
DJ
1789 int reg;
1790 struct arm_prologue_cache *cache;
1791 CORE_ADDR unwound_fp;
c5aa993b 1792
35d5d4ee 1793 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1794 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1795
a262aec2 1796 arm_scan_prologue (this_frame, cache);
848cfffb 1797
a262aec2 1798 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1799 if (unwound_fp == 0)
1800 return cache;
c906108c 1801
4be43953 1802 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1803
eb5492fa
DJ
1804 /* Calculate actual addresses of saved registers using offsets
1805 determined by arm_scan_prologue. */
a262aec2 1806 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1807 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1808 cache->saved_regs[reg].addr += cache->prev_sp;
1809
1810 return cache;
c906108c
SS
1811}
1812
c1ee9414
LM
1813/* Implementation of the stop_reason hook for arm_prologue frames. */
1814
1815static enum unwind_stop_reason
1816arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1817 void **this_cache)
1818{
1819 struct arm_prologue_cache *cache;
1820 CORE_ADDR pc;
1821
1822 if (*this_cache == NULL)
1823 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1824 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1825
1826 /* This is meant to halt the backtrace at "_start". */
1827 pc = get_frame_pc (this_frame);
1828 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1829 return UNWIND_OUTERMOST;
1830
1831 /* If we've hit a wall, stop. */
1832 if (cache->prev_sp == 0)
1833 return UNWIND_OUTERMOST;
1834
1835 return UNWIND_NO_REASON;
1836}
1837
eb5492fa
DJ
1838/* Our frame ID for a normal frame is the current function's starting PC
1839 and the caller's SP when we were called. */
c906108c 1840
148754e5 1841static void
a262aec2 1842arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1843 void **this_cache,
1844 struct frame_id *this_id)
c906108c 1845{
eb5492fa
DJ
1846 struct arm_prologue_cache *cache;
1847 struct frame_id id;
2c404490 1848 CORE_ADDR pc, func;
f079148d 1849
eb5492fa 1850 if (*this_cache == NULL)
a262aec2 1851 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1852 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1853
0e9e9abd
UW
1854 /* Use function start address as part of the frame ID. If we cannot
1855 identify the start address (due to missing symbol information),
1856 fall back to just using the current PC. */
c1ee9414 1857 pc = get_frame_pc (this_frame);
2c404490 1858 func = get_frame_func (this_frame);
0e9e9abd
UW
1859 if (!func)
1860 func = pc;
1861
eb5492fa 1862 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1863 *this_id = id;
c906108c
SS
1864}
1865
a262aec2
DJ
1866static struct value *
1867arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1868 void **this_cache,
a262aec2 1869 int prev_regnum)
24de872b 1870{
24568a2c 1871 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1872 struct arm_prologue_cache *cache;
1873
eb5492fa 1874 if (*this_cache == NULL)
a262aec2 1875 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1876 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1877
eb5492fa 1878 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1879 instead. The prologue may save PC, but it will point into this
1880 frame's prologue, not the next frame's resume location. Also
1881 strip the saved T bit. A valid LR may have the low bit set, but
1882 a valid PC never does. */
eb5492fa 1883 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1884 {
1885 CORE_ADDR lr;
1886
1887 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1888 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1889 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1890 }
24de872b 1891
eb5492fa 1892 /* SP is generally not saved to the stack, but this frame is
a262aec2 1893 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1894 The value was already reconstructed into PREV_SP. */
1895 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1896 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1897
b39cc962
DJ
1898 /* The CPSR may have been changed by the call instruction and by the
1899 called function. The only bit we can reconstruct is the T bit,
1900 by checking the low bit of LR as of the call. This is a reliable
1901 indicator of Thumb-ness except for some ARM v4T pre-interworking
1902 Thumb code, which could get away with a clear low bit as long as
1903 the called function did not use bx. Guess that all other
1904 bits are unchanged; the condition flags are presumably lost,
1905 but the processor status is likely valid. */
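 /* For illustration (values chosen as examples): an unwound LR of
    0x80a5 has the low bit set, so the caller is taken to be Thumb and
    the reconstructed CPSR below gets its T bit set; an LR of 0x80a4
    would clear it. */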
1906 if (prev_regnum == ARM_PS_REGNUM)
1907 {
1908 CORE_ADDR lr, cpsr;
9779414d 1909 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1910
1911 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1912 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1913 if (IS_THUMB_ADDR (lr))
9779414d 1914 cpsr |= t_bit;
b39cc962 1915 else
9779414d 1916 cpsr &= ~t_bit;
b39cc962
DJ
1917 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1918 }
1919
a262aec2
DJ
1920 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1921 prev_regnum);
eb5492fa
DJ
1922}
1923
1924struct frame_unwind arm_prologue_unwind = {
1925 NORMAL_FRAME,
c1ee9414 1926 arm_prologue_unwind_stop_reason,
eb5492fa 1927 arm_prologue_this_id,
a262aec2
DJ
1928 arm_prologue_prev_register,
1929 NULL,
1930 default_frame_sniffer
eb5492fa
DJ
1931};
1932
0e9e9abd
UW
1933/* Maintain a list of ARM exception table entries per objfile, similar to the
1934 list of mapping symbols. We only cache entries for standard ARM-defined
1935 personality routines; the cache will contain only the frame unwinding
1936 instructions associated with the entry (not the descriptors). */
1937
1938static const struct objfile_data *arm_exidx_data_key;
1939
1940struct arm_exidx_entry
1941{
1942 bfd_vma addr;
1943 gdb_byte *entry;
1944};
1945typedef struct arm_exidx_entry arm_exidx_entry_s;
1946DEF_VEC_O(arm_exidx_entry_s);
1947
1948struct arm_exidx_data
1949{
1950 VEC(arm_exidx_entry_s) **section_maps;
1951};
1952
1953static void
1954arm_exidx_data_free (struct objfile *objfile, void *arg)
1955{
9a3c8263 1956 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
1957 unsigned int i;
1958
1959 for (i = 0; i < objfile->obfd->section_count; i++)
1960 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1961}
1962
1963static inline int
1964arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1965 const struct arm_exidx_entry *rhs)
1966{
1967 return lhs->addr < rhs->addr;
1968}
1969
1970static struct obj_section *
1971arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1972{
1973 struct obj_section *osect;
1974
1975 ALL_OBJFILE_OSECTIONS (objfile, osect)
1976 if (bfd_get_section_flags (objfile->obfd,
1977 osect->the_bfd_section) & SEC_ALLOC)
1978 {
1979 bfd_vma start, size;
1980 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1981 size = bfd_get_section_size (osect->the_bfd_section);
1982
1983 if (start <= vma && vma < start + size)
1984 return osect;
1985 }
1986
1987 return NULL;
1988}
1989
1990/* Parse contents of exception table and exception index sections
1991 of OBJFILE, and fill in the exception table entry cache.
1992
1993 For each entry that refers to a standard ARM-defined personality
1994 routine, extract the frame unwinding instructions (from either
1995 the index or the table section). The unwinding instructions
1996 are normalized by:
1997 - extracting them from the rest of the table data
1998 - converting to host endianness
1999 - appending the implicit 0xb0 ("Finish") code
2000
2001 The extracted and normalized instructions are stored for later
2002 retrieval by the arm_find_exidx_entry routine. */
2003
2004static void
2005arm_exidx_new_objfile (struct objfile *objfile)
2006{
3bb47e8b 2007 struct cleanup *cleanups;
0e9e9abd
UW
2008 struct arm_exidx_data *data;
2009 asection *exidx, *extab;
2010 bfd_vma exidx_vma = 0, extab_vma = 0;
2011 bfd_size_type exidx_size = 0, extab_size = 0;
2012 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2013 LONGEST i;
2014
2015 /* If we've already touched this file, do nothing. */
2016 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2017 return;
3bb47e8b 2018 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2019
2020 /* Read contents of exception table and index. */
a5eda10c 2021 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2022 if (exidx)
2023 {
2024 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2025 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2026 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2027 make_cleanup (xfree, exidx_data);
2028
2029 if (!bfd_get_section_contents (objfile->obfd, exidx,
2030 exidx_data, 0, exidx_size))
2031 {
2032 do_cleanups (cleanups);
2033 return;
2034 }
2035 }
2036
2037 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2038 if (extab)
2039 {
2040 extab_vma = bfd_section_vma (objfile->obfd, extab);
2041 extab_size = bfd_get_section_size (extab);
224c3ddb 2042 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2043 make_cleanup (xfree, extab_data);
2044
2045 if (!bfd_get_section_contents (objfile->obfd, extab,
2046 extab_data, 0, extab_size))
2047 {
2048 do_cleanups (cleanups);
2049 return;
2050 }
2051 }
2052
2053 /* Allocate exception table data structure. */
2054 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2055 set_objfile_data (objfile, arm_exidx_data_key, data);
2056 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2057 objfile->obfd->section_count,
2058 VEC(arm_exidx_entry_s) *);
2059
2060 /* Fill in exception table. */
2061 for (i = 0; i < exidx_size / 8; i++)
2062 {
2063 struct arm_exidx_entry new_exidx_entry;
2064 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2065 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2066 bfd_vma addr = 0, word = 0;
2067 int n_bytes = 0, n_words = 0;
2068 struct obj_section *sec;
2069 gdb_byte *entry = NULL;
2070
2071 /* Extract address of start of function. */
2072 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2073 idx += exidx_vma + i * 8;
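 /* The expression above sign-extends a 31-bit ("prel31")
    self-relative offset.  For illustration, a stored low-31-bit value
    of 0x7ffffff8 decodes to -8, placing the function start 8 bytes
    before this .ARM.exidx entry. */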
2074
2075 /* Find section containing function and compute section offset. */
2076 sec = arm_obj_section_from_vma (objfile, idx);
2077 if (sec == NULL)
2078 continue;
2079 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2080
2081 /* Determine address of exception table entry. */
2082 if (val == 1)
2083 {
2084 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2085 }
2086 else if ((val & 0xff000000) == 0x80000000)
2087 {
2088 /* Exception table entry embedded in .ARM.exidx
2089 -- must be short form. */
2090 word = val;
2091 n_bytes = 3;
2092 }
2093 else if (!(val & 0x80000000))
2094 {
2095 /* Exception table entry in .ARM.extab. */
2096 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2097 addr += exidx_vma + i * 8 + 4;
2098
2099 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2100 {
2101 word = bfd_h_get_32 (objfile->obfd,
2102 extab_data + addr - extab_vma);
2103 addr += 4;
2104
2105 if ((word & 0xff000000) == 0x80000000)
2106 {
2107 /* Short form. */
2108 n_bytes = 3;
2109 }
2110 else if ((word & 0xff000000) == 0x81000000
2111 || (word & 0xff000000) == 0x82000000)
2112 {
2113 /* Long form. */
2114 n_bytes = 2;
2115 n_words = ((word >> 16) & 0xff);
2116 }
2117 else if (!(word & 0x80000000))
2118 {
2119 bfd_vma pers;
2120 struct obj_section *pers_sec;
2121 int gnu_personality = 0;
2122
2123 /* Custom personality routine. */
2124 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2125 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2126
2127 /* Check whether we've got one of the variants of the
2128 GNU personality routines. */
2129 pers_sec = arm_obj_section_from_vma (objfile, pers);
2130 if (pers_sec)
2131 {
2132 static const char *personality[] =
2133 {
2134 "__gcc_personality_v0",
2135 "__gxx_personality_v0",
2136 "__gcj_personality_v0",
2137 "__gnu_objc_personality_v0",
2138 NULL
2139 };
2140
2141 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2142 int k;
2143
2144 for (k = 0; personality[k]; k++)
2145 if (lookup_minimal_symbol_by_pc_name
2146 (pc, personality[k], objfile))
2147 {
2148 gnu_personality = 1;
2149 break;
2150 }
2151 }
2152
2153 /* If so, the next word contains a word count in the high
2154 byte, followed by the same unwind instructions as the
2155 pre-defined forms. */
2156 if (gnu_personality
2157 && addr + 4 <= extab_vma + extab_size)
2158 {
2159 word = bfd_h_get_32 (objfile->obfd,
2160 extab_data + addr - extab_vma);
2161 addr += 4;
2162 n_bytes = 3;
2163 n_words = ((word >> 24) & 0xff);
2164 }
2165 }
2166 }
2167 }
2168
2169 /* Sanity check address. */
2170 if (n_words)
2171 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2172 n_words = n_bytes = 0;
2173
2174 /* The unwind instructions reside in WORD (only the N_BYTES least
2175 significant bytes are valid), followed by N_WORDS words in the
2176 extab section starting at ADDR. */
2177 if (n_bytes || n_words)
2178 {
224c3ddb
SM
2179 gdb_byte *p = entry
2180 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2181 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2182
2183 while (n_bytes--)
2184 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2185
2186 while (n_words--)
2187 {
2188 word = bfd_h_get_32 (objfile->obfd,
2189 extab_data + addr - extab_vma);
2190 addr += 4;
2191
2192 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2193 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2194 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2195 *p++ = (gdb_byte) (word & 0xff);
2196 }
2197
2198 /* Implied "Finish" to terminate the list. */
2199 *p++ = 0xb0;
2200 }
2201
 2202 /* Push the entry onto the vector. Entries are guaranteed to
 2203 appear in order of increasing address. */
2204 new_exidx_entry.addr = idx;
2205 new_exidx_entry.entry = entry;
2206 VEC_safe_push (arm_exidx_entry_s,
2207 data->section_maps[sec->the_bfd_section->index],
2208 &new_exidx_entry);
2209 }
2210
2211 do_cleanups (cleanups);
2212}
2213
2214/* Search for the exception table entry covering MEMADDR. If one is found,
2215 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2216 set *START to the start of the region covered by this entry. */
2217
2218static gdb_byte *
2219arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2220{
2221 struct obj_section *sec;
2222
2223 sec = find_pc_section (memaddr);
2224 if (sec != NULL)
2225 {
2226 struct arm_exidx_data *data;
2227 VEC(arm_exidx_entry_s) *map;
2228 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2229 unsigned int idx;
2230
9a3c8263
SM
2231 data = ((struct arm_exidx_data *)
2232 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2233 if (data != NULL)
2234 {
2235 map = data->section_maps[sec->the_bfd_section->index];
2236 if (!VEC_empty (arm_exidx_entry_s, map))
2237 {
2238 struct arm_exidx_entry *map_sym;
2239
2240 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2241 arm_compare_exidx_entries);
2242
2243 /* VEC_lower_bound finds the earliest ordered insertion
2244 point. If the following symbol starts at this exact
2245 address, we use that; otherwise, the preceding
2246 exception table entry covers this address. */
2247 if (idx < VEC_length (arm_exidx_entry_s, map))
2248 {
2249 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2250 if (map_sym->addr == map_key.addr)
2251 {
2252 if (start)
2253 *start = map_sym->addr + obj_section_addr (sec);
2254 return map_sym->entry;
2255 }
2256 }
2257
2258 if (idx > 0)
2259 {
2260 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2261 if (start)
2262 *start = map_sym->addr + obj_section_addr (sec);
2263 return map_sym->entry;
2264 }
2265 }
2266 }
2267 }
2268
2269 return NULL;
2270}
2271
2272/* Given the current frame THIS_FRAME, and its associated frame unwinding
2273 instruction list from the ARM exception table entry ENTRY, allocate and
2274 return a prologue cache structure describing how to unwind this frame.
2275
2276 Return NULL if the unwinding instruction list contains a "spare",
2277 "reserved" or "refuse to unwind" instruction as defined in section
2278 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2279 for the ARM Architecture" document. */
2280
2281static struct arm_prologue_cache *
2282arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2283{
2284 CORE_ADDR vsp = 0;
2285 int vsp_valid = 0;
2286
2287 struct arm_prologue_cache *cache;
2288 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2289 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2290
2291 for (;;)
2292 {
2293 gdb_byte insn;
2294
 2295 /* Whenever we reload SP, we have to retrieve its actual value
 2296 in the current frame. */
2297 if (!vsp_valid)
2298 {
2299 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2300 {
2301 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2302 vsp = get_frame_register_unsigned (this_frame, reg);
2303 }
2304 else
2305 {
2306 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2307 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2308 }
2309
2310 vsp_valid = 1;
2311 }
2312
2313 /* Decode next unwind instruction. */
2314 insn = *entry++;
2315
2316 if ((insn & 0xc0) == 0)
2317 {
2318 int offset = insn & 0x3f;
2319 vsp += (offset << 2) + 4;
2320 }
2321 else if ((insn & 0xc0) == 0x40)
2322 {
2323 int offset = insn & 0x3f;
2324 vsp -= (offset << 2) + 4;
2325 }
2326 else if ((insn & 0xf0) == 0x80)
2327 {
2328 int mask = ((insn & 0xf) << 8) | *entry++;
2329 int i;
2330
2331 /* The special case of an all-zero mask identifies
2332 "Refuse to unwind". We return NULL to fall back
2333 to the prologue analyzer. */
2334 if (mask == 0)
2335 return NULL;
2336
2337 /* Pop registers r4..r15 under mask. */
2338 for (i = 0; i < 12; i++)
2339 if (mask & (1 << i))
2340 {
2341 cache->saved_regs[4 + i].addr = vsp;
2342 vsp += 4;
2343 }
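 /* For illustration, the byte pair 0x84 0x08 gives mask == 0x408
    (bits 3 and 10 set), so the loop above records pops of r7 (r4 + 3)
    and LR (r4 + 10) and advances VSP by 8 bytes in total. */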
2344
2345 /* Special-case popping SP -- we need to reload vsp. */
2346 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2347 vsp_valid = 0;
2348 }
2349 else if ((insn & 0xf0) == 0x90)
2350 {
2351 int reg = insn & 0xf;
2352
2353 /* Reserved cases. */
2354 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2355 return NULL;
2356
2357 /* Set SP from another register and mark VSP for reload. */
2358 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2359 vsp_valid = 0;
2360 }
2361 else if ((insn & 0xf0) == 0xa0)
2362 {
2363 int count = insn & 0x7;
2364 int pop_lr = (insn & 0x8) != 0;
2365 int i;
2366
2367 /* Pop r4..r[4+count]. */
2368 for (i = 0; i <= count; i++)
2369 {
2370 cache->saved_regs[4 + i].addr = vsp;
2371 vsp += 4;
2372 }
2373
2374 /* If indicated by flag, pop LR as well. */
2375 if (pop_lr)
2376 {
2377 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2378 vsp += 4;
2379 }
2380 }
2381 else if (insn == 0xb0)
2382 {
 2383 /* We could only have updated PC by popping into it; if so, it
 2384 will show up as an address. Otherwise, copy LR into PC. */
2385 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2386 cache->saved_regs[ARM_PC_REGNUM]
2387 = cache->saved_regs[ARM_LR_REGNUM];
2388
2389 /* We're done. */
2390 break;
2391 }
2392 else if (insn == 0xb1)
2393 {
2394 int mask = *entry++;
2395 int i;
2396
2397 /* All-zero mask and mask >= 16 is "spare". */
2398 if (mask == 0 || mask >= 16)
2399 return NULL;
2400
2401 /* Pop r0..r3 under mask. */
2402 for (i = 0; i < 4; i++)
2403 if (mask & (1 << i))
2404 {
2405 cache->saved_regs[i].addr = vsp;
2406 vsp += 4;
2407 }
2408 }
2409 else if (insn == 0xb2)
2410 {
2411 ULONGEST offset = 0;
2412 unsigned shift = 0;
2413
2414 do
2415 {
2416 offset |= (*entry & 0x7f) << shift;
2417 shift += 7;
2418 }
2419 while (*entry++ & 0x80);
2420
2421 vsp += 0x204 + (offset << 2);
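 /* For illustration, the sequence 0xb2 0x08 decodes the ULEB128
    operand as 8, so VSP advances by 0x204 + (8 << 2) = 0x224 bytes. */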
2422 }
2423 else if (insn == 0xb3)
2424 {
2425 int start = *entry >> 4;
2426 int count = (*entry++) & 0xf;
2427 int i;
2428
2429 /* Only registers D0..D15 are valid here. */
2430 if (start + count >= 16)
2431 return NULL;
2432
2433 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2434 for (i = 0; i <= count; i++)
2435 {
2436 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2437 vsp += 8;
2438 }
2439
2440 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2441 vsp += 4;
2442 }
2443 else if ((insn & 0xf8) == 0xb8)
2444 {
2445 int count = insn & 0x7;
2446 int i;
2447
2448 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2449 for (i = 0; i <= count; i++)
2450 {
2451 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2452 vsp += 8;
2453 }
2454
2455 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2456 vsp += 4;
2457 }
2458 else if (insn == 0xc6)
2459 {
2460 int start = *entry >> 4;
2461 int count = (*entry++) & 0xf;
2462 int i;
2463
2464 /* Only registers WR0..WR15 are valid. */
2465 if (start + count >= 16)
2466 return NULL;
2467
2468 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2469 for (i = 0; i <= count; i++)
2470 {
2471 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2472 vsp += 8;
2473 }
2474 }
2475 else if (insn == 0xc7)
2476 {
2477 int mask = *entry++;
2478 int i;
2479
2480 /* All-zero mask and mask >= 16 is "spare". */
2481 if (mask == 0 || mask >= 16)
2482 return NULL;
2483
2484 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2485 for (i = 0; i < 4; i++)
2486 if (mask & (1 << i))
2487 {
2488 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2489 vsp += 4;
2490 }
2491 }
2492 else if ((insn & 0xf8) == 0xc0)
2493 {
2494 int count = insn & 0x7;
2495 int i;
2496
2497 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2498 for (i = 0; i <= count; i++)
2499 {
2500 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2501 vsp += 8;
2502 }
2503 }
2504 else if (insn == 0xc8)
2505 {
2506 int start = *entry >> 4;
2507 int count = (*entry++) & 0xf;
2508 int i;
2509
2510 /* Only registers D0..D31 are valid. */
2511 if (start + count >= 16)
2512 return NULL;
2513
2514 /* Pop VFP double-precision registers
2515 D[16+start]..D[16+start+count]. */
2516 for (i = 0; i <= count; i++)
2517 {
2518 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2519 vsp += 8;
2520 }
2521 }
2522 else if (insn == 0xc9)
2523 {
2524 int start = *entry >> 4;
2525 int count = (*entry++) & 0xf;
2526 int i;
2527
2528 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2529 for (i = 0; i <= count; i++)
2530 {
2531 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2532 vsp += 8;
2533 }
2534 }
2535 else if ((insn & 0xf8) == 0xd0)
2536 {
2537 int count = insn & 0x7;
2538 int i;
2539
2540 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2541 for (i = 0; i <= count; i++)
2542 {
2543 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2544 vsp += 8;
2545 }
2546 }
2547 else
2548 {
2549 /* Everything else is "spare". */
2550 return NULL;
2551 }
2552 }
2553
2554 /* If we restore SP from a register, assume this was the frame register.
2555 Otherwise just fall back to SP as frame register. */
2556 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2557 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2558 else
2559 cache->framereg = ARM_SP_REGNUM;
2560
2561 /* Determine offset to previous frame. */
2562 cache->framesize
2563 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2564
2565 /* We already got the previous SP. */
2566 cache->prev_sp = vsp;
2567
2568 return cache;
2569}
2570
2571/* Unwinding via ARM exception table entries. Note that the sniffer
2572 already computes a filled-in prologue cache, which is then used
2573 with the same arm_prologue_this_id and arm_prologue_prev_register
2574 routines also used for prologue-parsing based unwinding. */
2575
2576static int
2577arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2578 struct frame_info *this_frame,
2579 void **this_prologue_cache)
2580{
2581 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2582 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2583 CORE_ADDR addr_in_block, exidx_region, func_start;
2584 struct arm_prologue_cache *cache;
2585 gdb_byte *entry;
2586
2587 /* See if we have an ARM exception table entry covering this address. */
2588 addr_in_block = get_frame_address_in_block (this_frame);
2589 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2590 if (!entry)
2591 return 0;
2592
2593 /* The ARM exception table does not describe unwind information
2594 for arbitrary PC values, but is guaranteed to be correct only
2595 at call sites. We have to decide here whether we want to use
2596 ARM exception table information for this frame, or fall back
2597 to using prologue parsing. (Note that if we have DWARF CFI,
2598 this sniffer isn't even called -- CFI is always preferred.)
2599
2600 Before we make this decision, however, we check whether we
2601 actually have *symbol* information for the current frame.
2602 If not, prologue parsing would not work anyway, so we might
2603 as well use the exception table and hope for the best. */
2604 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2605 {
2606 int exc_valid = 0;
2607
2608 /* If the next frame is "normal", we are at a call site in this
2609 frame, so exception information is guaranteed to be valid. */
2610 if (get_next_frame (this_frame)
2611 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2612 exc_valid = 1;
2613
2614 /* We also assume exception information is valid if we're currently
2615 blocked in a system call. The system library is supposed to
d9311bfa
AT
2616 ensure this, so that e.g. pthread cancellation works. */
2617 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2618 {
d9311bfa 2619 LONGEST insn;
416dc9c6 2620
d9311bfa
AT
2621 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2622 byte_order_for_code, &insn)
2623 && (insn & 0xff00) == 0xdf00 /* svc */)
2624 exc_valid = 1;
0e9e9abd 2625 }
d9311bfa
AT
2626 else
2627 {
2628 LONGEST insn;
416dc9c6 2629
d9311bfa
AT
2630 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2631 byte_order_for_code, &insn)
2632 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2633 exc_valid = 1;
2634 }
2635
0e9e9abd
UW
2636 /* Bail out if we don't know that exception information is valid. */
2637 if (!exc_valid)
2638 return 0;
2639
2640 /* The ARM exception index does not mark the *end* of the region
2641 covered by the entry, and some functions will not have any entry.
2642 To correctly recognize the end of the covered region, the linker
2643 should have inserted dummy records with a CANTUNWIND marker.
2644
2645 Unfortunately, current versions of GNU ld do not reliably do
2646 this, and thus we may have found an incorrect entry above.
2647 As a (temporary) sanity check, we only use the entry if it
2648 lies *within* the bounds of the function. Note that this check
2649 might reject perfectly valid entries that just happen to cover
2650 multiple functions; therefore this check ought to be removed
2651 once the linker is fixed. */
2652 if (func_start > exidx_region)
2653 return 0;
2654 }
2655
2656 /* Decode the list of unwinding instructions into a prologue cache.
2657 Note that this may fail due to e.g. a "refuse to unwind" code. */
2658 cache = arm_exidx_fill_cache (this_frame, entry);
2659 if (!cache)
2660 return 0;
2661
2662 *this_prologue_cache = cache;
2663 return 1;
2664}
2665
2666struct frame_unwind arm_exidx_unwind = {
2667 NORMAL_FRAME,
8fbca658 2668 default_frame_unwind_stop_reason,
0e9e9abd
UW
2669 arm_prologue_this_id,
2670 arm_prologue_prev_register,
2671 NULL,
2672 arm_exidx_unwind_sniffer
2673};
2674
80d8d390
YQ
2675/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2676 trampoline, return the target PC. Otherwise return 0.
2677
2678 void call0a (char c, short s, int i, long l) {}
2679
2680 int main (void)
2681 {
2682 (*pointer_to_call0a) (c, s, i, l);
2683 }
2684
2685 Instead of calling a stub library function _call_via_xx (xx is
2686 the register name), GCC may inline the trampoline in the object
2687 file as below (register r2 has the address of call0a).
2688
2689 .global main
2690 .type main, %function
2691 ...
2692 bl .L1
2693 ...
2694 .size main, .-main
2695
2696 .L1:
2697 bx r2
2698
2699 The trampoline 'bx r2' doesn't belong to main. */
2700
2701static CORE_ADDR
2702arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2703{
 2704 /* The heuristic for recognizing such a trampoline is that FRAME is
 2705 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2706 if (arm_frame_is_thumb (frame))
2707 {
2708 gdb_byte buf[2];
2709
2710 if (target_read_memory (pc, buf, 2) == 0)
2711 {
2712 struct gdbarch *gdbarch = get_frame_arch (frame);
2713 enum bfd_endian byte_order_for_code
2714 = gdbarch_byte_order_for_code (gdbarch);
2715 uint16_t insn
2716 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2717
2718 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2719 {
2720 CORE_ADDR dest
2721 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2722
2723 /* Clear the LSB so that gdb core sets step-resume
2724 breakpoint at the right address. */
2725 return UNMAKE_THUMB_ADDR (dest);
2726 }
2727 }
2728 }
2729
2730 return 0;
2731}
2732
909cf6ea 2733static struct arm_prologue_cache *
a262aec2 2734arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2735{
909cf6ea 2736 struct arm_prologue_cache *cache;
909cf6ea 2737
35d5d4ee 2738 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2739 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2740
a262aec2 2741 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2742
2743 return cache;
2744}
2745
2746/* Our frame ID for a stub frame is the current SP and LR. */
2747
2748static void
a262aec2 2749arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2750 void **this_cache,
2751 struct frame_id *this_id)
2752{
2753 struct arm_prologue_cache *cache;
2754
2755 if (*this_cache == NULL)
a262aec2 2756 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2757 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2758
a262aec2 2759 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2760}
2761
a262aec2
DJ
2762static int
2763arm_stub_unwind_sniffer (const struct frame_unwind *self,
2764 struct frame_info *this_frame,
2765 void **this_prologue_cache)
909cf6ea 2766{
93d42b30 2767 CORE_ADDR addr_in_block;
948f8e3d 2768 gdb_byte dummy[4];
18d18ac8
YQ
2769 CORE_ADDR pc, start_addr;
2770 const char *name;
909cf6ea 2771
a262aec2 2772 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2773 pc = get_frame_pc (this_frame);
3e5d3a5a 2774 if (in_plt_section (addr_in_block)
fc36e839
DE
 2775 /* We also use the stub unwinder if the target memory is unreadable
2776 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2777 || target_read_memory (pc, dummy, 4) != 0)
2778 return 1;
2779
2780 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2781 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2782 return 1;
909cf6ea 2783
a262aec2 2784 return 0;
909cf6ea
DJ
2785}
2786
a262aec2
DJ
2787struct frame_unwind arm_stub_unwind = {
2788 NORMAL_FRAME,
8fbca658 2789 default_frame_unwind_stop_reason,
a262aec2
DJ
2790 arm_stub_this_id,
2791 arm_prologue_prev_register,
2792 NULL,
2793 arm_stub_unwind_sniffer
2794};
2795
2ae28aa9
YQ
2796/* Store into CACHE->saved_regs the addresses at which the registers
 2797 of the frame described by THIS_FRAME were saved, and return
 2798 CACHE. */
2799
2800static struct arm_prologue_cache *
2801arm_m_exception_cache (struct frame_info *this_frame)
2802{
2803 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2804 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2805 struct arm_prologue_cache *cache;
2806 CORE_ADDR unwound_sp;
2807 LONGEST xpsr;
2808
2809 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2810 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2811
2812 unwound_sp = get_frame_register_unsigned (this_frame,
2813 ARM_SP_REGNUM);
2814
2815 /* The hardware saves eight 32-bit words, comprising xPSR,
2816 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2817 "B1.5.6 Exception entry behavior" in
2818 "ARMv7-M Architecture Reference Manual". */
2819 cache->saved_regs[0].addr = unwound_sp;
2820 cache->saved_regs[1].addr = unwound_sp + 4;
2821 cache->saved_regs[2].addr = unwound_sp + 8;
2822 cache->saved_regs[3].addr = unwound_sp + 12;
2823 cache->saved_regs[12].addr = unwound_sp + 16;
2824 cache->saved_regs[14].addr = unwound_sp + 20;
2825 cache->saved_regs[15].addr = unwound_sp + 24;
2826 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2827
2828 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2829 aligner between the top of the 32-byte stack frame and the
2830 previous context's stack pointer. */
2831 cache->prev_sp = unwound_sp + 32;
2832 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2833 && (xpsr & (1 << 9)) != 0)
2834 cache->prev_sp += 4;
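 /* For illustration (value chosen as an example): a stacked xPSR of
    0x01000200 has bit 9 set, so the caller's SP is unwound_sp + 36
    rather than unwound_sp + 32. */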
2835
2836 return cache;
2837}
2838
2839/* Implementation of function hook 'this_id' in
 2840 'struct frame_unwind'. */
2841
2842static void
2843arm_m_exception_this_id (struct frame_info *this_frame,
2844 void **this_cache,
2845 struct frame_id *this_id)
2846{
2847 struct arm_prologue_cache *cache;
2848
2849 if (*this_cache == NULL)
2850 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2851 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2852
2853 /* Our frame ID for a stub frame is the current SP and LR. */
2854 *this_id = frame_id_build (cache->prev_sp,
2855 get_frame_pc (this_frame));
2856}
2857
2858/* Implementation of function hook 'prev_register' in
 2859 'struct frame_unwind'. */
2860
2861static struct value *
2862arm_m_exception_prev_register (struct frame_info *this_frame,
2863 void **this_cache,
2864 int prev_regnum)
2865{
2866 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2867 struct arm_prologue_cache *cache;
2868
2869 if (*this_cache == NULL)
2870 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2871 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2872
2873 /* The value was already reconstructed into PREV_SP. */
2874 if (prev_regnum == ARM_SP_REGNUM)
2875 return frame_unwind_got_constant (this_frame, prev_regnum,
2876 cache->prev_sp);
2877
2878 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2879 prev_regnum);
2880}
2881
2882/* Implementation of function hook 'sniffer' in
 2883 'struct frame_unwind'. */
2884
2885static int
2886arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2887 struct frame_info *this_frame,
2888 void **this_prologue_cache)
2889{
2890 CORE_ADDR this_pc = get_frame_pc (this_frame);
2891
2892 /* No need to check is_m; this sniffer is only registered for
2893 M-profile architectures. */
2894
2895 /* Exception frames return to one of these magic PCs. Other values
2896 are not defined as of v7-M. See details in "B1.5.8 Exception
2897 return behavior" in "ARMv7-M Architecture Reference Manual". */
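 /* These magic values are EXC_RETURN codes: 0xfffffff1 returns to
    Handler mode using the main stack, 0xfffffff9 to Thread mode using
    the main stack, and 0xfffffffd to Thread mode using the process
    stack. */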
2898 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
2899 || this_pc == 0xfffffffd)
2900 return 1;
2901
2902 return 0;
2903}
2904
2905/* Frame unwinder for M-profile exceptions. */
2906
2907struct frame_unwind arm_m_exception_unwind =
2908{
2909 SIGTRAMP_FRAME,
2910 default_frame_unwind_stop_reason,
2911 arm_m_exception_this_id,
2912 arm_m_exception_prev_register,
2913 NULL,
2914 arm_m_exception_unwind_sniffer
2915};
2916
24de872b 2917static CORE_ADDR
a262aec2 2918arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
2919{
2920 struct arm_prologue_cache *cache;
2921
eb5492fa 2922 if (*this_cache == NULL)
a262aec2 2923 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 2924 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 2925
4be43953 2926 return cache->prev_sp - cache->framesize;
24de872b
DJ
2927}
2928
eb5492fa
DJ
2929struct frame_base arm_normal_base = {
2930 &arm_prologue_unwind,
2931 arm_normal_frame_base,
2932 arm_normal_frame_base,
2933 arm_normal_frame_base
2934};
2935
a262aec2 2936/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
2937 dummy frame. The frame ID's base needs to match the TOS value
2938 saved by save_dummy_frame_tos() and returned from
2939 arm_push_dummy_call, and the PC needs to match the dummy frame's
2940 breakpoint. */
c906108c 2941
eb5492fa 2942static struct frame_id
a262aec2 2943arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 2944{
0963b4bd
MS
2945 return frame_id_build (get_frame_register_unsigned (this_frame,
2946 ARM_SP_REGNUM),
a262aec2 2947 get_frame_pc (this_frame));
eb5492fa 2948}
c3b4394c 2949
eb5492fa
DJ
2950/* Given THIS_FRAME, find the previous frame's resume PC (which will
2951 be used to construct the previous frame's ID, after looking up the
2952 containing function). */
c3b4394c 2953
eb5492fa
DJ
2954static CORE_ADDR
2955arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2956{
2957 CORE_ADDR pc;
2958 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 2959 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
2960}
2961
2962static CORE_ADDR
2963arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2964{
2965 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
2966}
2967
b39cc962
DJ
2968static struct value *
2969arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2970 int regnum)
2971{
24568a2c 2972 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 2973 CORE_ADDR lr, cpsr;
9779414d 2974 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
2975
2976 switch (regnum)
2977 {
2978 case ARM_PC_REGNUM:
2979 /* The PC is normally copied from the return column, which
2980 describes saves of LR. However, that version may have an
2981 extra bit set to indicate Thumb state. The bit is not
2982 part of the PC. */
2983 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2984 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 2985 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
2986
2987 case ARM_PS_REGNUM:
2988 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 2989 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
2990 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2991 if (IS_THUMB_ADDR (lr))
9779414d 2992 cpsr |= t_bit;
b39cc962 2993 else
9779414d 2994 cpsr &= ~t_bit;
ca38c58e 2995 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
2996
2997 default:
2998 internal_error (__FILE__, __LINE__,
2999 _("Unexpected register %d"), regnum);
3000 }
3001}
3002
3003static void
3004arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3005 struct dwarf2_frame_state_reg *reg,
3006 struct frame_info *this_frame)
3007{
3008 switch (regnum)
3009 {
3010 case ARM_PC_REGNUM:
3011 case ARM_PS_REGNUM:
3012 reg->how = DWARF2_FRAME_REG_FN;
3013 reg->loc.fn = arm_dwarf2_prev_register;
3014 break;
3015 case ARM_SP_REGNUM:
3016 reg->how = DWARF2_FRAME_REG_CFA;
3017 break;
3018 }
3019}
3020
c9cf6e20 3021/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3022
3023static int
c9cf6e20 3024thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3025{
3026 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3027 unsigned int insn, insn2;
3028 int found_return = 0, found_stack_adjust = 0;
3029 CORE_ADDR func_start, func_end;
3030 CORE_ADDR scan_pc;
3031 gdb_byte buf[4];
3032
3033 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3034 return 0;
3035
3036 /* The epilogue is a sequence of instructions along the following lines:
3037
3038 - add stack frame size to SP or FP
3039 - [if frame pointer used] restore SP from FP
3040 - restore registers from SP [may include PC]
3041 - a return-type instruction [if PC wasn't already restored]
3042
3043 In a first pass, we scan forward from the current PC and verify the
3044 instructions we find as compatible with this sequence, ending in a
3045 return instruction.
3046
3047 However, this is not sufficient to distinguish indirect function calls
3048 within a function from indirect tail calls in the epilogue in some cases.
3049 Therefore, if we didn't already find any SP-changing instruction during
3050 forward scan, we add a backward scanning heuristic to ensure we actually
3051 are in the epilogue. */
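 /* For illustration, a typical Thumb epilogue of the kind this scan
    looks for is:

	add	sp, sp, #8	(0xb002)
	pop	{r4, r7, pc}	(0xbd90)

    where the "add" releases the local frame and the "pop" both
    restores registers and returns. */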
3052
3053 scan_pc = pc;
3054 while (scan_pc < func_end && !found_return)
3055 {
3056 if (target_read_memory (scan_pc, buf, 2))
3057 break;
3058
3059 scan_pc += 2;
3060 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3061
3062 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3063 found_return = 1;
3064 else if (insn == 0x46f7) /* mov pc, lr */
3065 found_return = 1;
540314bd 3066 else if (thumb_instruction_restores_sp (insn))
4024ca99 3067 {
b7576e5c 3068 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3069 found_return = 1;
3070 }
db24da6d 3071 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3072 {
3073 if (target_read_memory (scan_pc, buf, 2))
3074 break;
3075
3076 scan_pc += 2;
3077 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3078
3079 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3080 {
4024ca99
UW
3081 if (insn2 & 0x8000) /* <registers> include PC. */
3082 found_return = 1;
3083 }
3084 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3085 && (insn2 & 0x0fff) == 0x0b04)
3086 {
4024ca99
UW
3087 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3088 found_return = 1;
3089 }
3090 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3091 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3092 ;
4024ca99
UW
3093 else
3094 break;
3095 }
3096 else
3097 break;
3098 }
3099
3100 if (!found_return)
3101 return 0;
3102
3103 /* Since any instruction in the epilogue sequence, with the possible
3104 exception of return itself, updates the stack pointer, we need to
3105 scan backwards for at most one instruction. Try either a 16-bit or
3106 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3107 too much about false positives. */
4024ca99 3108
6b65d1b6
YQ
3109 if (pc - 4 < func_start)
3110 return 0;
3111 if (target_read_memory (pc - 4, buf, 4))
3112 return 0;
4024ca99 3113
6b65d1b6
YQ
3114 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3115 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3116
3117 if (thumb_instruction_restores_sp (insn2))
3118 found_stack_adjust = 1;
3119 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3120 found_stack_adjust = 1;
3121 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3122 && (insn2 & 0x0fff) == 0x0b04)
3123 found_stack_adjust = 1;
3124 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3125 && (insn2 & 0x0e00) == 0x0a00)
3126 found_stack_adjust = 1;
4024ca99
UW
3127
3128 return found_stack_adjust;
3129}
3130
c9cf6e20 3131/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3132
3133static int
c9cf6e20 3134arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3135{
3136 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3137 unsigned int insn;
f303bc3e 3138 int found_return;
4024ca99
UW
3139 CORE_ADDR func_start, func_end;
3140
3141 if (arm_pc_is_thumb (gdbarch, pc))
c9cf6e20 3142 return thumb_stack_frame_destroyed_p (gdbarch, pc);
4024ca99
UW
3143
3144 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3145 return 0;
3146
3147 /* We are in the epilogue if the previous instruction was a stack
3148 adjustment and the next instruction is a possible return (bx, mov
3149 pc, or pop). We could have to scan backwards to find the stack
3150 adjustment, or forwards to find the return, but this is a decent
3151 approximation. First scan forwards. */
3152
3153 found_return = 0;
3154 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3155 if (bits (insn, 28, 31) != INST_NV)
3156 {
3157 if ((insn & 0x0ffffff0) == 0x012fff10)
3158 /* BX. */
3159 found_return = 1;
3160 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3161 /* MOV PC. */
3162 found_return = 1;
3163 else if ((insn & 0x0fff0000) == 0x08bd0000
3164 && (insn & 0x0000c000) != 0)
3165 /* POP (LDMIA), including PC or LR. */
3166 found_return = 1;
3167 }
3168
3169 if (!found_return)
3170 return 0;
3171
3172 /* Scan backwards. This is just a heuristic, so do not worry about
3173 false positives from mode changes. */
3174
3175 if (pc < func_start + 4)
3176 return 0;
3177
3178 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3179 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3180 return 1;
3181
3182 return 0;
3183}
3184
3185
2dd604e7
RE
3186/* When arguments must be pushed onto the stack, they go on in reverse
3187 order. The code below implements a FILO (stack) to do this. */
3188
3189struct stack_item
3190{
3191 int len;
3192 struct stack_item *prev;
7c543f7b 3193 gdb_byte *data;
2dd604e7
RE
3194};
3195
3196static struct stack_item *
df3b6708 3197push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3198{
3199 struct stack_item *si;
8d749320 3200 si = XNEW (struct stack_item);
7c543f7b 3201 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3202 si->len = len;
3203 si->prev = prev;
3204 memcpy (si->data, contents, len);
3205 return si;
3206}
3207
3208static struct stack_item *
3209pop_stack_item (struct stack_item *si)
3210{
3211 struct stack_item *dead = si;
3212 si = si->prev;
3213 xfree (dead->data);
3214 xfree (dead);
3215 return si;
3216}
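/* For illustration, pushing item A and then item B with push_stack_item
   and unwinding the list with pop_stack_item yields B before A, which
   is what lets the code below lay out stack arguments in reverse
   order. */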
3217
2af48f68
PB
3218
3219/* Return the alignment (in bytes) of the given type. */
3220
3221static int
3222arm_type_align (struct type *t)
3223{
3224 int n;
3225 int align;
3226 int falign;
3227
3228 t = check_typedef (t);
3229 switch (TYPE_CODE (t))
3230 {
3231 default:
3232 /* Should never happen. */
3233 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3234 return 4;
3235
3236 case TYPE_CODE_PTR:
3237 case TYPE_CODE_ENUM:
3238 case TYPE_CODE_INT:
3239 case TYPE_CODE_FLT:
3240 case TYPE_CODE_SET:
3241 case TYPE_CODE_RANGE:
2af48f68
PB
3242 case TYPE_CODE_REF:
3243 case TYPE_CODE_CHAR:
3244 case TYPE_CODE_BOOL:
3245 return TYPE_LENGTH (t);
3246
3247 case TYPE_CODE_ARRAY:
c4312b19
YQ
3248 if (TYPE_VECTOR (t))
3249 {
3250 /* Use the natural alignment for vector types (the same for
3251 scalar type), but the maximum alignment is 64-bit. */
3252 if (TYPE_LENGTH (t) > 8)
3253 return 8;
3254 else
3255 return TYPE_LENGTH (t);
3256 }
3257 else
3258 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3259 case TYPE_CODE_COMPLEX:
2af48f68
PB
3260 return arm_type_align (TYPE_TARGET_TYPE (t));
3261
3262 case TYPE_CODE_STRUCT:
3263 case TYPE_CODE_UNION:
3264 align = 1;
3265 for (n = 0; n < TYPE_NFIELDS (t); n++)
3266 {
3267 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3268 if (falign > align)
3269 align = falign;
3270 }
3271 return align;
3272 }
3273}
3274
90445bd3
DJ
3275/* Possible base types for a candidate for passing and returning in
3276 VFP registers. */
3277
3278enum arm_vfp_cprc_base_type
3279{
3280 VFP_CPRC_UNKNOWN,
3281 VFP_CPRC_SINGLE,
3282 VFP_CPRC_DOUBLE,
3283 VFP_CPRC_VEC64,
3284 VFP_CPRC_VEC128
3285};
3286
3287/* The length of one element of base type B. */
3288
3289static unsigned
3290arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3291{
3292 switch (b)
3293 {
3294 case VFP_CPRC_SINGLE:
3295 return 4;
3296 case VFP_CPRC_DOUBLE:
3297 return 8;
3298 case VFP_CPRC_VEC64:
3299 return 8;
3300 case VFP_CPRC_VEC128:
3301 return 16;
3302 default:
3303 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3304 (int) b);
3305 }
3306}
3307
3308/* The character ('s', 'd' or 'q') for the type of VFP register used
3309 for passing base type B. */
3310
3311static int
3312arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3313{
3314 switch (b)
3315 {
3316 case VFP_CPRC_SINGLE:
3317 return 's';
3318 case VFP_CPRC_DOUBLE:
3319 return 'd';
3320 case VFP_CPRC_VEC64:
3321 return 'd';
3322 case VFP_CPRC_VEC128:
3323 return 'q';
3324 default:
3325 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3326 (int) b);
3327 }
3328}
3329
3330/* Determine whether T may be part of a candidate for passing and
3331 returning in VFP registers, ignoring the limit on the total number
3332 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3333 classification of the first valid component found; if it is not
3334 VFP_CPRC_UNKNOWN, all components must have the same classification
3335 as *BASE_TYPE. If it is found that T contains a type not permitted
3336 for passing and returning in VFP registers, a type differently
3337 classified from *BASE_TYPE, or two types differently classified
3338 from each other, return -1, otherwise return the total number of
3339 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3340 array). Vector types are not currently supported, matching the
3341 generic AAPCS support. */
90445bd3
DJ
3342
3343static int
3344arm_vfp_cprc_sub_candidate (struct type *t,
3345 enum arm_vfp_cprc_base_type *base_type)
3346{
3347 t = check_typedef (t);
3348 switch (TYPE_CODE (t))
3349 {
3350 case TYPE_CODE_FLT:
3351 switch (TYPE_LENGTH (t))
3352 {
3353 case 4:
3354 if (*base_type == VFP_CPRC_UNKNOWN)
3355 *base_type = VFP_CPRC_SINGLE;
3356 else if (*base_type != VFP_CPRC_SINGLE)
3357 return -1;
3358 return 1;
3359
3360 case 8:
3361 if (*base_type == VFP_CPRC_UNKNOWN)
3362 *base_type = VFP_CPRC_DOUBLE;
3363 else if (*base_type != VFP_CPRC_DOUBLE)
3364 return -1;
3365 return 1;
3366
3367 default:
3368 return -1;
3369 }
3370 break;
3371
817e0957
YQ
3372 case TYPE_CODE_COMPLEX:
3373 /* Arguments of complex T where T is one of the types float or
3374 double get treated as if they are implemented as:
3375
3376 struct complexT
3377 {
3378 T real;
3379 T imag;
5f52445b
YQ
3380 };
3381
3382 */
817e0957
YQ
3383 switch (TYPE_LENGTH (t))
3384 {
3385 case 8:
3386 if (*base_type == VFP_CPRC_UNKNOWN)
3387 *base_type = VFP_CPRC_SINGLE;
3388 else if (*base_type != VFP_CPRC_SINGLE)
3389 return -1;
3390 return 2;
3391
3392 case 16:
3393 if (*base_type == VFP_CPRC_UNKNOWN)
3394 *base_type = VFP_CPRC_DOUBLE;
3395 else if (*base_type != VFP_CPRC_DOUBLE)
3396 return -1;
3397 return 2;
3398
3399 default:
3400 return -1;
3401 }
3402 break;
3403
90445bd3
DJ
3404 case TYPE_CODE_ARRAY:
3405 {
c4312b19 3406 if (TYPE_VECTOR (t))
90445bd3 3407 {
c4312b19
YQ
 3408 /* 64-bit and 128-bit containerized vector types are VFP
 3409 CPRCs. */
3410 switch (TYPE_LENGTH (t))
3411 {
3412 case 8:
3413 if (*base_type == VFP_CPRC_UNKNOWN)
3414 *base_type = VFP_CPRC_VEC64;
3415 return 1;
3416 case 16:
3417 if (*base_type == VFP_CPRC_UNKNOWN)
3418 *base_type = VFP_CPRC_VEC128;
3419 return 1;
3420 default:
3421 return -1;
3422 }
3423 }
3424 else
3425 {
3426 int count;
3427 unsigned unitlen;
3428
3429 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3430 base_type);
3431 if (count == -1)
3432 return -1;
3433 if (TYPE_LENGTH (t) == 0)
3434 {
3435 gdb_assert (count == 0);
3436 return 0;
3437 }
3438 else if (count == 0)
3439 return -1;
3440 unitlen = arm_vfp_cprc_unit_length (*base_type);
3441 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3442 return TYPE_LENGTH (t) / unitlen;
90445bd3 3443 }
90445bd3
DJ
3444 }
3445 break;
3446
3447 case TYPE_CODE_STRUCT:
3448 {
3449 int count = 0;
3450 unsigned unitlen;
3451 int i;
3452 for (i = 0; i < TYPE_NFIELDS (t); i++)
3453 {
3454 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3455 base_type);
3456 if (sub_count == -1)
3457 return -1;
3458 count += sub_count;
3459 }
3460 if (TYPE_LENGTH (t) == 0)
3461 {
3462 gdb_assert (count == 0);
3463 return 0;
3464 }
3465 else if (count == 0)
3466 return -1;
3467 unitlen = arm_vfp_cprc_unit_length (*base_type);
3468 if (TYPE_LENGTH (t) != unitlen * count)
3469 return -1;
3470 return count;
3471 }
3472
3473 case TYPE_CODE_UNION:
3474 {
3475 int count = 0;
3476 unsigned unitlen;
3477 int i;
3478 for (i = 0; i < TYPE_NFIELDS (t); i++)
3479 {
3480 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3481 base_type);
3482 if (sub_count == -1)
3483 return -1;
3484 count = (count > sub_count ? count : sub_count);
3485 }
3486 if (TYPE_LENGTH (t) == 0)
3487 {
3488 gdb_assert (count == 0);
3489 return 0;
3490 }
3491 else if (count == 0)
3492 return -1;
3493 unitlen = arm_vfp_cprc_unit_length (*base_type);
3494 if (TYPE_LENGTH (t) != unitlen * count)
3495 return -1;
3496 return count;
3497 }
3498
3499 default:
3500 break;
3501 }
3502
3503 return -1;
3504}
3505
3506/* Determine whether T is a VFP co-processor register candidate (CPRC)
3507 if passed to or returned from a non-variadic function with the VFP
3508 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3509 *BASE_TYPE to the base type for T and *COUNT to the number of
3510 elements of that base type before returning. */
3511
3512static int
3513arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3514 int *count)
3515{
3516 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3517 int c = arm_vfp_cprc_sub_candidate (t, &b);
3518 if (c <= 0 || c > 4)
3519 return 0;
3520 *base_type = b;
3521 *count = c;
3522 return 1;
3523}
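/* As an illustration of the rules above (a sketch, assuming 4-byte floats
   and 8-byte doubles under the AAPCS VFP variant):

     struct { float x, y; }         -> base VFP_CPRC_SINGLE, count 2 (CPRC)
     struct { double d[4]; }        -> base VFP_CPRC_DOUBLE, count 4 (CPRC)
     struct { float f[5]; }         -> count 5 exceeds 4, not a CPRC
     struct { float f; double d; }  -> mixed base types, not a CPRC  */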
3524
3525/* Return 1 if the VFP ABI should be used for passing arguments to and
3526 returning values from a function of type FUNC_TYPE, 0
3527 otherwise. */
3528
3529static int
3530arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3531{
3532 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3533 /* Variadic functions always use the base ABI. Assume that functions
3534 without debug info are not variadic. */
3535 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3536 return 0;
3537 /* The VFP ABI is only supported as a variant of AAPCS. */
3538 if (tdep->arm_abi != ARM_ABI_AAPCS)
3539 return 0;
3540 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3541}
3542
3543/* We currently only support passing parameters in integer registers, which
3544 conforms with GCC's default model, and VFP argument passing following
3545 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3546 we should probably support some of them based on the selected ABI. */
3547
3548static CORE_ADDR
7d9b040b 3549arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3550 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3551 struct value **args, CORE_ADDR sp, int struct_return,
3552 CORE_ADDR struct_addr)
2dd604e7 3553{
e17a4113 3554 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3555 int argnum;
3556 int argreg;
3557 int nstack;
3558 struct stack_item *si = NULL;
90445bd3
DJ
3559 int use_vfp_abi;
3560 struct type *ftype;
3561 unsigned vfp_regs_free = (1 << 16) - 1;
3562
3563 /* Determine the type of this function and whether the VFP ABI
3564 applies. */
3565 ftype = check_typedef (value_type (function));
3566 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3567 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3568 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3569
6a65450a
AC
3570 /* Set the return address. For the ARM, the return breakpoint is
3571 always at BP_ADDR. */
9779414d 3572 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3573 bp_addr |= 1;
6a65450a 3574 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3575
3576 /* Walk through the list of args and determine how large a temporary
3577 stack is required. Need to take care here as structs may be
7a9dd1b2 3578 passed on the stack, and we have to push them. */
2dd604e7
RE
3579 nstack = 0;
3580
3581 argreg = ARM_A1_REGNUM;
3582 nstack = 0;
3583
2dd604e7
RE
3584 /* The struct_return pointer occupies the first parameter
3585 passing register. */
3586 if (struct_return)
3587 {
3588 if (arm_debug)
5af949e3 3589 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3590 gdbarch_register_name (gdbarch, argreg),
5af949e3 3591 paddress (gdbarch, struct_addr));
2dd604e7
RE
3592 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3593 argreg++;
3594 }
3595
3596 for (argnum = 0; argnum < nargs; argnum++)
3597 {
3598 int len;
3599 struct type *arg_type;
3600 struct type *target_type;
3601 enum type_code typecode;
8c6363cf 3602 const bfd_byte *val;
2af48f68 3603 int align;
90445bd3
DJ
3604 enum arm_vfp_cprc_base_type vfp_base_type;
3605 int vfp_base_count;
3606 int may_use_core_reg = 1;
2dd604e7 3607
df407dfe 3608 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3609 len = TYPE_LENGTH (arg_type);
3610 target_type = TYPE_TARGET_TYPE (arg_type);
3611 typecode = TYPE_CODE (arg_type);
8c6363cf 3612 val = value_contents (args[argnum]);
2dd604e7 3613
2af48f68
PB
3614 align = arm_type_align (arg_type);
3615 /* Round alignment up to a whole number of words. */
3616 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
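 /* For example, a 1- or 2-byte alignment rounds up to INT_REGISTER_SIZE (4),
 while an alignment that is already a whole number of words, such as 8,
 is left unchanged. */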
3617 /* Different ABIs have different maximum alignments. */
3618 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3619 {
3620 /* The APCS ABI only requires word alignment. */
3621 align = INT_REGISTER_SIZE;
3622 }
3623 else
3624 {
3625 /* The AAPCS requires at most doubleword alignment. */
3626 if (align > INT_REGISTER_SIZE * 2)
3627 align = INT_REGISTER_SIZE * 2;
3628 }
3629
90445bd3
DJ
3630 if (use_vfp_abi
3631 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3632 &vfp_base_count))
3633 {
3634 int regno;
3635 int unit_length;
3636 int shift;
3637 unsigned mask;
3638
3639 /* Because this is a CPRC it cannot go in a core register or
3640 cause a core register to be skipped for alignment.
3641 Either it goes in VFP registers and the rest of this loop
3642 iteration is skipped for this argument, or it goes on the
3643 stack (and the stack alignment code is correct for this
3644 case). */
3645 may_use_core_reg = 0;
3646
3647 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3648 shift = unit_length / 4;
3649 mask = (1 << (shift * vfp_base_count)) - 1;
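 /* For example, a CPRC made of two doubles gives unit_length 8, shift 2 and
 mask 0xf, so the scan below looks for two consecutive free D registers
 (four 4-byte slots in vfp_regs_free) among d0-d7. */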
3650 for (regno = 0; regno < 16; regno += shift)
3651 if (((vfp_regs_free >> regno) & mask) == mask)
3652 break;
3653
3654 if (regno < 16)
3655 {
3656 int reg_char;
3657 int reg_scaled;
3658 int i;
3659
3660 vfp_regs_free &= ~(mask << regno);
3661 reg_scaled = regno / shift;
3662 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3663 for (i = 0; i < vfp_base_count; i++)
3664 {
3665 char name_buf[4];
3666 int regnum;
58d6951d
DJ
3667 if (reg_char == 'q')
3668 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3669 val + i * unit_length);
58d6951d
DJ
3670 else
3671 {
8c042590
PM
3672 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3673 reg_char, reg_scaled + i);
58d6951d
DJ
3674 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3675 strlen (name_buf));
3676 regcache_cooked_write (regcache, regnum,
3677 val + i * unit_length);
3678 }
90445bd3
DJ
3679 }
3680 continue;
3681 }
3682 else
3683 {
3684 /* This CPRC could not go in VFP registers, so all VFP
3685 registers are now marked as used. */
3686 vfp_regs_free = 0;
3687 }
3688 }
3689
2af48f68
PB
 3690 /* Push stack padding for doubleword alignment. */
3691 if (nstack & (align - 1))
3692 {
3693 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3694 nstack += INT_REGISTER_SIZE;
3695 }
3696
3697 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3698 if (may_use_core_reg
3699 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3700 && align > INT_REGISTER_SIZE
3701 && argreg & 1)
3702 argreg++;
3703
2dd604e7
RE
3704 /* If the argument is a pointer to a function, and it is a
3705 Thumb function, create a LOCAL copy of the value and set
3706 the THUMB bit in it. */
3707 if (TYPE_CODE_PTR == typecode
3708 && target_type != NULL
f96b8fa0 3709 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3710 {
e17a4113 3711 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3712 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3713 {
224c3ddb 3714 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3715 store_unsigned_integer (copy, len, byte_order,
e17a4113 3716 MAKE_THUMB_ADDR (regval));
8c6363cf 3717 val = copy;
2dd604e7
RE
3718 }
3719 }
3720
3721 /* Copy the argument to general registers or the stack in
3722 register-sized pieces. Large arguments are split between
3723 registers and stack. */
3724 while (len > 0)
3725 {
f0c9063c 3726 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3727 CORE_ADDR regval
3728 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3729
90445bd3 3730 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3731 {
3732 /* The argument is being passed in a general purpose
3733 register. */
e17a4113 3734 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3735 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3736 if (arm_debug)
3737 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3738 argnum,
3739 gdbarch_register_name
2af46ca0 3740 (gdbarch, argreg),
f0c9063c 3741 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3742 regcache_cooked_write_unsigned (regcache, argreg, regval);
3743 argreg++;
3744 }
3745 else
3746 {
ef9bd0b8
YQ
3747 gdb_byte buf[INT_REGISTER_SIZE];
3748
3749 memset (buf, 0, sizeof (buf));
3750 store_unsigned_integer (buf, partial_len, byte_order, regval);
3751
2dd604e7
RE
3752 /* Push the arguments onto the stack. */
3753 if (arm_debug)
3754 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3755 argnum, nstack);
ef9bd0b8 3756 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3757 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3758 }
3759
3760 len -= partial_len;
3761 val += partial_len;
3762 }
3763 }
3764 /* If we have an odd number of words to push, then decrement the stack
 3765 by one word now, so the first stack argument will be dword aligned. */
3766 if (nstack & 4)
3767 sp -= 4;
3768
3769 while (si)
3770 {
3771 sp -= si->len;
3772 write_memory (sp, si->data, si->len);
3773 si = pop_stack_item (si);
3774 }
3775
 3776 /* Finally, update the SP register. */
3777 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3778
3779 return sp;
3780}
3781
f53f0d0b
PB
3782
3783/* Always align the frame to an 8-byte boundary. This is required on
3784 some platforms and harmless on the rest. */
3785
3786static CORE_ADDR
3787arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3788{
3789 /* Align the stack to eight bytes. */
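 /* For example, an SP of 0x0040fff5 is aligned down to 0x0040fff0, while an
 SP that is already a multiple of 8 is returned unchanged. */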
3790 return sp & ~ (CORE_ADDR) 7;
3791}
3792
c906108c 3793static void
12b27276 3794print_fpu_flags (struct ui_file *file, int flags)
c906108c 3795{
c5aa993b 3796 if (flags & (1 << 0))
12b27276 3797 fputs_filtered ("IVO ", file);
c5aa993b 3798 if (flags & (1 << 1))
12b27276 3799 fputs_filtered ("DVZ ", file);
c5aa993b 3800 if (flags & (1 << 2))
12b27276 3801 fputs_filtered ("OFL ", file);
c5aa993b 3802 if (flags & (1 << 3))
12b27276 3803 fputs_filtered ("UFL ", file);
c5aa993b 3804 if (flags & (1 << 4))
12b27276
WN
3805 fputs_filtered ("INX ", file);
3806 fputc_filtered ('\n', file);
c906108c
SS
3807}
3808
5e74b15c
RE
3809/* Print interesting information about the floating point processor
3810 (if present) or emulator. */
34e8f22d 3811static void
d855c300 3812arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3813 struct frame_info *frame, const char *args)
c906108c 3814{
9c9acae0 3815 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3816 int type;
3817
3818 type = (status >> 24) & 127;
edefbb7c 3819 if (status & (1 << 31))
12b27276 3820 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3821 else
12b27276 3822 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3823 /* i18n: [floating point unit] mask */
12b27276
WN
3824 fputs_filtered (_("mask: "), file);
3825 print_fpu_flags (file, status >> 16);
edefbb7c 3826 /* i18n: [floating point unit] flags */
12b27276
WN
3827 fputs_filtered (_("flags: "), file);
3828 print_fpu_flags (file, status);
c906108c
SS
3829}
3830
27067745
UW
3831/* Construct the ARM extended floating point type. */
3832static struct type *
3833arm_ext_type (struct gdbarch *gdbarch)
3834{
3835 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3836
3837 if (!tdep->arm_ext_type)
3838 tdep->arm_ext_type
e9bb382b 3839 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3840 floatformats_arm_ext);
3841
3842 return tdep->arm_ext_type;
3843}
3844
58d6951d
DJ
3845static struct type *
3846arm_neon_double_type (struct gdbarch *gdbarch)
3847{
3848 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3849
3850 if (tdep->neon_double_type == NULL)
3851 {
3852 struct type *t, *elem;
3853
3854 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3855 TYPE_CODE_UNION);
3856 elem = builtin_type (gdbarch)->builtin_uint8;
3857 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3858 elem = builtin_type (gdbarch)->builtin_uint16;
3859 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3860 elem = builtin_type (gdbarch)->builtin_uint32;
3861 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3862 elem = builtin_type (gdbarch)->builtin_uint64;
3863 append_composite_type_field (t, "u64", elem);
3864 elem = builtin_type (gdbarch)->builtin_float;
3865 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3866 elem = builtin_type (gdbarch)->builtin_double;
3867 append_composite_type_field (t, "f64", elem);
3868
3869 TYPE_VECTOR (t) = 1;
3870 TYPE_NAME (t) = "neon_d";
3871 tdep->neon_double_type = t;
3872 }
3873
3874 return tdep->neon_double_type;
3875}
3876
3877/* FIXME: The vector types are not correctly ordered on big-endian
3878 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3879 bits of d0 - regardless of what unit size is being held in d0. So
3880 the offset of the first uint8 in d0 is 7, but the offset of the
3881 first float is 4. This code works as-is for little-endian
3882 targets. */
3883
3884static struct type *
3885arm_neon_quad_type (struct gdbarch *gdbarch)
3886{
3887 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3888
3889 if (tdep->neon_quad_type == NULL)
3890 {
3891 struct type *t, *elem;
3892
3893 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3894 TYPE_CODE_UNION);
3895 elem = builtin_type (gdbarch)->builtin_uint8;
3896 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3897 elem = builtin_type (gdbarch)->builtin_uint16;
3898 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3899 elem = builtin_type (gdbarch)->builtin_uint32;
3900 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3901 elem = builtin_type (gdbarch)->builtin_uint64;
3902 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3903 elem = builtin_type (gdbarch)->builtin_float;
3904 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3905 elem = builtin_type (gdbarch)->builtin_double;
3906 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3907
3908 TYPE_VECTOR (t) = 1;
3909 TYPE_NAME (t) = "neon_q";
3910 tdep->neon_quad_type = t;
3911 }
3912
3913 return tdep->neon_quad_type;
3914}
3915
34e8f22d
RE
3916/* Return the GDB type object for the "standard" data type of data in
3917 register N. */
3918
3919static struct type *
7a5ea0d4 3920arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 3921{
58d6951d
DJ
3922 int num_regs = gdbarch_num_regs (gdbarch);
3923
3924 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3925 && regnum >= num_regs && regnum < num_regs + 32)
3926 return builtin_type (gdbarch)->builtin_float;
3927
3928 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3929 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3930 return arm_neon_quad_type (gdbarch);
3931
3932 /* If the target description has register information, we are only
3933 in this function so that we can override the types of
3934 double-precision registers for NEON. */
3935 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3936 {
3937 struct type *t = tdesc_register_type (gdbarch, regnum);
3938
3939 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3940 && TYPE_CODE (t) == TYPE_CODE_FLT
3941 && gdbarch_tdep (gdbarch)->have_neon)
3942 return arm_neon_double_type (gdbarch);
3943 else
3944 return t;
3945 }
3946
34e8f22d 3947 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
3948 {
3949 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3950 return builtin_type (gdbarch)->builtin_void;
3951
3952 return arm_ext_type (gdbarch);
3953 }
e4c16157 3954 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 3955 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 3956 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 3957 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
3958 else if (regnum >= ARRAY_SIZE (arm_register_names))
3959 /* These registers are only supported on targets which supply
3960 an XML description. */
df4df182 3961 return builtin_type (gdbarch)->builtin_int0;
032758dc 3962 else
df4df182 3963 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
3964}
3965
ff6f572f
DJ
3966/* Map a DWARF register REGNUM onto the appropriate GDB register
3967 number. */
3968
3969static int
d3f73121 3970arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
3971{
3972 /* Core integer regs. */
3973 if (reg >= 0 && reg <= 15)
3974 return reg;
3975
3976 /* Legacy FPA encoding. These were once used in a way which
3977 overlapped with VFP register numbering, so their use is
3978 discouraged, but GDB doesn't support the ARM toolchain
3979 which used them for VFP. */
3980 if (reg >= 16 && reg <= 23)
3981 return ARM_F0_REGNUM + reg - 16;
3982
3983 /* New assignments for the FPA registers. */
3984 if (reg >= 96 && reg <= 103)
3985 return ARM_F0_REGNUM + reg - 96;
3986
3987 /* WMMX register assignments. */
3988 if (reg >= 104 && reg <= 111)
3989 return ARM_WCGR0_REGNUM + reg - 104;
3990
3991 if (reg >= 112 && reg <= 127)
3992 return ARM_WR0_REGNUM + reg - 112;
3993
3994 if (reg >= 192 && reg <= 199)
3995 return ARM_WC0_REGNUM + reg - 192;
3996
58d6951d
DJ
3997 /* VFP v2 registers. A double precision value is actually
3998 in d1 rather than s2, but the ABI only defines numbering
3999 for the single precision registers. This will "just work"
4000 in GDB for little endian targets (we'll read eight bytes,
4001 starting in s0 and then progressing to s1), but will be
4002 reversed on big endian targets with VFP. This won't
4003 be a problem for the new Neon quad registers; you're supposed
4004 to use DW_OP_piece for those. */
4005 if (reg >= 64 && reg <= 95)
4006 {
4007 char name_buf[4];
4008
8c042590 4009 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4010 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4011 strlen (name_buf));
4012 }
4013
4014 /* VFP v3 / Neon registers. This range is also used for VFP v2
4015 registers, except that it now describes d0 instead of s0. */
4016 if (reg >= 256 && reg <= 287)
4017 {
4018 char name_buf[4];
4019
8c042590 4020 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4021 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4022 strlen (name_buf));
4023 }
4024
ff6f572f
DJ
4025 return -1;
4026}
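/* For example, with the mapping above DWARF register 3 resolves to r3,
   register 64 to the single-precision register s0 and register 256 to the
   double-precision register d0, while numbers outside the known ranges
   yield -1. */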
4027
26216b98
AC
4028/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4029static int
e7faf938 4030arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4031{
4032 int reg = regnum;
e7faf938 4033 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4034
ff6f572f
DJ
4035 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4036 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4037
4038 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4039 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4040
4041 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4042 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4043
26216b98
AC
4044 if (reg < NUM_GREGS)
4045 return SIM_ARM_R0_REGNUM + reg;
4046 reg -= NUM_GREGS;
4047
4048 if (reg < NUM_FREGS)
4049 return SIM_ARM_FP0_REGNUM + reg;
4050 reg -= NUM_FREGS;
4051
4052 if (reg < NUM_SREGS)
4053 return SIM_ARM_FPS_REGNUM + reg;
4054 reg -= NUM_SREGS;
4055
edefbb7c 4056 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4057}
34e8f22d 4058
a37b3cc0
AC
4059/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4060 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
 4061 It is thought that this is the floating-point register format on
4062 little-endian systems. */
c906108c 4063
ed9a39eb 4064static void
b508a996 4065convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4066 void *dbl, int endianess)
c906108c 4067{
a37b3cc0 4068 DOUBLEST d;
be8626e0
MD
4069
4070 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4071 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4072 else
4073 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4074 ptr, &d);
b508a996 4075 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4076}
4077
34e8f22d 4078static void
be8626e0
MD
4079convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4080 int endianess)
c906108c 4081{
a37b3cc0 4082 DOUBLEST d;
be8626e0 4083
b508a996 4084 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4085 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4086 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4087 else
4088 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4089 &d, dbl);
c906108c 4090}
ed9a39eb 4091
d9311bfa
AT
4092/* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4093 of the appropriate mode (as encoded in the PC value), even if this
4094 differs from what would be expected according to the symbol tables. */
4095
4096void
4097arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4098 struct address_space *aspace,
4099 CORE_ADDR pc)
c906108c 4100{
d9311bfa
AT
4101 struct cleanup *old_chain
4102 = make_cleanup_restore_integer (&arm_override_mode);
c5aa993b 4103
d9311bfa
AT
4104 arm_override_mode = IS_THUMB_ADDR (pc);
4105 pc = gdbarch_addr_bits_remove (gdbarch, pc);
c5aa993b 4106
d9311bfa 4107 insert_single_step_breakpoint (gdbarch, aspace, pc);
c906108c 4108
d9311bfa
AT
4109 do_cleanups (old_chain);
4110}
c5aa993b 4111
d9311bfa
AT
4112/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4113 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4114 NULL if an error occurs. BUF is freed. */
c906108c 4115
d9311bfa
AT
4116static gdb_byte *
4117extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4118 int old_len, int new_len)
4119{
4120 gdb_byte *new_buf;
4121 int bytes_to_read = new_len - old_len;
c906108c 4122
d9311bfa
AT
4123 new_buf = (gdb_byte *) xmalloc (new_len);
4124 memcpy (new_buf + bytes_to_read, buf, old_len);
4125 xfree (buf);
4126 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4127 {
4128 xfree (new_buf);
4129 return NULL;
c906108c 4130 }
d9311bfa 4131 return new_buf;
c906108c
SS
4132}
4133
d9311bfa
AT
4134/* An IT block is at most the 2-byte IT instruction followed by
4135 four 4-byte instructions. The furthest back we must search to
4136 find an IT block that affects the current instruction is thus
4137 2 + 3 * 4 == 14 bytes. */
4138#define MAX_IT_BLOCK_PREFIX 14
177321bd 4139
d9311bfa
AT
4140/* Use a quick scan if there are more than this many bytes of
4141 code. */
4142#define IT_SCAN_THRESHOLD 32
177321bd 4143
d9311bfa
AT
4144/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4145 A breakpoint in an IT block may not be hit, depending on the
4146 condition flags. */
ad527d2e 4147static CORE_ADDR
d9311bfa 4148arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4149{
d9311bfa
AT
4150 gdb_byte *buf;
4151 char map_type;
4152 CORE_ADDR boundary, func_start;
4153 int buf_len;
4154 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4155 int i, any, last_it, last_it_count;
177321bd 4156
d9311bfa
AT
4157 /* If we are using BKPT breakpoints, none of this is necessary. */
4158 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4159 return bpaddr;
177321bd 4160
d9311bfa
AT
4161 /* ARM mode does not have this problem. */
4162 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4163 return bpaddr;
177321bd 4164
d9311bfa
AT
4165 /* We are setting a breakpoint in Thumb code that could potentially
4166 contain an IT block. The first step is to find how much Thumb
4167 code there is; we do not need to read outside of known Thumb
4168 sequences. */
4169 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4170 if (map_type == 0)
4171 /* Thumb-2 code must have mapping symbols to have a chance. */
4172 return bpaddr;
9dca5578 4173
d9311bfa 4174 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4175
d9311bfa
AT
4176 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4177 && func_start > boundary)
4178 boundary = func_start;
9dca5578 4179
d9311bfa
AT
4180 /* Search for a candidate IT instruction. We have to do some fancy
4181 footwork to distinguish a real IT instruction from the second
4182 half of a 32-bit instruction, but there is no need for that if
4183 there's no candidate. */
4184 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4185 if (buf_len == 0)
4186 /* No room for an IT instruction. */
4187 return bpaddr;
c906108c 4188
d9311bfa
AT
4189 buf = (gdb_byte *) xmalloc (buf_len);
4190 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4191 return bpaddr;
4192 any = 0;
4193 for (i = 0; i < buf_len; i += 2)
c906108c 4194 {
d9311bfa
AT
4195 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4196 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4197 {
d9311bfa
AT
4198 any = 1;
4199 break;
25b41d01 4200 }
c906108c 4201 }
d9311bfa
AT
4202
4203 if (any == 0)
c906108c 4204 {
d9311bfa
AT
4205 xfree (buf);
4206 return bpaddr;
f9d67f43
DJ
4207 }
4208
4209 /* OK, the code bytes before this instruction contain at least one
4210 halfword which resembles an IT instruction. We know that it's
4211 Thumb code, but there are still two possibilities. Either the
4212 halfword really is an IT instruction, or it is the second half of
4213 a 32-bit Thumb instruction. The only way we can tell is to
4214 scan forwards from a known instruction boundary. */
4215 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4216 {
4217 int definite;
4218
4219 /* There's a lot of code before this instruction. Start with an
4220 optimistic search; it's easy to recognize halfwords that can
4221 not be the start of a 32-bit instruction, and use that to
4222 lock on to the instruction boundaries. */
4223 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4224 if (buf == NULL)
4225 return bpaddr;
4226 buf_len = IT_SCAN_THRESHOLD;
4227
4228 definite = 0;
4229 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4230 {
4231 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4232 if (thumb_insn_size (inst1) == 2)
4233 {
4234 definite = 1;
4235 break;
4236 }
4237 }
4238
4239 /* At this point, if DEFINITE, BUF[I] is the first place we
4240 are sure that we know the instruction boundaries, and it is far
4241 enough from BPADDR that we could not miss an IT instruction
4242 affecting BPADDR. If ! DEFINITE, give up - start from a
4243 known boundary. */
4244 if (! definite)
4245 {
0963b4bd
MS
4246 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4247 bpaddr - boundary);
f9d67f43
DJ
4248 if (buf == NULL)
4249 return bpaddr;
4250 buf_len = bpaddr - boundary;
4251 i = 0;
4252 }
4253 }
4254 else
4255 {
4256 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4257 if (buf == NULL)
4258 return bpaddr;
4259 buf_len = bpaddr - boundary;
4260 i = 0;
4261 }
4262
4263 /* Scan forwards. Find the last IT instruction before BPADDR. */
4264 last_it = -1;
4265 last_it_count = 0;
4266 while (i < buf_len)
4267 {
4268 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4269 last_it_count--;
4270 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4271 {
4272 last_it = i;
4273 if (inst1 & 0x0001)
4274 last_it_count = 4;
4275 else if (inst1 & 0x0002)
4276 last_it_count = 3;
4277 else if (inst1 & 0x0004)
4278 last_it_count = 2;
4279 else
4280 last_it_count = 1;
4281 }
4282 i += thumb_insn_size (inst1);
4283 }
4284
4285 xfree (buf);
4286
4287 if (last_it == -1)
4288 /* There wasn't really an IT instruction after all. */
4289 return bpaddr;
4290
4291 if (last_it_count < 1)
4292 /* It was too far away. */
4293 return bpaddr;
4294
4295 /* This really is a trouble spot. Move the breakpoint to the IT
4296 instruction. */
4297 return bpaddr - buf_len + last_it;
4298}
4299
cca44b1b 4300/* ARM displaced stepping support.
c906108c 4301
cca44b1b 4302 Generally ARM displaced stepping works as follows:
c906108c 4303
cca44b1b
JB
4304 1. When an instruction is to be single-stepped, it is first decoded by
4305 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
4306 Depending on the type of instruction, it is then copied to a scratch
4307 location, possibly in a modified form. The copy_* set of functions
0963b4bd 4308 performs such modification, as necessary. A breakpoint is placed after
cca44b1b
JB
4309 the modified instruction in the scratch space to return control to GDB.
4310 Note in particular that instructions which modify the PC will no longer
4311 do so after modification.
c5aa993b 4312
cca44b1b
JB
4313 2. The instruction is single-stepped, by setting the PC to the scratch
4314 location address, and resuming. Control returns to GDB when the
4315 breakpoint is hit.
c5aa993b 4316
cca44b1b
JB
4317 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4318 function used for the current instruction. This function's job is to
4319 put the CPU/memory state back to what it would have been if the
4320 instruction had been executed unmodified in its original location. */
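/* As a concrete illustration, an ARM "b <label>" is copied to the scratch
   area as a NOP (ARM_NOP below); the branch itself is then performed by
   cleanup_branch once the single-step completes, using the destination that
   install_b_bl_blx records in the closure. */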
c5aa993b 4321
cca44b1b
JB
4322/* NOP instruction (mov r0, r0). */
4323#define ARM_NOP 0xe1a00000
34518530 4324#define THUMB_NOP 0x4600
cca44b1b
JB
4325
4326/* Helper for register reads for displaced stepping. In particular, this
4327 returns the PC as it would be seen by the instruction at its original
4328 location. */
4329
4330ULONGEST
36073a92
YQ
4331displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4332 int regno)
cca44b1b
JB
4333{
4334 ULONGEST ret;
36073a92 4335 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4336
bf9f652a 4337 if (regno == ARM_PC_REGNUM)
cca44b1b 4338 {
4db71c0b
YQ
4339 /* Compute pipeline offset:
4340 - When executing an ARM instruction, PC reads as the address of the
4341 current instruction plus 8.
4342 - When executing a Thumb instruction, PC reads as the address of the
4343 current instruction plus 4. */
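 /* For example, an ARM instruction at 0x8000 that reads the PC observes
 0x8008, while a Thumb instruction at the same address observes 0x8004. */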
4344
36073a92 4345 if (!dsc->is_thumb)
4db71c0b
YQ
4346 from += 8;
4347 else
4348 from += 4;
4349
cca44b1b
JB
4350 if (debug_displaced)
4351 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4352 (unsigned long) from);
4353 return (ULONGEST) from;
cca44b1b 4354 }
c906108c 4355 else
cca44b1b
JB
4356 {
4357 regcache_cooked_read_unsigned (regs, regno, &ret);
4358 if (debug_displaced)
4359 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4360 regno, (unsigned long) ret);
4361 return ret;
4362 }
c906108c
SS
4363}
4364
cca44b1b
JB
4365static int
4366displaced_in_arm_mode (struct regcache *regs)
4367{
4368 ULONGEST ps;
9779414d 4369 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4370
cca44b1b 4371 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4372
9779414d 4373 return (ps & t_bit) == 0;
cca44b1b 4374}
66e810cd 4375
cca44b1b 4376/* Write to the PC as from a branch instruction. */
c906108c 4377
cca44b1b 4378static void
36073a92
YQ
4379branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4380 ULONGEST val)
c906108c 4381{
36073a92 4382 if (!dsc->is_thumb)
cca44b1b
JB
4383 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4384 architecture versions < 6. */
0963b4bd
MS
4385 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4386 val & ~(ULONGEST) 0x3);
cca44b1b 4387 else
0963b4bd
MS
4388 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4389 val & ~(ULONGEST) 0x1);
cca44b1b 4390}
66e810cd 4391
cca44b1b
JB
4392/* Write to the PC as from a branch-exchange instruction. */
4393
4394static void
4395bx_write_pc (struct regcache *regs, ULONGEST val)
4396{
4397 ULONGEST ps;
9779414d 4398 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4399
4400 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4401
4402 if ((val & 1) == 1)
c906108c 4403 {
9779414d 4404 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4405 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4406 }
4407 else if ((val & 2) == 0)
4408 {
9779414d 4409 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4410 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4411 }
4412 else
4413 {
cca44b1b
JB
4414 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4415 mode, align dest to 4 bytes). */
4416 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4417 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4418 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4419 }
4420}
ed9a39eb 4421
cca44b1b 4422/* Write to the PC as if from a load instruction. */
ed9a39eb 4423
34e8f22d 4424static void
36073a92
YQ
4425load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4426 ULONGEST val)
ed9a39eb 4427{
cca44b1b
JB
4428 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4429 bx_write_pc (regs, val);
4430 else
36073a92 4431 branch_write_pc (regs, dsc, val);
cca44b1b 4432}
be8626e0 4433
cca44b1b
JB
4434/* Write to the PC as if from an ALU instruction. */
4435
4436static void
36073a92
YQ
4437alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4438 ULONGEST val)
cca44b1b 4439{
36073a92 4440 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4441 bx_write_pc (regs, val);
4442 else
36073a92 4443 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4444}
4445
4446/* Helper for writing to registers for displaced stepping. Writing to the PC
 4447 has varying effects depending on the instruction which does the write:
4448 this is controlled by the WRITE_PC argument. */
4449
4450void
4451displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4452 int regno, ULONGEST val, enum pc_write_style write_pc)
4453{
bf9f652a 4454 if (regno == ARM_PC_REGNUM)
08216dd7 4455 {
cca44b1b
JB
4456 if (debug_displaced)
4457 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4458 (unsigned long) val);
4459 switch (write_pc)
08216dd7 4460 {
cca44b1b 4461 case BRANCH_WRITE_PC:
36073a92 4462 branch_write_pc (regs, dsc, val);
08216dd7
RE
4463 break;
4464
cca44b1b
JB
4465 case BX_WRITE_PC:
4466 bx_write_pc (regs, val);
4467 break;
4468
4469 case LOAD_WRITE_PC:
36073a92 4470 load_write_pc (regs, dsc, val);
cca44b1b
JB
4471 break;
4472
4473 case ALU_WRITE_PC:
36073a92 4474 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4475 break;
4476
4477 case CANNOT_WRITE_PC:
4478 warning (_("Instruction wrote to PC in an unexpected way when "
4479 "single-stepping"));
08216dd7
RE
4480 break;
4481
4482 default:
97b9747c
JB
4483 internal_error (__FILE__, __LINE__,
4484 _("Invalid argument to displaced_write_reg"));
08216dd7 4485 }
b508a996 4486
cca44b1b 4487 dsc->wrote_to_pc = 1;
b508a996 4488 }
ed9a39eb 4489 else
b508a996 4490 {
cca44b1b
JB
4491 if (debug_displaced)
4492 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4493 regno, (unsigned long) val);
4494 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4495 }
34e8f22d
RE
4496}
4497
cca44b1b
JB
4498/* This function is used to concisely determine if an instruction INSN
4499 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4500 corresponding fields of BITMASK set to 0b1111. The function
 4501 returns 1 if any of these fields in INSN reference the PC
4502 (also 0b1111, r15), else it returns 0. */
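/* For example, an ARM data-processing instruction with Rn in bits 16-19 and
   Rd in bits 12-15 can be checked with BITMASK 0x000ff000; the call returns
   1 if either field holds 0b1111 (the PC). */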
67255d04
RE
4503
4504static int
cca44b1b 4505insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4506{
cca44b1b 4507 uint32_t lowbit = 1;
67255d04 4508
cca44b1b
JB
4509 while (bitmask != 0)
4510 {
4511 uint32_t mask;
44e1a9eb 4512
cca44b1b
JB
4513 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4514 ;
67255d04 4515
cca44b1b
JB
4516 if (!lowbit)
4517 break;
67255d04 4518
cca44b1b 4519 mask = lowbit * 0xf;
67255d04 4520
cca44b1b
JB
4521 if ((insn & mask) == mask)
4522 return 1;
4523
4524 bitmask &= ~mask;
67255d04
RE
4525 }
4526
cca44b1b
JB
4527 return 0;
4528}
2af48f68 4529
cca44b1b
JB
4530/* The simplest copy function. Many instructions have the same effect no
4531 matter what address they are executed at: in those cases, use this. */
67255d04 4532
cca44b1b 4533static int
7ff120b4
YQ
4534arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4535 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4536{
4537 if (debug_displaced)
4538 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4539 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4540 iname);
67255d04 4541
cca44b1b 4542 dsc->modinsn[0] = insn;
67255d04 4543
cca44b1b
JB
4544 return 0;
4545}
4546
34518530
YQ
4547static int
4548thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4549 uint16_t insn2, const char *iname,
4550 struct displaced_step_closure *dsc)
4551{
4552 if (debug_displaced)
4553 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4554 "opcode/class '%s' unmodified\n", insn1, insn2,
4555 iname);
4556
4557 dsc->modinsn[0] = insn1;
4558 dsc->modinsn[1] = insn2;
4559 dsc->numinsns = 2;
4560
4561 return 0;
4562}
4563
 4564/* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without any
4565 modification. */
4566static int
4567thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
4568 const char *iname,
4569 struct displaced_step_closure *dsc)
4570{
4571 if (debug_displaced)
4572 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4573 "opcode/class '%s' unmodified\n", insn,
4574 iname);
4575
4576 dsc->modinsn[0] = insn;
4577
4578 return 0;
4579}
4580
cca44b1b
JB
4581/* Preload instructions with immediate offset. */
4582
4583static void
6e39997a 4584cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4585 struct regcache *regs, struct displaced_step_closure *dsc)
4586{
4587 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4588 if (!dsc->u.preload.immed)
4589 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4590}
4591
7ff120b4
YQ
4592static void
4593install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4594 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4595{
cca44b1b 4596 ULONGEST rn_val;
cca44b1b
JB
4597 /* Preload instructions:
4598
4599 {pli/pld} [rn, #+/-imm]
4600 ->
4601 {pli/pld} [r0, #+/-imm]. */
4602
36073a92
YQ
4603 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4604 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4605 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4606 dsc->u.preload.immed = 1;
4607
cca44b1b 4608 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4609}
4610
cca44b1b 4611static int
7ff120b4 4612arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4613 struct displaced_step_closure *dsc)
4614{
4615 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4616
7ff120b4
YQ
4617 if (!insn_references_pc (insn, 0x000f0000ul))
4618 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4619
4620 if (debug_displaced)
4621 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4622 (unsigned long) insn);
4623
7ff120b4
YQ
4624 dsc->modinsn[0] = insn & 0xfff0ffff;
4625
4626 install_preload (gdbarch, regs, dsc, rn);
4627
4628 return 0;
4629}
4630
34518530
YQ
4631static int
4632thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4633 struct regcache *regs, struct displaced_step_closure *dsc)
4634{
4635 unsigned int rn = bits (insn1, 0, 3);
4636 unsigned int u_bit = bit (insn1, 7);
4637 int imm12 = bits (insn2, 0, 11);
4638 ULONGEST pc_val;
4639
4640 if (rn != ARM_PC_REGNUM)
4641 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4642
 4643 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3 and
 4644 PLD (literal) Encoding T1. */
4645 if (debug_displaced)
4646 fprintf_unfiltered (gdb_stdlog,
4647 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4648 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4649 imm12);
4650
4651 if (!u_bit)
4652 imm12 = -1 * imm12;
4653
4654 /* Rewrite instruction {pli/pld} PC imm12 into:
4655 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4656
4657 {pli/pld} [r0, r1]
4658
4659 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4660
4661 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4662 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4663
4664 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4665
4666 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4667 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4668 dsc->u.preload.immed = 0;
4669
4670 /* {pli/pld} [r0, r1] */
4671 dsc->modinsn[0] = insn1 & 0xfff0;
4672 dsc->modinsn[1] = 0xf001;
4673 dsc->numinsns = 2;
4674
4675 dsc->cleanup = &cleanup_preload;
4676 return 0;
4677}
4678
7ff120b4
YQ
4679/* Preload instructions with register offset. */
4680
4681static void
4682install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4683 struct displaced_step_closure *dsc, unsigned int rn,
4684 unsigned int rm)
4685{
4686 ULONGEST rn_val, rm_val;
4687
cca44b1b
JB
4688 /* Preload register-offset instructions:
4689
4690 {pli/pld} [rn, rm {, shift}]
4691 ->
4692 {pli/pld} [r0, r1 {, shift}]. */
4693
36073a92
YQ
4694 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4695 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4696 rn_val = displaced_read_reg (regs, dsc, rn);
4697 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4698 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4699 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4700 dsc->u.preload.immed = 0;
4701
cca44b1b 4702 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4703}
4704
4705static int
4706arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4707 struct regcache *regs,
4708 struct displaced_step_closure *dsc)
4709{
4710 unsigned int rn = bits (insn, 16, 19);
4711 unsigned int rm = bits (insn, 0, 3);
4712
4713
4714 if (!insn_references_pc (insn, 0x000f000ful))
4715 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4716
4717 if (debug_displaced)
4718 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4719 (unsigned long) insn);
4720
4721 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4722
7ff120b4 4723 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4724 return 0;
4725}
4726
4727/* Copy/cleanup coprocessor load and store instructions. */
4728
4729static void
6e39997a 4730cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4731 struct regcache *regs,
4732 struct displaced_step_closure *dsc)
4733{
36073a92 4734 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4735
4736 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4737
4738 if (dsc->u.ldst.writeback)
4739 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4740}
4741
7ff120b4
YQ
4742static void
4743install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4744 struct displaced_step_closure *dsc,
4745 int writeback, unsigned int rn)
cca44b1b 4746{
cca44b1b 4747 ULONGEST rn_val;
cca44b1b 4748
cca44b1b
JB
4749 /* Coprocessor load/store instructions:
4750
4751 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4752 ->
4753 {stc/stc2} [r0, #+/-imm].
4754
4755 ldc/ldc2 are handled identically. */
4756
36073a92
YQ
4757 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4758 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4759 /* PC should be 4-byte aligned. */
4760 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4761 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4762
7ff120b4 4763 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4764 dsc->u.ldst.rn = rn;
4765
7ff120b4
YQ
4766 dsc->cleanup = &cleanup_copro_load_store;
4767}
4768
4769static int
4770arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4771 struct regcache *regs,
4772 struct displaced_step_closure *dsc)
4773{
4774 unsigned int rn = bits (insn, 16, 19);
4775
4776 if (!insn_references_pc (insn, 0x000f0000ul))
4777 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4778
4779 if (debug_displaced)
4780 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4781 "load/store insn %.8lx\n", (unsigned long) insn);
4782
cca44b1b
JB
4783 dsc->modinsn[0] = insn & 0xfff0ffff;
4784
7ff120b4 4785 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4786
4787 return 0;
4788}
4789
34518530
YQ
4790static int
4791thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4792 uint16_t insn2, struct regcache *regs,
4793 struct displaced_step_closure *dsc)
4794{
4795 unsigned int rn = bits (insn1, 0, 3);
4796
4797 if (rn != ARM_PC_REGNUM)
4798 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4799 "copro load/store", dsc);
4800
4801 if (debug_displaced)
4802 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4803 "load/store insn %.4x%.4x\n", insn1, insn2);
4804
4805 dsc->modinsn[0] = insn1 & 0xfff0;
4806 dsc->modinsn[1] = insn2;
4807 dsc->numinsns = 2;
4808
 4809 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
 4810 do not support writeback, so pass 0. */
4811 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4812
4813 return 0;
4814}
4815
cca44b1b
JB
4816/* Clean up branch instructions (actually perform the branch, by setting
4817 PC). */
4818
4819static void
6e39997a 4820cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4821 struct displaced_step_closure *dsc)
4822{
36073a92 4823 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4824 int branch_taken = condition_true (dsc->u.branch.cond, status);
4825 enum pc_write_style write_pc = dsc->u.branch.exchange
4826 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4827
4828 if (!branch_taken)
4829 return;
4830
4831 if (dsc->u.branch.link)
4832 {
8c8dba6d
YQ
 4833 /* The value of LR should be the address of the insn following the current
 4834 one. In order not to confuse logic handling a later `bx lr' insn, if the
 4835 current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
4836 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4837
4838 if (dsc->is_thumb)
4839 next_insn_addr |= 0x1;
4840
4841 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4842 CANNOT_WRITE_PC);
cca44b1b
JB
4843 }
4844
bf9f652a 4845 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4846}
4847
4848/* Copy B/BL/BLX instructions with immediate destinations. */
4849
7ff120b4
YQ
4850static void
4851install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4852 struct displaced_step_closure *dsc,
4853 unsigned int cond, int exchange, int link, long offset)
4854{
4855 /* Implement "BL<cond> <label>" as:
4856
4857 Preparation: cond <- instruction condition
4858 Insn: mov r0, r0 (nop)
4859 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4860
4861 B<cond> similar, but don't set r14 in cleanup. */
4862
4863 dsc->u.branch.cond = cond;
4864 dsc->u.branch.link = link;
4865 dsc->u.branch.exchange = exchange;
4866
2b16b2e3
YQ
4867 dsc->u.branch.dest = dsc->insn_addr;
4868 if (link && exchange)
4869 /* For BLX, offset is computed from the Align (PC, 4). */
4870 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4871
7ff120b4 4872 if (dsc->is_thumb)
2b16b2e3 4873 dsc->u.branch.dest += 4 + offset;
7ff120b4 4874 else
2b16b2e3 4875 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
4876
4877 dsc->cleanup = &cleanup_branch;
4878}
cca44b1b 4879static int
7ff120b4
YQ
4880arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4881 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
4882{
4883 unsigned int cond = bits (insn, 28, 31);
4884 int exchange = (cond == 0xf);
4885 int link = exchange || bit (insn, 24);
cca44b1b
JB
4886 long offset;
4887
4888 if (debug_displaced)
4889 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4890 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4891 (unsigned long) insn);
cca44b1b
JB
4892 if (exchange)
4893 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4894 then arrange the switch into Thumb mode. */
4895 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4896 else
4897 offset = bits (insn, 0, 23) << 2;
4898
4899 if (bit (offset, 25))
4900 offset = offset | ~0x3ffffff;
4901
cca44b1b
JB
4902 dsc->modinsn[0] = ARM_NOP;
4903
7ff120b4 4904 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
4905 return 0;
4906}
4907
34518530
YQ
4908static int
4909thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4910 uint16_t insn2, struct regcache *regs,
4911 struct displaced_step_closure *dsc)
4912{
4913 int link = bit (insn2, 14);
4914 int exchange = link && !bit (insn2, 12);
4915 int cond = INST_AL;
4916 long offset = 0;
4917 int j1 = bit (insn2, 13);
4918 int j2 = bit (insn2, 11);
4919 int s = sbits (insn1, 10, 10);
4920 int i1 = !(j1 ^ bit (insn1, 10));
4921 int i2 = !(j2 ^ bit (insn1, 10));
4922
4923 if (!link && !exchange) /* B */
4924 {
4925 offset = (bits (insn2, 0, 10) << 1);
4926 if (bit (insn2, 12)) /* Encoding T4 */
4927 {
4928 offset |= (bits (insn1, 0, 9) << 12)
4929 | (i2 << 22)
4930 | (i1 << 23)
4931 | (s << 24);
4932 cond = INST_AL;
4933 }
4934 else /* Encoding T3 */
4935 {
4936 offset |= (bits (insn1, 0, 5) << 12)
4937 | (j1 << 18)
4938 | (j2 << 19)
4939 | (s << 20);
4940 cond = bits (insn1, 6, 9);
4941 }
4942 }
4943 else
4944 {
4945 offset = (bits (insn1, 0, 9) << 12);
4946 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4947 offset |= exchange ?
4948 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4949 }
4950
4951 if (debug_displaced)
4952 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4953 "%.4x %.4x with offset %.8lx\n",
4954 link ? (exchange) ? "blx" : "bl" : "b",
4955 insn1, insn2, offset);
4956
4957 dsc->modinsn[0] = THUMB_NOP;
4958
4959 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4960 return 0;
4961}
4962
4963/* Copy B Thumb instructions. */
4964static int
4965thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
4966 struct displaced_step_closure *dsc)
4967{
4968 unsigned int cond = 0;
4969 int offset = 0;
4970 unsigned short bit_12_15 = bits (insn, 12, 15);
4971 CORE_ADDR from = dsc->insn_addr;
4972
4973 if (bit_12_15 == 0xd)
4974 {
4975 /* offset = SignExtend (imm8:0, 32) */
4976 offset = sbits ((insn << 1), 0, 8);
4977 cond = bits (insn, 8, 11);
4978 }
4979 else if (bit_12_15 == 0xe) /* Encoding T2 */
4980 {
4981 offset = sbits ((insn << 1), 0, 11);
4982 cond = INST_AL;
4983 }
4984
4985 if (debug_displaced)
4986 fprintf_unfiltered (gdb_stdlog,
4987 "displaced: copying b immediate insn %.4x "
4988 "with offset %d\n", insn, offset);
4989
4990 dsc->u.branch.cond = cond;
4991 dsc->u.branch.link = 0;
4992 dsc->u.branch.exchange = 0;
4993 dsc->u.branch.dest = from + 4 + offset;
4994
4995 dsc->modinsn[0] = THUMB_NOP;
4996
4997 dsc->cleanup = &cleanup_branch;
4998
4999 return 0;
5000}
5001
cca44b1b
JB
5002/* Copy BX/BLX with register-specified destinations. */
5003
7ff120b4
YQ
5004static void
5005install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5006 struct displaced_step_closure *dsc, int link,
5007 unsigned int cond, unsigned int rm)
cca44b1b 5008{
cca44b1b
JB
5009 /* Implement {BX,BLX}<cond> <reg>" as:
5010
5011 Preparation: cond <- instruction condition
5012 Insn: mov r0, r0 (nop)
5013 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5014
5015 Don't set r14 in cleanup for BX. */
5016
36073a92 5017 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5018
5019 dsc->u.branch.cond = cond;
5020 dsc->u.branch.link = link;
cca44b1b 5021
7ff120b4 5022 dsc->u.branch.exchange = 1;
cca44b1b
JB
5023
5024 dsc->cleanup = &cleanup_branch;
7ff120b4 5025}
cca44b1b 5026
7ff120b4
YQ
5027static int
5028arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5029 struct regcache *regs, struct displaced_step_closure *dsc)
5030{
5031 unsigned int cond = bits (insn, 28, 31);
5032 /* BX: x12xxx1x
5033 BLX: x12xxx3x. */
5034 int link = bit (insn, 5);
5035 unsigned int rm = bits (insn, 0, 3);
5036
5037 if (debug_displaced)
5038 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5039 (unsigned long) insn);
5040
5041 dsc->modinsn[0] = ARM_NOP;
5042
5043 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5044 return 0;
5045}
5046
34518530
YQ
5047static int
5048thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5049 struct regcache *regs,
5050 struct displaced_step_closure *dsc)
5051{
5052 int link = bit (insn, 7);
5053 unsigned int rm = bits (insn, 3, 6);
5054
5055 if (debug_displaced)
5056 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5057 (unsigned short) insn);
5058
5059 dsc->modinsn[0] = THUMB_NOP;
5060
5061 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5062
5063 return 0;
5064}
5065
5066
0963b4bd 5067/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5068
5069static void
6e39997a 5070cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5071 struct regcache *regs, struct displaced_step_closure *dsc)
5072{
36073a92 5073 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5074 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5075 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5076 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5077}
5078
5079static int
7ff120b4
YQ
5080arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5081 struct displaced_step_closure *dsc)
cca44b1b
JB
5082{
5083 unsigned int rn = bits (insn, 16, 19);
5084 unsigned int rd = bits (insn, 12, 15);
5085 unsigned int op = bits (insn, 21, 24);
5086 int is_mov = (op == 0xd);
5087 ULONGEST rd_val, rn_val;
cca44b1b
JB
5088
5089 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5090 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5091
5092 if (debug_displaced)
5093 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5094 "%.8lx\n", is_mov ? "move" : "ALU",
5095 (unsigned long) insn);
5096
5097 /* Instruction is of form:
5098
5099 <op><cond> rd, [rn,] #imm
5100
5101 Rewrite as:
5102
5103 Preparation: tmp1, tmp2 <- r0, r1;
5104 r0, r1 <- rd, rn
5105 Insn: <op><cond> r0, r1, #imm
5106 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5107 */
5108
36073a92
YQ
5109 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5110 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5111 rn_val = displaced_read_reg (regs, dsc, rn);
5112 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5113 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5114 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5115 dsc->rd = rd;
5116
5117 if (is_mov)
5118 dsc->modinsn[0] = insn & 0xfff00fff;
5119 else
5120 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5121
5122 dsc->cleanup = &cleanup_alu_imm;
5123
5124 return 0;
5125}
5126
34518530
YQ
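/* Copy a 32-bit Thumb-2 ALU instruction.  Only MOV with the PC as Rd or Rm
   needs rewriting here; anything else is copied unmodified.  */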
5127static int
5128thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5129 uint16_t insn2, struct regcache *regs,
5130 struct displaced_step_closure *dsc)
5131{
5132 unsigned int op = bits (insn1, 5, 8);
5133 unsigned int rn, rm, rd;
5134 ULONGEST rd_val, rn_val;
5135
5136 rn = bits (insn1, 0, 3); /* Rn */
5137 rm = bits (insn2, 0, 3); /* Rm */
5138 rd = bits (insn2, 8, 11); /* Rd */
5139
5140  /* This routine is only called for the MOV instruction.  */
5141 gdb_assert (op == 0x2 && rn == 0xf);
5142
5143 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5144 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5145
5146 if (debug_displaced)
5147 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5148 "ALU", insn1, insn2);
5149
5150 /* Instruction is of form:
5151
5152 <op><cond> rd, [rn,] #imm
5153
5154 Rewrite as:
5155
5156 Preparation: tmp1, tmp2 <- r0, r1;
5157 r0, r1 <- rd, rn
5158 Insn: <op><cond> r0, r1, #imm
5159 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5160 */
5161
5162 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5163 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5164 rn_val = displaced_read_reg (regs, dsc, rn);
5165 rd_val = displaced_read_reg (regs, dsc, rd);
5166 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5167 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5168 dsc->rd = rd;
5169
5170 dsc->modinsn[0] = insn1;
5171 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5172 dsc->numinsns = 2;
5173
5174 dsc->cleanup = &cleanup_alu_imm;
5175
5176 return 0;
5177}
5178
cca44b1b
JB
5179/* Copy/cleanup arithmetic/logic insns with register RHS. */
5180
5181static void
6e39997a 5182cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5183 struct regcache *regs, struct displaced_step_closure *dsc)
5184{
5185 ULONGEST rd_val;
5186 int i;
5187
36073a92 5188 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5189
5190 for (i = 0; i < 3; i++)
5191 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5192
5193 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5194}
5195
7ff120b4
YQ
5196static void
5197install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5198 struct displaced_step_closure *dsc,
5199 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5200{
cca44b1b 5201 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5202
cca44b1b
JB
5203 /* Instruction is of form:
5204
5205 <op><cond> rd, [rn,] rm [, <shift>]
5206
5207 Rewrite as:
5208
5209 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5210 r0, r1, r2 <- rd, rn, rm
ef713951 5211 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5212 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5213 */
5214
36073a92
YQ
5215 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5216 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5217 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5218 rd_val = displaced_read_reg (regs, dsc, rd);
5219 rn_val = displaced_read_reg (regs, dsc, rn);
5220 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5221 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5222 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5223 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5224 dsc->rd = rd;
5225
7ff120b4
YQ
5226 dsc->cleanup = &cleanup_alu_reg;
5227}
5228
5229static int
5230arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5231 struct displaced_step_closure *dsc)
5232{
5233 unsigned int op = bits (insn, 21, 24);
5234 int is_mov = (op == 0xd);
5235
5236 if (!insn_references_pc (insn, 0x000ff00ful))
5237 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5238
5239 if (debug_displaced)
5240 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5241 is_mov ? "move" : "ALU", (unsigned long) insn);
5242
cca44b1b
JB
5243 if (is_mov)
5244 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5245 else
5246 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5247
7ff120b4
YQ
5248 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5249 bits (insn, 0, 3));
cca44b1b
JB
5250 return 0;
5251}
5252
34518530
YQ
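/* Copy a 16-bit Thumb ALU instruction operating on high registers, where
   Rd or Rm may be the PC.  */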
5253static int
5254thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5255 struct regcache *regs,
5256 struct displaced_step_closure *dsc)
5257{
ef713951 5258 unsigned rm, rd;
34518530 5259
ef713951
YQ
5260 rm = bits (insn, 3, 6);
5261 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5262
ef713951 5263 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5264 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5265
5266 if (debug_displaced)
ef713951
YQ
5267 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5268 (unsigned short) insn);
34518530 5269
ef713951 5270 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5271
ef713951 5272 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5273
5274 return 0;
5275}
5276
cca44b1b
JB
5277/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5278
5279static void
6e39997a 5280cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5281 struct regcache *regs,
5282 struct displaced_step_closure *dsc)
5283{
36073a92 5284 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5285 int i;
5286
5287 for (i = 0; i < 4; i++)
5288 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5289
5290 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5291}
5292
7ff120b4
YQ
5293static void
5294install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5295 struct displaced_step_closure *dsc,
5296 unsigned int rd, unsigned int rn, unsigned int rm,
5297 unsigned rs)
cca44b1b 5298{
7ff120b4 5299 int i;
cca44b1b 5300 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5301
cca44b1b
JB
5302 /* Instruction is of form:
5303
5304 <op><cond> rd, [rn,] rm, <shift> rs
5305
5306 Rewrite as:
5307
5308 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5309 r0, r1, r2, r3 <- rd, rn, rm, rs
5310 Insn: <op><cond> r0, r1, r2, <shift> r3
5311 Cleanup: tmp5 <- r0
5312 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5313 rd <- tmp5
5314 */
5315
5316 for (i = 0; i < 4; i++)
36073a92 5317 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5318
36073a92
YQ
5319 rd_val = displaced_read_reg (regs, dsc, rd);
5320 rn_val = displaced_read_reg (regs, dsc, rn);
5321 rm_val = displaced_read_reg (regs, dsc, rm);
5322 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5323 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5326 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5327 dsc->rd = rd;
7ff120b4
YQ
5328 dsc->cleanup = &cleanup_alu_shifted_reg;
5329}
5330
5331static int
5332arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5333 struct regcache *regs,
5334 struct displaced_step_closure *dsc)
5335{
5336 unsigned int op = bits (insn, 21, 24);
5337 int is_mov = (op == 0xd);
5338 unsigned int rd, rn, rm, rs;
5339
5340 if (!insn_references_pc (insn, 0x000fff0ful))
5341 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5342
5343 if (debug_displaced)
5344 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5345 "%.8lx\n", is_mov ? "move" : "ALU",
5346 (unsigned long) insn);
5347
5348 rn = bits (insn, 16, 19);
5349 rm = bits (insn, 0, 3);
5350 rs = bits (insn, 8, 11);
5351 rd = bits (insn, 12, 15);
cca44b1b
JB
5352
5353 if (is_mov)
5354 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5355 else
5356 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5357
7ff120b4 5358 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5359
5360 return 0;
5361}
5362
5363/* Clean up load instructions. */
5364
5365static void
6e39997a 5366cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5367 struct displaced_step_closure *dsc)
5368{
5369 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5370
36073a92 5371 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5372 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5373 rt_val2 = displaced_read_reg (regs, dsc, 1);
5374 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5375
5376 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5377 if (dsc->u.ldst.xfersize > 4)
5378 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5379 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5380 if (!dsc->u.ldst.immed)
5381 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5382
5383 /* Handle register writeback. */
5384 if (dsc->u.ldst.writeback)
5385 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5386 /* Put result in right place. */
5387 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5388 if (dsc->u.ldst.xfersize == 8)
5389 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5390}
5391
5392/* Clean up store instructions. */
5393
5394static void
6e39997a 5395cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5396 struct displaced_step_closure *dsc)
5397{
36073a92 5398 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5399
5400 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5401 if (dsc->u.ldst.xfersize > 4)
5402 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5403 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5404 if (!dsc->u.ldst.immed)
5405 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5406 if (!dsc->u.ldst.restore_r4)
5407 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5408
5409 /* Writeback. */
5410 if (dsc->u.ldst.writeback)
5411 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5412}
5413
5414/* Copy "extra" load/store instructions. These are halfword/doubleword
5415 transfers, which have a different encoding to byte/word transfers. */
5416
5417static int
7ff120b4
YQ
5418arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5419 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5420{
5421 unsigned int op1 = bits (insn, 20, 24);
5422 unsigned int op2 = bits (insn, 5, 6);
5423 unsigned int rt = bits (insn, 12, 15);
5424 unsigned int rn = bits (insn, 16, 19);
5425 unsigned int rm = bits (insn, 0, 3);
5426 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5427 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5428 int immed = (op1 & 0x4) != 0;
5429 int opcode;
5430 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5431
5432 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5433 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5434
5435 if (debug_displaced)
5436 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5437 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
5438 (unsigned long) insn);
5439
5440 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5441
5442 if (opcode < 0)
5443 internal_error (__FILE__, __LINE__,
5444 _("copy_extra_ld_st: instruction decode error"));
5445
36073a92
YQ
5446 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5447 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5448 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5449 if (!immed)
36073a92 5450 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5451
36073a92 5452 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5453 if (bytesize[opcode] == 8)
36073a92
YQ
5454 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5455 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5456 if (!immed)
36073a92 5457 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5458
5459 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5460 if (bytesize[opcode] == 8)
5461 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5462 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5463 if (!immed)
5464 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5465
5466 dsc->rd = rt;
5467 dsc->u.ldst.xfersize = bytesize[opcode];
5468 dsc->u.ldst.rn = rn;
5469 dsc->u.ldst.immed = immed;
5470 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5471 dsc->u.ldst.restore_r4 = 0;
5472
5473 if (immed)
5474 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5475 ->
5476 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5477 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5478 else
5479 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5480 ->
5481 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5482 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5483
5484 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5485
5486 return 0;
5487}
5488
0f6f04ba 5489/* Copy byte/half word/word loads and stores. */
cca44b1b 5490
7ff120b4 5491static void
0f6f04ba
YQ
5492install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5493 struct displaced_step_closure *dsc, int load,
5494 int immed, int writeback, int size, int usermode,
5495 int rt, int rm, int rn)
cca44b1b 5496{
cca44b1b 5497 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5498
36073a92
YQ
5499 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5500 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5501 if (!immed)
36073a92 5502 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5503 if (!load)
36073a92 5504 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5505
36073a92
YQ
5506 rt_val = displaced_read_reg (regs, dsc, rt);
5507 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5508 if (!immed)
36073a92 5509 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5510
5511 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5512 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5513 if (!immed)
5514 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5515 dsc->rd = rt;
0f6f04ba 5516 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5517 dsc->u.ldst.rn = rn;
5518 dsc->u.ldst.immed = immed;
7ff120b4 5519 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5520
5521 /* To write PC we can do:
5522
494e194e
YQ
5523 Before this sequence of instructions:
5524     r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5525     r2 is the Rn value obtained from displaced_read_reg.
5526
5527 Insn1: push {pc} Write address of STR instruction + offset on stack
5528 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5529 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5530 = addr(Insn1) + offset - addr(Insn3) - 8
5531 = offset - 16
5532 Insn4: add r4, r4, #8 r4 = offset - 8
5533 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5534 = from + offset
5535 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5536
5537 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5538 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5539 of this can be found in Section "Saving from r15" in
5540 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5541
7ff120b4
YQ
5542 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5543}
5544
34518530
YQ
5545
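/* Copy a 32-bit Thumb-2 PC-relative (literal) load of SIZE bytes.  The
   aligned PC value and the (possibly negated) offset are placed in r2 and
   r3, the load is rewritten as a register-offset LDR, and cleanup_load
   moves the result into Rt and restores the scratch registers.  */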
5546static int
5547thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5548 uint16_t insn2, struct regcache *regs,
5549 struct displaced_step_closure *dsc, int size)
5550{
5551 unsigned int u_bit = bit (insn1, 7);
5552 unsigned int rt = bits (insn2, 12, 15);
5553 int imm12 = bits (insn2, 0, 11);
5554 ULONGEST pc_val;
5555
5556 if (debug_displaced)
5557 fprintf_unfiltered (gdb_stdlog,
5558 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5559 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5560 imm12);
5561
5562 if (!u_bit)
5563 imm12 = -1 * imm12;
5564
5565 /* Rewrite instruction LDR Rt imm12 into:
5566
5567     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5568
5569     LDR R0, [R2, R3]
5570
5571     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */
5572
5573
5574 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5575 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5576 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5577
5578 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5579
5580 pc_val = pc_val & 0xfffffffc;
5581
5582 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5583 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5584
5585 dsc->rd = rt;
5586
5587 dsc->u.ldst.xfersize = size;
5588 dsc->u.ldst.immed = 0;
5589 dsc->u.ldst.writeback = 0;
5590 dsc->u.ldst.restore_r4 = 0;
5591
5592  /* LDR.W R0, [R2, R3] */
5593 dsc->modinsn[0] = 0xf852;
5594 dsc->modinsn[1] = 0x3;
5595 dsc->numinsns = 2;
5596
5597 dsc->cleanup = &cleanup_load;
5598
5599 return 0;
5600}
5601
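/* Copy a 32-bit Thumb-2 load with register or immediate offset in which
   Rt or Rn is the PC.  */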
5602static int
5603thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5604 uint16_t insn2, struct regcache *regs,
5605 struct displaced_step_closure *dsc,
5606 int writeback, int immed)
5607{
5608 unsigned int rt = bits (insn2, 12, 15);
5609 unsigned int rn = bits (insn1, 0, 3);
5610 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5611 /* In LDR (register), there is also a register Rm, which is not allowed to
5612 be PC, so we don't have to check it. */
5613
5614 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5615 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5616 dsc);
5617
5618 if (debug_displaced)
5619 fprintf_unfiltered (gdb_stdlog,
5620 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5621 rt, rn, insn1, insn2);
5622
5623 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5624 0, rt, rm, rn);
5625
5626 dsc->u.ldst.restore_r4 = 0;
5627
5628 if (immed)
5629 /* ldr[b]<cond> rt, [rn, #imm], etc.
5630 ->
5631 ldr[b]<cond> r0, [r2, #imm]. */
5632 {
5633 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5634 dsc->modinsn[1] = insn2 & 0x0fff;
5635 }
5636 else
5637 /* ldr[b]<cond> rt, [rn, rm], etc.
5638 ->
5639 ldr[b]<cond> r0, [r2, r3]. */
5640 {
5641 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5642 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5643 }
5644
5645 dsc->numinsns = 2;
5646
5647 return 0;
5648}
5649
5650
7ff120b4
YQ
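/* Copy an ARM byte/word load or store.  Stores of the PC need the extra
   instruction sequence described in install_load_store above, using r4 as
   scratch, to compute the value that would have been stored.  */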
5651static int
5652arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5653 struct regcache *regs,
5654 struct displaced_step_closure *dsc,
0f6f04ba 5655 int load, int size, int usermode)
7ff120b4
YQ
5656{
5657 int immed = !bit (insn, 25);
5658 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5659 unsigned int rt = bits (insn, 12, 15);
5660 unsigned int rn = bits (insn, 16, 19);
5661 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5662
5663 if (!insn_references_pc (insn, 0x000ff00ful))
5664 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5665
5666 if (debug_displaced)
5667 fprintf_unfiltered (gdb_stdlog,
5668 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5669 load ? (size == 1 ? "ldrb" : "ldr")
5670 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5671 rt, rn,
5672 (unsigned long) insn);
5673
0f6f04ba
YQ
5674 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5675 usermode, rt, rm, rn);
7ff120b4 5676
bf9f652a 5677 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5678 {
5679 dsc->u.ldst.restore_r4 = 0;
5680
5681 if (immed)
5682 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5683 ->
5684 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5685 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5686 else
5687 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5688 ->
5689 {ldr,str}[b]<cond> r0, [r2, r3]. */
5690 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5691 }
5692 else
5693 {
5694 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5695 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5696 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5697 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5698 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5699 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5700 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5701
5702 /* As above. */
5703 if (immed)
5704 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5705 else
5706 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5707
cca44b1b
JB
5708 dsc->numinsns = 6;
5709 }
5710
5711 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5712
5713 return 0;
5714}
5715
5716/* Cleanup LDM instructions with fully-populated register list. This is an
5717 unfortunate corner case: it's impossible to implement correctly by modifying
5718 the instruction. The issue is as follows: we have an instruction,
5719
5720 ldm rN, {r0-r15}
5721
5722 which we must rewrite to avoid loading PC. A possible solution would be to
5723 do the load in two halves, something like (with suitable cleanup
5724 afterwards):
5725
5726 mov r8, rN
5727 ldm[id][ab] r8!, {r0-r7}
5728 str r7, <temp>
5729 ldm[id][ab] r8, {r7-r14}
5730 <bkpt>
5731
5732 but at present there's no suitable place for <temp>, since the scratch space
5733 is overwritten before the cleanup routine is called. For now, we simply
5734 emulate the instruction. */
5735
5736static void
5737cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5738 struct displaced_step_closure *dsc)
5739{
cca44b1b
JB
5740 int inc = dsc->u.block.increment;
5741 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5742 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5743 uint32_t regmask = dsc->u.block.regmask;
5744 int regno = inc ? 0 : 15;
5745 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5746 int exception_return = dsc->u.block.load && dsc->u.block.user
5747 && (regmask & 0x8000) != 0;
36073a92 5748 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5749 int do_transfer = condition_true (dsc->u.block.cond, status);
5750 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5751
5752 if (!do_transfer)
5753 return;
5754
5755 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5756 sensible we can do here. Complain loudly. */
5757 if (exception_return)
5758 error (_("Cannot single-step exception return"));
5759
5760 /* We don't handle any stores here for now. */
5761 gdb_assert (dsc->u.block.load != 0);
5762
5763 if (debug_displaced)
5764 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5765 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5766 dsc->u.block.increment ? "inc" : "dec",
5767 dsc->u.block.before ? "before" : "after");
5768
5769 while (regmask)
5770 {
5771 uint32_t memword;
5772
5773 if (inc)
bf9f652a 5774 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5775 regno++;
5776 else
5777 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5778 regno--;
5779
5780 xfer_addr += bump_before;
5781
5782 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5783 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5784
5785 xfer_addr += bump_after;
5786
5787 regmask &= ~(1 << regno);
5788 }
5789
5790 if (dsc->u.block.writeback)
5791 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5792 CANNOT_WRITE_PC);
5793}
5794
5795/* Clean up an STM which included the PC in the register list. */
5796
5797static void
5798cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5799 struct displaced_step_closure *dsc)
5800{
36073a92 5801 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5802 int store_executed = condition_true (dsc->u.block.cond, status);
5803 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5804 CORE_ADDR stm_insn_addr;
5805 uint32_t pc_val;
5806 long offset;
5807 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5808
5809 /* If condition code fails, there's nothing else to do. */
5810 if (!store_executed)
5811 return;
5812
5813 if (dsc->u.block.increment)
5814 {
5815 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5816
5817 if (dsc->u.block.before)
5818 pc_stored_at += 4;
5819 }
5820 else
5821 {
5822 pc_stored_at = dsc->u.block.xfer_addr;
5823
5824 if (dsc->u.block.before)
5825 pc_stored_at -= 4;
5826 }
5827
5828 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5829 stm_insn_addr = dsc->scratch_base;
5830 offset = pc_val - stm_insn_addr;
5831
5832 if (debug_displaced)
5833 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5834 "STM instruction\n", offset);
5835
5836 /* Rewrite the stored PC to the proper value for the non-displaced original
5837 instruction. */
5838 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5839 dsc->insn_addr + offset);
5840}
5841
5842/* Clean up an LDM which includes the PC in the register list. We clumped all
5843 the registers in the transferred list into a contiguous range r0...rX (to
5844 avoid loading PC directly and losing control of the debugged program), so we
5845 must undo that here. */
5846
5847static void
6e39997a 5848cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5849 struct regcache *regs,
5850 struct displaced_step_closure *dsc)
5851{
36073a92 5852 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5853 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5854 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5855 unsigned int regs_loaded = bitcount (mask);
5856 unsigned int num_to_shuffle = regs_loaded, clobbered;
5857
5858 /* The method employed here will fail if the register list is fully populated
5859 (we need to avoid loading PC directly). */
5860 gdb_assert (num_to_shuffle < 16);
5861
5862 if (!load_executed)
5863 return;
5864
5865 clobbered = (1 << num_to_shuffle) - 1;
5866
5867 while (num_to_shuffle > 0)
5868 {
5869 if ((mask & (1 << write_reg)) != 0)
5870 {
5871 unsigned int read_reg = num_to_shuffle - 1;
5872
5873 if (read_reg != write_reg)
5874 {
36073a92 5875 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
5876 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5877 if (debug_displaced)
5878 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5879 "loaded register r%d to r%d\n"), read_reg,
5880 write_reg);
5881 }
5882 else if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5884 "r%d already in the right place\n"),
5885 write_reg);
5886
5887 clobbered &= ~(1 << write_reg);
5888
5889 num_to_shuffle--;
5890 }
5891
5892 write_reg--;
5893 }
5894
5895 /* Restore any registers we scribbled over. */
5896 for (write_reg = 0; clobbered != 0; write_reg++)
5897 {
5898 if ((clobbered & (1 << write_reg)) != 0)
5899 {
5900 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5901 CANNOT_WRITE_PC);
5902 if (debug_displaced)
5903 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5904 "clobbered register r%d\n"), write_reg);
5905 clobbered &= ~(1 << write_reg);
5906 }
5907 }
5908
5909 /* Perform register writeback manually. */
5910 if (dsc->u.block.writeback)
5911 {
5912 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5913
5914 if (dsc->u.block.increment)
5915 new_rn_val += regs_loaded * 4;
5916 else
5917 new_rn_val -= regs_loaded * 4;
5918
5919 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5920 CANNOT_WRITE_PC);
5921 }
5922}
5923
5924/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5925 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5926
5927static int
7ff120b4
YQ
5928arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5929 struct regcache *regs,
5930 struct displaced_step_closure *dsc)
cca44b1b
JB
5931{
5932 int load = bit (insn, 20);
5933 int user = bit (insn, 22);
5934 int increment = bit (insn, 23);
5935 int before = bit (insn, 24);
5936 int writeback = bit (insn, 21);
5937 int rn = bits (insn, 16, 19);
cca44b1b 5938
0963b4bd
MS
5939 /* Block transfers which don't mention PC can be run directly
5940 out-of-line. */
bf9f652a 5941 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 5942 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 5943
bf9f652a 5944 if (rn == ARM_PC_REGNUM)
cca44b1b 5945 {
0963b4bd
MS
5946 warning (_("displaced: Unpredictable LDM or STM with "
5947 "base register r15"));
7ff120b4 5948 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
5949 }
5950
5951 if (debug_displaced)
5952 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5953 "%.8lx\n", (unsigned long) insn);
5954
36073a92 5955 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
5956 dsc->u.block.rn = rn;
5957
5958 dsc->u.block.load = load;
5959 dsc->u.block.user = user;
5960 dsc->u.block.increment = increment;
5961 dsc->u.block.before = before;
5962 dsc->u.block.writeback = writeback;
5963 dsc->u.block.cond = bits (insn, 28, 31);
5964
5965 dsc->u.block.regmask = insn & 0xffff;
5966
5967 if (load)
5968 {
5969 if ((insn & 0xffff) == 0xffff)
5970 {
5971 /* LDM with a fully-populated register list. This case is
5972 particularly tricky. Implement for now by fully emulating the
5973 instruction (which might not behave perfectly in all cases, but
5974 these instructions should be rare enough for that not to matter
5975 too much). */
5976 dsc->modinsn[0] = ARM_NOP;
5977
5978 dsc->cleanup = &cleanup_block_load_all;
5979 }
5980 else
5981 {
5982 /* LDM of a list of registers which includes PC. Implement by
5983 rewriting the list of registers to be transferred into a
5984 contiguous chunk r0...rX before doing the transfer, then shuffling
5985 registers into the correct places in the cleanup routine. */
5986 unsigned int regmask = insn & 0xffff;
5987 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
5988 unsigned int to = 0, from = 0, i, new_rn;
5989
5990 for (i = 0; i < num_in_list; i++)
36073a92 5991 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
5992
5993 /* Writeback makes things complicated. We need to avoid clobbering
5994 the base register with one of the registers in our modified
5995 register list, but just using a different register can't work in
5996 all cases, e.g.:
5997
5998 ldm r14!, {r0-r13,pc}
5999
6000 which would need to be rewritten as:
6001
6002 ldm rN!, {r0-r14}
6003
6004 but that can't work, because there's no free register for N.
6005
6006 Solve this by turning off the writeback bit, and emulating
6007 writeback manually in the cleanup routine. */
6008
6009 if (writeback)
6010 insn &= ~(1 << 21);
6011
6012 new_regmask = (1 << num_in_list) - 1;
6013
6014 if (debug_displaced)
6015 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6016 "{..., pc}: original reg list %.4x, modified "
6017 "list %.4x\n"), rn, writeback ? "!" : "",
6018 (int) insn & 0xffff, new_regmask);
6019
6020 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6021
6022 dsc->cleanup = &cleanup_block_load_pc;
6023 }
6024 }
6025 else
6026 {
6027 /* STM of a list of registers which includes PC. Run the instruction
6028 as-is, but out of line: this will store the wrong value for the PC,
6029 so we must manually fix up the memory in the cleanup routine.
6030 Doing things this way has the advantage that we can auto-detect
6031 the offset of the PC write (which is architecture-dependent) in
6032 the cleanup routine. */
6033 dsc->modinsn[0] = insn;
6034
6035 dsc->cleanup = &cleanup_block_store_pc;
6036 }
6037
6038 return 0;
6039}
6040
34518530
YQ
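/* Copy a 32-bit Thumb-2 LDM/STM (block transfer) instruction.  */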
6041static int
6042thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6043 struct regcache *regs,
6044 struct displaced_step_closure *dsc)
cca44b1b 6045{
34518530
YQ
6046 int rn = bits (insn1, 0, 3);
6047 int load = bit (insn1, 4);
6048 int writeback = bit (insn1, 5);
cca44b1b 6049
34518530
YQ
6050 /* Block transfers which don't mention PC can be run directly
6051 out-of-line. */
6052 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6053 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6054
34518530
YQ
6055 if (rn == ARM_PC_REGNUM)
6056 {
6057 warning (_("displaced: Unpredictable LDM or STM with "
6058 "base register r15"));
6059 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6060 "unpredictable ldm/stm", dsc);
6061 }
cca44b1b
JB
6062
6063 if (debug_displaced)
34518530
YQ
6064 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6065 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6066
34518530
YQ
6067  /* Clear bit 13, since it should always be zero.  */
6068 dsc->u.block.regmask = (insn2 & 0xdfff);
6069 dsc->u.block.rn = rn;
cca44b1b 6070
34518530
YQ
6071 dsc->u.block.load = load;
6072 dsc->u.block.user = 0;
6073 dsc->u.block.increment = bit (insn1, 7);
6074 dsc->u.block.before = bit (insn1, 8);
6075 dsc->u.block.writeback = writeback;
6076 dsc->u.block.cond = INST_AL;
6077 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6078
34518530
YQ
6079 if (load)
6080 {
6081 if (dsc->u.block.regmask == 0xffff)
6082 {
6083	  /* Cannot happen: bit 13 was cleared above, so regmask is never 0xffff.  */
6084 gdb_assert (0);
6085 }
6086 else
6087 {
6088 unsigned int regmask = dsc->u.block.regmask;
6089 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
6090 unsigned int to = 0, from = 0, i, new_rn;
6091
6092 for (i = 0; i < num_in_list; i++)
6093 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6094
6095 if (writeback)
6096 insn1 &= ~(1 << 5);
6097
6098 new_regmask = (1 << num_in_list) - 1;
6099
6100 if (debug_displaced)
6101 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6102 "{..., pc}: original reg list %.4x, modified "
6103 "list %.4x\n"), rn, writeback ? "!" : "",
6104 (int) dsc->u.block.regmask, new_regmask);
6105
6106 dsc->modinsn[0] = insn1;
6107 dsc->modinsn[1] = (new_regmask & 0xffff);
6108 dsc->numinsns = 2;
6109
6110 dsc->cleanup = &cleanup_block_load_pc;
6111 }
6112 }
6113 else
6114 {
6115 dsc->modinsn[0] = insn1;
6116 dsc->modinsn[1] = insn2;
6117 dsc->numinsns = 2;
6118 dsc->cleanup = &cleanup_block_store_pc;
6119 }
6120 return 0;
6121}
6122
d9311bfa
AT
6123/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6124 This is used to avoid a dependency on BFD's bfd_endian enum. */
6125
6126ULONGEST
6127arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6128 int byte_order)
6129{
6130 return read_memory_unsigned_integer (memaddr, len, byte_order);
6131}
6132
6133/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6134
6135CORE_ADDR
6136arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6137 CORE_ADDR val)
6138{
6139 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6140}
6141
6142/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6143
6144CORE_ADDR
6145arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self, CORE_ADDR pc)
6146{
6147 struct gdbarch_tdep *tdep;
6148
6149 tdep = gdbarch_tdep (get_regcache_arch (self->regcache));
6150 if (tdep->syscall_next_pc != NULL)
6151 return tdep->syscall_next_pc (self->regcache);
6152
6153 return 0;
6154}
6155
6156/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6157
6158int
6159arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6160{
6161 return arm_is_thumb (self->regcache);
6162}
6163
6164/* arm_software_single_step() is called just before we want to resume the
6165   inferior, if we want to single-step it but there is no hardware or kernel
6166   single-step support.  We find the possible targets of the coming
6167   instruction(s) and breakpoint them.  */
6168
6169int
6170arm_software_single_step (struct frame_info *frame)
6171{
6172 struct regcache *regcache = get_current_regcache ();
6173 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6174 struct address_space *aspace = get_regcache_aspace (regcache);
6175 struct arm_get_next_pcs next_pcs_ctx;
6176 CORE_ADDR pc;
6177 int i;
6178 VEC (CORE_ADDR) *next_pcs = NULL;
6179 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6180
6181 arm_get_next_pcs_ctor (&next_pcs_ctx,
6182 &arm_get_next_pcs_ops,
6183 gdbarch_byte_order (gdbarch),
6184 gdbarch_byte_order_for_code (gdbarch),
6185 gdbarch_tdep (gdbarch)->thumb2_breakpoint,
6186 regcache);
6187
6188 next_pcs = arm_get_next_pcs (&next_pcs_ctx, regcache_read_pc (regcache));
6189
6190 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6191 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6192
6193 do_cleanups (old_chain);
6194
6195 return 1;
6196}
6197
34518530
YQ
6198/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6199 for Linux, where some SVC instructions must be treated specially. */
6200
6201static void
6202cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6203 struct displaced_step_closure *dsc)
6204{
6205 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6206
6207 if (debug_displaced)
6208 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6209 "%.8lx\n", (unsigned long) resume_addr);
6210
6211 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6212}
6213
6214
6215/* Common copy routine for SVC instructions.  */
6216
6217static int
6218install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6219 struct displaced_step_closure *dsc)
6220{
6221 /* Preparation: none.
6222 Insn: unmodified svc.
6223 Cleanup: pc <- insn_addr + insn_size. */
6224
6225 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6226 instruction. */
6227 dsc->wrote_to_pc = 1;
6228
6229 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6230 if (dsc->u.svc.copy_svc_os)
6231 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6232 else
6233 {
6234 dsc->cleanup = &cleanup_svc;
6235 return 0;
6236 }
34518530
YQ
6237}
6238
6239static int
6240arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6241 struct regcache *regs, struct displaced_step_closure *dsc)
6242{
6243
6244 if (debug_displaced)
6245 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6246 (unsigned long) insn);
6247
6248 dsc->modinsn[0] = insn;
6249
6250 return install_svc (gdbarch, regs, dsc);
6251}
6252
6253static int
6254thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6255 struct regcache *regs, struct displaced_step_closure *dsc)
6256{
6257
6258 if (debug_displaced)
6259 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6260 insn);
bd18283a 6261
34518530
YQ
6262 dsc->modinsn[0] = insn;
6263
6264 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6265}
6266
6267/* Copy undefined instructions. */
6268
6269static int
7ff120b4
YQ
6270arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6271 struct displaced_step_closure *dsc)
cca44b1b
JB
6272{
6273 if (debug_displaced)
0963b4bd
MS
6274 fprintf_unfiltered (gdb_stdlog,
6275 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6276 (unsigned long) insn);
6277
6278 dsc->modinsn[0] = insn;
6279
6280 return 0;
6281}
6282
34518530
YQ
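/* Copy a 32-bit Thumb undefined instruction unmodified.  */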
6283static int
6284thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6285 struct displaced_step_closure *dsc)
6286{
6287
6288 if (debug_displaced)
6289 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6290 "%.4x %.4x\n", (unsigned short) insn1,
6291 (unsigned short) insn2);
6292
6293 dsc->modinsn[0] = insn1;
6294 dsc->modinsn[1] = insn2;
6295 dsc->numinsns = 2;
6296
6297 return 0;
6298}
6299
cca44b1b
JB
6300/* Copy unpredictable instructions. */
6301
6302static int
7ff120b4
YQ
6303arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6304 struct displaced_step_closure *dsc)
cca44b1b
JB
6305{
6306 if (debug_displaced)
6307 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6308 "%.8lx\n", (unsigned long) insn);
6309
6310 dsc->modinsn[0] = insn;
6311
6312 return 0;
6313}
6314
6315/* The decode_* functions are instruction decoding helpers. They mostly follow
6316 the presentation in the ARM ARM. */
6317
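/* Decode miscellaneous, memory hint and Advanced SIMD instructions in the
   unconditional encoding space.  */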
6318static int
7ff120b4
YQ
6319arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6320 struct regcache *regs,
6321 struct displaced_step_closure *dsc)
cca44b1b
JB
6322{
6323 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6324 unsigned int rn = bits (insn, 16, 19);
6325
6326 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6327 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6328 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6329 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6330 else if ((op1 & 0x60) == 0x20)
7ff120b4 6331 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6332 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6333 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6334 dsc);
cca44b1b 6335 else if ((op1 & 0x77) == 0x41)
7ff120b4 6336 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6337 else if ((op1 & 0x77) == 0x45)
7ff120b4 6338 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6339 else if ((op1 & 0x77) == 0x51)
6340 {
6341 if (rn != 0xf)
7ff120b4 6342 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6343 else
7ff120b4 6344 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6345 }
6346 else if ((op1 & 0x77) == 0x55)
7ff120b4 6347 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6348 else if (op1 == 0x57)
6349 switch (op2)
6350 {
7ff120b4
YQ
6351 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6352 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6353 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6354 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6355 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6356 }
6357 else if ((op1 & 0x63) == 0x43)
7ff120b4 6358 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6359 else if ((op2 & 0x1) == 0x0)
6360 switch (op1 & ~0x80)
6361 {
6362 case 0x61:
7ff120b4 6363 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6364 case 0x65:
7ff120b4 6365 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6366 case 0x71: case 0x75:
6367 /* pld/pldw reg. */
7ff120b4 6368 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6369 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6370 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6371 default:
7ff120b4 6372 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6373 }
6374 else
7ff120b4 6375 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6376}
6377
6378static int
7ff120b4
YQ
6379arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6380 struct regcache *regs,
6381 struct displaced_step_closure *dsc)
cca44b1b
JB
6382{
6383 if (bit (insn, 27) == 0)
7ff120b4 6384 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6385 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6386 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6387 {
6388 case 0x0: case 0x2:
7ff120b4 6389 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6390
6391 case 0x1: case 0x3:
7ff120b4 6392 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6393
6394 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6395 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6396
6397 case 0x8:
6398 switch ((insn & 0xe00000) >> 21)
6399 {
6400 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6401 /* stc/stc2. */
7ff120b4 6402 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6403
6404 case 0x2:
7ff120b4 6405 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6406
6407 default:
7ff120b4 6408 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6409 }
6410
6411 case 0x9:
6412 {
6413 int rn_f = (bits (insn, 16, 19) == 0xf);
6414 switch ((insn & 0xe00000) >> 21)
6415 {
6416 case 0x1: case 0x3:
6417 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6418 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6419 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6420
6421 case 0x2:
7ff120b4 6422 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6423
6424 case 0x4: case 0x5: case 0x6: case 0x7:
6425 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6426 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6427 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6428
6429 default:
7ff120b4 6430 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6431 }
6432 }
6433
6434 case 0xa:
7ff120b4 6435 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6436
6437 case 0xb:
6438 if (bits (insn, 16, 19) == 0xf)
6439 /* ldc/ldc2 lit. */
7ff120b4 6440 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6441 else
7ff120b4 6442 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6443
6444 case 0xc:
6445 if (bit (insn, 4))
7ff120b4 6446 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6447 else
7ff120b4 6448 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6449
6450 case 0xd:
6451 if (bit (insn, 4))
7ff120b4 6452 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6453 else
7ff120b4 6454 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6455
6456 default:
7ff120b4 6457 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6458 }
6459}
6460
6461/* Decode miscellaneous instructions in dp/misc encoding space. */
6462
6463static int
7ff120b4
YQ
6464arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6465 struct regcache *regs,
6466 struct displaced_step_closure *dsc)
cca44b1b
JB
6467{
6468 unsigned int op2 = bits (insn, 4, 6);
6469 unsigned int op = bits (insn, 21, 22);
6470 unsigned int op1 = bits (insn, 16, 19);
6471
6472 switch (op2)
6473 {
6474 case 0x0:
7ff120b4 6475 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6476
6477 case 0x1:
6478 if (op == 0x1) /* bx. */
7ff120b4 6479 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6480 else if (op == 0x3)
7ff120b4 6481 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6482 else
7ff120b4 6483 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6484
6485 case 0x2:
6486 if (op == 0x1)
6487 /* Not really supported. */
7ff120b4 6488 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6489 else
7ff120b4 6490 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6491
6492 case 0x3:
6493 if (op == 0x1)
7ff120b4 6494 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6495 regs, dsc); /* blx register. */
cca44b1b 6496 else
7ff120b4 6497 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6498
6499 case 0x5:
7ff120b4 6500 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6501
6502 case 0x7:
6503 if (op == 0x1)
7ff120b4 6504 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6505 else if (op == 0x3)
6506 /* Not really supported. */
7ff120b4 6507 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6508
6509 default:
7ff120b4 6510 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6511 }
6512}
6513
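/* Decode data-processing and miscellaneous instructions.  */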
6514static int
7ff120b4
YQ
6515arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6516 struct regcache *regs,
6517 struct displaced_step_closure *dsc)
cca44b1b
JB
6518{
6519 if (bit (insn, 25))
6520 switch (bits (insn, 20, 24))
6521 {
6522 case 0x10:
7ff120b4 6523 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6524
6525 case 0x14:
7ff120b4 6526 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6527
6528 case 0x12: case 0x16:
7ff120b4 6529 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6530
6531 default:
7ff120b4 6532 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6533 }
6534 else
6535 {
6536 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6537
6538 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6539 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6540 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6541 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6542 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6543 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6544 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6545 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6546 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6547 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6548 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6549 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b
JB
6550 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6551	    /* The second argument means "unprivileged".  */
7ff120b4
YQ
6552 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6553 dsc);
cca44b1b
JB
6554 }
6555
6556 /* Should be unreachable. */
6557 return 1;
6558}
6559
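/* Decode word and unsigned byte load/store instructions.  */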
6560static int
7ff120b4
YQ
6561arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6562 struct regcache *regs,
6563 struct displaced_step_closure *dsc)
cca44b1b
JB
6564{
6565 int a = bit (insn, 25), b = bit (insn, 4);
6566 uint32_t op1 = bits (insn, 20, 24);
6567 int rn_f = bits (insn, 16, 19) == 0xf;
6568
6569 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6570 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6571 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6572 else if ((!a && (op1 & 0x17) == 0x02)
6573 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6574 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6575 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6576 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6577 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6578 else if ((!a && (op1 & 0x17) == 0x03)
6579 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6580 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6581 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6582 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6583 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6584 else if ((!a && (op1 & 0x17) == 0x06)
6585 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6586 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6587 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6588 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6589 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6590 else if ((!a && (op1 & 0x17) == 0x07)
6591 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6592 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6593
6594 /* Should be unreachable. */
6595 return 1;
6596}
6597
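/* Decode media instructions.  All of these are either copied unmodified or
   treated as undefined.  */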
6598static int
7ff120b4
YQ
6599arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6600 struct displaced_step_closure *dsc)
cca44b1b
JB
6601{
6602 switch (bits (insn, 20, 24))
6603 {
6604 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6605 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6606
6607 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6608 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6609
6610 case 0x08: case 0x09: case 0x0a: case 0x0b:
6611 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6612 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6613 "decode/pack/unpack/saturate/reverse", dsc);
6614
6615 case 0x18:
6616 if (bits (insn, 5, 7) == 0) /* op2. */
6617 {
6618 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6619 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6620 else
7ff120b4 6621 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6622 }
6623 else
7ff120b4 6624 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6625
6626 case 0x1a: case 0x1b:
6627 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6628 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6629 else
7ff120b4 6630 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6631
6632 case 0x1c: case 0x1d:
6633 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6634 {
6635 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6636 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6637 else
7ff120b4 6638 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6639 }
6640 else
7ff120b4 6641 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6642
6643 case 0x1e: case 0x1f:
6644 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6645 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6646 else
7ff120b4 6647 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6648 }
6649
6650 /* Should be unreachable. */
6651 return 1;
6652}
6653
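/* Decode branch, branch-with-link and block transfer (LDM/STM)
   instructions.  */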
6654static int
7ff120b4
YQ
6655arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
6656 struct regcache *regs,
6657 struct displaced_step_closure *dsc)
cca44b1b
JB
6658{
6659 if (bit (insn, 25))
7ff120b4 6660 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6661 else
7ff120b4 6662 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6663}
6664
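/* Decode extension register (VFP/Neon) load/store instructions.  */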
6665static int
7ff120b4
YQ
6666arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6667 struct regcache *regs,
6668 struct displaced_step_closure *dsc)
cca44b1b
JB
6669{
6670 unsigned int opcode = bits (insn, 20, 24);
6671
6672 switch (opcode)
6673 {
6674 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6675 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6676
6677 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6678 case 0x12: case 0x16:
7ff120b4 6679 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6680
6681 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6682 case 0x13: case 0x17:
7ff120b4 6683 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6684
6685 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6686 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6687 /* Note: no writeback for these instructions. Bit 25 will always be
6688 zero though (via caller), so the following works OK. */
7ff120b4 6689 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6690 }
6691
6692 /* Should be unreachable. */
6693 return 1;
6694}
6695
34518530
YQ
6696/* Decode shifted register instructions. */
6697
6698static int
6699thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6700 uint16_t insn2, struct regcache *regs,
6701 struct displaced_step_closure *dsc)
6702{
6703 /* PC is only allowed to be used in instruction MOV. */
6704
6705 unsigned int op = bits (insn1, 5, 8);
6706 unsigned int rn = bits (insn1, 0, 3);
6707
6708 if (op == 0x2 && rn == 0xf) /* MOV */
6709 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6710 else
6711 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6712 "dp (shift reg)", dsc);
6713}
6714
6715
6716/* Decode extension register load/store. Exactly the same as
6717 arm_decode_ext_reg_ld_st. */
6718
6719static int
6720thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6721 uint16_t insn2, struct regcache *regs,
6722 struct displaced_step_closure *dsc)
6723{
6724 unsigned int opcode = bits (insn1, 4, 8);
6725
6726 switch (opcode)
6727 {
6728 case 0x04: case 0x05:
6729 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6730 "vfp/neon vmov", dsc);
6731
6732 case 0x08: case 0x0c: /* 01x00 */
6733 case 0x0a: case 0x0e: /* 01x10 */
6734 case 0x12: case 0x16: /* 10x10 */
6735 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6736 "vfp/neon vstm/vpush", dsc);
6737
6738 case 0x09: case 0x0d: /* 01x01 */
6739 case 0x0b: case 0x0f: /* 01x11 */
6740 case 0x13: case 0x17: /* 10x11 */
6741 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6742 "vfp/neon vldm/vpop", dsc);
6743
6744 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6745 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6746 "vstr", dsc);
6747 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6748 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6749 }
6750
6751 /* Should be unreachable. */
6752 return 1;
6753}
6754
cca44b1b 6755static int
7ff120b4
YQ
6756arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
6757 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6758{
6759 unsigned int op1 = bits (insn, 20, 25);
6760 int op = bit (insn, 4);
6761 unsigned int coproc = bits (insn, 8, 11);
6762 unsigned int rn = bits (insn, 16, 19);
6763
6764 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6765 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6766 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6767 && (coproc & 0xe) != 0xa)
6768 /* stc/stc2. */
7ff120b4 6769 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6770 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6771 && (coproc & 0xe) != 0xa)
6772 /* ldc/ldc2 imm/lit. */
7ff120b4 6773 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6774 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6775 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6776 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6777 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6778 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6779 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6780 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6781 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6782 else if ((op1 & 0x30) == 0x20 && !op)
6783 {
6784 if ((coproc & 0xe) == 0xa)
7ff120b4 6785 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6786 else
7ff120b4 6787 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6788 }
6789 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6790 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6791 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6792 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6793 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6794 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6795 else if ((op1 & 0x30) == 0x30)
7ff120b4 6796 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6797 else
7ff120b4 6798 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6799}
6800
34518530
YQ
6801static int
6802thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6803 uint16_t insn2, struct regcache *regs,
6804 struct displaced_step_closure *dsc)
6805{
6806 unsigned int coproc = bits (insn2, 8, 11);
6807 unsigned int op1 = bits (insn1, 4, 9);
6808 unsigned int bit_5_8 = bits (insn1, 5, 8);
6809 unsigned int bit_9 = bit (insn1, 9);
6810 unsigned int bit_4 = bit (insn1, 4);
6811 unsigned int rn = bits (insn1, 0, 3);
6812
6813 if (bit_9 == 0)
6814 {
6815 if (bit_5_8 == 2)
6816 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6817 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6818 dsc);
6819 else if (bit_5_8 == 0) /* UNDEFINED. */
6820 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6821 else
6822 {
6823	      /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
6824 if ((coproc & 0xe) == 0xa)
6825 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6826 dsc);
6827 else /* coproc is not 101x. */
6828 {
6829 if (bit_4 == 0) /* STC/STC2. */
6830 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6831 "stc/stc2", dsc);
6832	      else /* LDC/LDC2 {literal, immediate}.  */
6833 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6834 regs, dsc);
6835 }
6836 }
6837 }
6838 else
6839 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6840
6841 return 0;
6842}
6843
6844static void
6845install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6846 struct displaced_step_closure *dsc, int rd)
6847{
6848 /* ADR Rd, #imm
6849
6850 Rewrite as:
6851
6852 Preparation: Rd <- PC
6853 Insn: ADD Rd, #imm
6854 Cleanup: Null.
6855 */
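   /* An illustrative example (a sketch, with a made-up register and label):
      for "adr r5, label" the preparation below loads r5 with the PC value the
      instruction would have seen at its original location, so the copied
      "add r5, #imm" executed in the scratch space yields the same address for
      "label" as the original ADR would have.  */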
6856
6857 /* Rd <- PC */
6858 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6859 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6860}
6861
6862static int
6863thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6864 struct displaced_step_closure *dsc,
6865 int rd, unsigned int imm)
6866{
6867
6868 /* Encoding T2: ADDS Rd, #imm */
6869 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6870
6871 install_pc_relative (gdbarch, regs, dsc, rd);
6872
6873 return 0;
6874}
6875
6876static int
6877thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6878 struct regcache *regs,
6879 struct displaced_step_closure *dsc)
6880{
6881 unsigned int rd = bits (insn, 8, 10);
6882 unsigned int imm8 = bits (insn, 0, 7);
6883
6884 if (debug_displaced)
6885 fprintf_unfiltered (gdb_stdlog,
6886 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6887 rd, imm8, insn);
6888
6889 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6890}
6891
6892static int
6893thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6894 uint16_t insn2, struct regcache *regs,
6895 struct displaced_step_closure *dsc)
6896{
6897 unsigned int rd = bits (insn2, 8, 11);
6898  /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6899     extract the raw immediate encoding rather than computing the immediate.
6900     When generating the ADD or SUB instruction, we can then simply OR the
6901     raw encoding back into the instruction template.  */
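  /* Sketch of the encoding assumed here: the immediate is split as
     i:imm3:imm8, with the i bit in bit 10 of the first halfword and imm3 and
     imm8 in bits 14-12 and 7-0 of the second halfword; these are exactly the
     bits kept in imm_i and imm_3_8 below.  */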
6902 unsigned int imm_3_8 = insn2 & 0x70ff;
6903 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6904
6905 if (debug_displaced)
6906 fprintf_unfiltered (gdb_stdlog,
6907 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6908 rd, imm_i, imm_3_8, insn1, insn2);
6909
6910 if (bit (insn1, 7)) /* Encoding T2 */
6911 {
6912 /* Encoding T3: SUB Rd, Rd, #imm */
6913 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6914 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6915 }
6916 else /* Encoding T3 */
6917 {
6918 /* Encoding T3: ADD Rd, Rd, #imm */
6919 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6920 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6921 }
6922 dsc->numinsns = 2;
6923
6924 install_pc_relative (gdbarch, regs, dsc, rd);
6925
6926 return 0;
6927}
6928
6929static int
6930thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
6931 struct regcache *regs,
6932 struct displaced_step_closure *dsc)
6933{
6934 unsigned int rt = bits (insn1, 8, 10);
6935 unsigned int pc;
6936 int imm8 = (bits (insn1, 0, 7) << 2);
6937 CORE_ADDR from = dsc->insn_addr;
6938
6939 /* LDR Rd, #imm8
6940
6941     Rewrite as:
6942
6943 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6944
6945 Insn: LDR R0, [R2, R3];
6946 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
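  /* An illustrative example (a sketch with made-up operands): "ldr r1,
     [pc, #8]" is executed in the scratch space as "ldr r0, [r2, r3]" with R2
     preloaded with Align(PC,4) of the original location and R3 with 8;
     cleanup_load then copies R0 into R1 and restores R0, R2 and R3 from
     dsc->tmp[].  */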
6947
6948 if (debug_displaced)
6949 fprintf_unfiltered (gdb_stdlog,
6950 "displaced: copying thumb ldr r%d [pc #%d]\n"
6951 , rt, imm8);
6952
6953 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6954 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6955 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6956 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6957 /* The assembler calculates the required value of the offset from the
6958 Align(PC,4) value of this instruction to the label. */
6959 pc = pc & 0xfffffffc;
6960
6961 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6962 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6963
6964 dsc->rd = rt;
6965 dsc->u.ldst.xfersize = 4;
6966 dsc->u.ldst.rn = 0;
6967 dsc->u.ldst.immed = 0;
6968 dsc->u.ldst.writeback = 0;
6969 dsc->u.ldst.restore_r4 = 0;
6970
6971 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6972
6973 dsc->cleanup = &cleanup_load;
6974
6975 return 0;
6976}
6977
6978/* Copy Thumb cbnz/cbz instruction.  */
6979
6980static int
6981thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6982 struct regcache *regs,
6983 struct displaced_step_closure *dsc)
6984{
6985 int non_zero = bit (insn1, 11);
6986 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6987 CORE_ADDR from = dsc->insn_addr;
6988 int rn = bits (insn1, 0, 2);
6989 int rn_val = displaced_read_reg (regs, dsc, rn);
6990
6991 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6992  /* CBNZ and CBZ do not affect the condition flags.  If the condition is
6993     true, set it to INST_AL so cleanup_branch knows the branch is taken;
6994     otherwise leave it as it is and cleanup_branch will do nothing.  */
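  /* For example (a sketch): for "cbnz r2, label" with r2 != 0 the branch is
     taken, so dest below becomes FROM + 4 + imm5, the copied instruction is
     just a Thumb NOP, and cleanup_branch writes dest into the PC
     afterwards.  */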
6995 if (dsc->u.branch.cond)
6996 {
6997 dsc->u.branch.cond = INST_AL;
6998 dsc->u.branch.dest = from + 4 + imm5;
6999 }
7000 else
7001 dsc->u.branch.dest = from + 2;
7002
7003 dsc->u.branch.link = 0;
7004 dsc->u.branch.exchange = 0;
7005
7006 if (debug_displaced)
7007 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7008 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7009 rn, rn_val, insn1, dsc->u.branch.dest);
7010
7011 dsc->modinsn[0] = THUMB_NOP;
7012
7013 dsc->cleanup = &cleanup_branch;
7014 return 0;
7015}
7016
7017/* Copy Table Branch Byte/Halfword */
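/* TBB reads an unsigned byte from [Rn + Rm] and TBH an unsigned halfword from
   [Rn + 2*Rm]; both branch forward by twice the loaded value.  The branch
   target is therefore computed directly below by reading the table entry from
   memory, and cleanup_branch later writes it into the PC.  */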
7018static int
7019thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7020 uint16_t insn2, struct regcache *regs,
7021 struct displaced_step_closure *dsc)
7022{
7023 ULONGEST rn_val, rm_val;
7024 int is_tbh = bit (insn2, 4);
7025 CORE_ADDR halfwords = 0;
7026 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7027
7028 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7029 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7030
7031 if (is_tbh)
7032 {
7033 gdb_byte buf[2];
7034
7035 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7036 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7037 }
7038 else
7039 {
7040 gdb_byte buf[1];
7041
7042 target_read_memory (rn_val + rm_val, buf, 1);
7043 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7044 }
7045
7046 if (debug_displaced)
7047 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7048 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7049 (unsigned int) rn_val, (unsigned int) rm_val,
7050 (unsigned int) halfwords);
7051
7052 dsc->u.branch.cond = INST_AL;
7053 dsc->u.branch.link = 0;
7054 dsc->u.branch.exchange = 0;
7055 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7056
7057 dsc->cleanup = &cleanup_branch;
7058
7059 return 0;
7060}
7061
7062static void
7063cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7064 struct displaced_step_closure *dsc)
7065{
7066 /* PC <- r7 */
7067 int val = displaced_read_reg (regs, dsc, 7);
7068 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7069
7070 /* r7 <- r8 */
7071 val = displaced_read_reg (regs, dsc, 8);
7072 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7073
7074 /* r8 <- tmp[0] */
7075 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7076
7077}
7078
7079static int
7080thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
7081 struct regcache *regs,
7082 struct displaced_step_closure *dsc)
7083{
7084 dsc->u.block.regmask = insn1 & 0x00ff;
7085
7086  /* Rewrite the instruction POP {rX, rY, ..., rZ, PC}
7087     to one of the following sequences:
7088
7089 (1) register list is full, that is, r0-r7 are used.
7090 Prepare: tmp[0] <- r8
7091
7092 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7093 MOV r8, r7; Move value of r7 to r8;
7094 POP {r7}; Store PC value into r7.
7095
7096 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7097
7098 (2) register list is not full, supposing there are N registers in
7099 register list (except PC, 0 <= N <= 7).
7100 Prepare: for each i, 0 - N, tmp[i] <- ri.
7101
7102 POP {r0, r1, ...., rN};
7103
7104 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7105 from tmp[] properly.
7106 */
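  /* An illustrative example of case (2) above: POP {r1, r4, pc} has a
     register mask of 0x12, so num_in_list is 2 and the copied instruction
     becomes POP {r0, r1, r2}; cleanup_block_load_pc then moves the three
     loaded words into r1, r4 and the PC, and r0 - r2 are restored from
     dsc->tmp[].  */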
7107 if (debug_displaced)
7108 fprintf_unfiltered (gdb_stdlog,
7109 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7110 dsc->u.block.regmask, insn1);
7111
7112 if (dsc->u.block.regmask == 0xff)
7113 {
7114 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7115
7116 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7117 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7118 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7119
7120 dsc->numinsns = 3;
7121 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7122 }
7123 else
7124 {
7125 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7126 unsigned int new_regmask, bit = 1;
7127 unsigned int to = 0, from = 0, i, new_rn;
7128
7129 for (i = 0; i < num_in_list + 1; i++)
7130 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7131
7132 new_regmask = (1 << (num_in_list + 1)) - 1;
7133
7134 if (debug_displaced)
7135 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7136 "{..., pc}: original reg list %.4x,"
7137 " modified list %.4x\n"),
7138 (int) dsc->u.block.regmask, new_regmask);
7139
7140 dsc->u.block.regmask |= 0x8000;
7141 dsc->u.block.writeback = 0;
7142 dsc->u.block.cond = INST_AL;
7143
7144 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7145
7146 dsc->cleanup = &cleanup_block_load_pc;
7147 }
7148
7149 return 0;
7150}
7151
7152static void
7153thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7154 struct regcache *regs,
7155 struct displaced_step_closure *dsc)
7156{
7157 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7158 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7159 int err = 0;
7160
7161 /* 16-bit thumb instructions. */
7162 switch (op_bit_12_15)
7163 {
7164    /* Shift (immediate), add, subtract, move and compare.  */
7165 case 0: case 1: case 2: case 3:
7166 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7167 "shift/add/sub/mov/cmp",
7168 dsc);
7169 break;
7170 case 4:
7171 switch (op_bit_10_11)
7172 {
7173 case 0: /* Data-processing */
7174 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7175 "data-processing",
7176 dsc);
7177 break;
7178 case 1: /* Special data instructions and branch and exchange. */
7179 {
7180 unsigned short op = bits (insn1, 7, 9);
7181 if (op == 6 || op == 7) /* BX or BLX */
7182 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7183 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7184 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7185 else
7186 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7187 dsc);
7188 }
7189 break;
7190 default: /* LDR (literal) */
7191 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7192 }
7193 break;
7194 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7195 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7196 break;
7197 case 10:
7198 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7199 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7200 else /* Generate SP-relative address */
7201 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7202 break;
7203 case 11: /* Misc 16-bit instructions */
7204 {
7205 switch (bits (insn1, 8, 11))
7206 {
7207 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7208 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7209 break;
7210 case 12: case 13: /* POP */
7211 if (bit (insn1, 8)) /* PC is in register list. */
7212 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7213 else
7214 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7215 break;
7216 case 15: /* If-Then, and hints */
7217 if (bits (insn1, 0, 3))
7218 /* If-Then makes up to four following instructions conditional.
7219	       The IT instruction itself is not conditional, so handle it as an
7220	       ordinary unmodified instruction.  */
7221 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7222 dsc);
7223 else
7224 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7225 break;
7226 default:
7227 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7228 }
7229 }
7230 break;
7231 case 12:
7232 if (op_bit_10_11 < 2) /* Store multiple registers */
7233 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7234 else /* Load multiple registers */
7235 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7236 break;
7237 case 13: /* Conditional branch and supervisor call */
7238 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7239 err = thumb_copy_b (gdbarch, insn1, dsc);
7240 else
7241 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7242 break;
7243 case 14: /* Unconditional branch */
7244 err = thumb_copy_b (gdbarch, insn1, dsc);
7245 break;
7246 default:
7247 err = 1;
7248 }
7249
7250 if (err)
7251 internal_error (__FILE__, __LINE__,
7252 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7253}
7254
7255static int
7256decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7257 uint16_t insn1, uint16_t insn2,
7258 struct regcache *regs,
7259 struct displaced_step_closure *dsc)
7260{
7261 int rt = bits (insn2, 12, 15);
7262 int rn = bits (insn1, 0, 3);
7263 int op1 = bits (insn1, 7, 8);
7264 int err = 0;
7265
7266 switch (bits (insn1, 5, 6))
7267 {
7268 case 0: /* Load byte and memory hints */
7269 if (rt == 0xf) /* PLD/PLI */
7270 {
7271 if (rn == 0xf)
7272 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7273 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7274 else
7275 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7276 "pli/pld", dsc);
7277 }
7278 else
7279 {
7280 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7281 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7282 1);
7283 else
7284 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7285 "ldrb{reg, immediate}/ldrbt",
7286 dsc);
7287 }
7288
7289 break;
7290 case 1: /* Load halfword and memory hints. */
7291 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7292 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7293 "pld/unalloc memhint", dsc);
7294 else
7295 {
7296 if (rn == 0xf)
7297 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7298 2);
7299 else
7300 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7301 "ldrh/ldrht", dsc);
7302 }
7303 break;
7304 case 2: /* Load word */
7305 {
7306 int insn2_bit_8_11 = bits (insn2, 8, 11);
7307
7308 if (rn == 0xf)
7309 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7310 else if (op1 == 0x1) /* Encoding T3 */
7311 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7312 0, 1);
7313 else /* op1 == 0x0 */
7314 {
7315 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7316 /* LDR (immediate) */
7317 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7318 dsc, bit (insn2, 8), 1);
7319 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7320 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7321 "ldrt", dsc);
7322 else
7323 /* LDR (register) */
7324 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7325 dsc, 0, 0);
7326 }
7327 break;
7328 }
7329 default:
7330 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7331 break;
7332 }
7333 return 0;
7334}
7335
7336static void
7337thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7338 uint16_t insn2, struct regcache *regs,
7339 struct displaced_step_closure *dsc)
7340{
7341 int err = 0;
7342 unsigned short op = bit (insn2, 15);
7343 unsigned int op1 = bits (insn1, 11, 12);
7344
7345 switch (op1)
7346 {
7347 case 1:
7348 {
7349 switch (bits (insn1, 9, 10))
7350 {
7351 case 0:
7352 if (bit (insn1, 6))
7353 {
7354		  /* Load/store {dual, exclusive}, table branch.  */
7355 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7356 && bits (insn2, 5, 7) == 0)
7357 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7358 dsc);
7359 else
7360		    /* PC is not allowed to be used in load/store {dual, exclusive}
7361		       instructions.  */
7362 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7363 "load/store dual/ex", dsc);
7364 }
7365 else /* load/store multiple */
7366 {
7367 switch (bits (insn1, 7, 8))
7368 {
7369 case 0: case 3: /* SRS, RFE */
7370 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7371 "srs/rfe", dsc);
7372 break;
7373 case 1: case 2: /* LDM/STM/PUSH/POP */
7374 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7375 break;
7376 }
7377 }
7378 break;
7379
7380 case 1:
7381 /* Data-processing (shift register). */
7382 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7383 dsc);
7384 break;
7385 default: /* Coprocessor instructions. */
7386 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7387 break;
7388 }
7389 break;
7390 }
7391 case 2: /* op1 = 2 */
7392 if (op) /* Branch and misc control. */
7393 {
7394 if (bit (insn2, 14) /* BLX/BL */
7395 || bit (insn2, 12) /* Unconditional branch */
7396 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7397 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7398 else
7399 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7400 "misc ctrl", dsc);
7401 }
7402 else
7403 {
7404 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7405 {
7406 int op = bits (insn1, 4, 8);
7407 int rn = bits (insn1, 0, 3);
7408 if ((op == 0 || op == 0xa) && rn == 0xf)
7409 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7410 regs, dsc);
7411 else
7412 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7413 "dp/pb", dsc);
7414 }
7415	  else /* Data processing (modified immediate).  */
7416 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7417 "dp/mi", dsc);
7418 }
7419 break;
7420 case 3: /* op1 = 3 */
7421 switch (bits (insn1, 9, 10))
7422 {
7423 case 0:
7424 if (bit (insn1, 4))
7425 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7426 regs, dsc);
7427 else /* NEON Load/Store and Store single data item */
7428 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7429 "neon elt/struct load/store",
7430 dsc);
7431 break;
7432 case 1: /* op1 = 3, bits (9, 10) == 1 */
7433 switch (bits (insn1, 7, 8))
7434 {
7435 case 0: case 1: /* Data processing (register) */
7436 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7437 "dp(reg)", dsc);
7438 break;
7439 case 2: /* Multiply and absolute difference */
7440 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7441 "mul/mua/diff", dsc);
7442 break;
7443 case 3: /* Long multiply and divide */
7444 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7445 "lmul/lmua", dsc);
7446 break;
7447 }
7448 break;
7449 default: /* Coprocessor instructions */
7450 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7451 break;
7452 }
7453 break;
7454 default:
7455 err = 1;
7456 }
7457
7458 if (err)
7459 internal_error (__FILE__, __LINE__,
7460 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7461
7462}
7463
b434a28f
YQ
7464static void
7465thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7466 CORE_ADDR to, struct regcache *regs,
7467 struct displaced_step_closure *dsc)
7468{
34518530
YQ
7469 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7470 uint16_t insn1
7471 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7472
7473 if (debug_displaced)
7474 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7475 "at %.8lx\n", insn1, (unsigned long) from);
7476
7477 dsc->is_thumb = 1;
7478 dsc->insn_size = thumb_insn_size (insn1);
7479 if (thumb_insn_size (insn1) == 4)
7480 {
7481 uint16_t insn2
7482 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7483 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7484 }
7485 else
7486 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7487}
7488
cca44b1b 7489void
b434a28f
YQ
7490arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7491 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7492 struct displaced_step_closure *dsc)
7493{
7494 int err = 0;
b434a28f
YQ
7495 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7496 uint32_t insn;
cca44b1b
JB
7497
7498 /* Most displaced instructions use a 1-instruction scratch space, so set this
7499 here and override below if/when necessary. */
7500 dsc->numinsns = 1;
7501 dsc->insn_addr = from;
7502 dsc->scratch_base = to;
7503 dsc->cleanup = NULL;
7504 dsc->wrote_to_pc = 0;
7505
b434a28f
YQ
7506 if (!displaced_in_arm_mode (regs))
7507 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
7508
4db71c0b
YQ
7509 dsc->is_thumb = 0;
7510 dsc->insn_size = 4;
b434a28f
YQ
7511 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7512 if (debug_displaced)
7513 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7514 "at %.8lx\n", (unsigned long) insn,
7515 (unsigned long) from);
7516
cca44b1b 7517 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7518 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7519 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7520 {
7521 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7522 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7523 break;
7524
7525 case 0x4: case 0x5: case 0x6:
7ff120b4 7526 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7527 break;
7528
7529 case 0x7:
7ff120b4 7530 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7531 break;
7532
7533 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7534 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7535 break;
7536
7537 case 0xc: case 0xd: case 0xe: case 0xf:
7ff120b4 7538 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
cca44b1b
JB
7539 break;
7540 }
7541
7542 if (err)
7543 internal_error (__FILE__, __LINE__,
7544 _("arm_process_displaced_insn: Instruction decode error"));
7545}
7546
7547/* Actually set up the scratch space for a displaced instruction. */
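/* A sketch of the resulting scratch-space layout (SIZE is 2 for Thumb and 4
   for ARM):

     to + 0                    dsc->modinsn[0]
     to + SIZE                 dsc->modinsn[1]   (only if dsc->numinsns > 1)
     ...
     to + numinsns * SIZE      ARM or Thumb breakpoint

   so the inferior executes the copied instruction(s) and then traps back to
   GDB.  */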
7548
7549void
7550arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7551 CORE_ADDR to, struct displaced_step_closure *dsc)
7552{
7553 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7554 unsigned int i, len, offset;
cca44b1b 7555 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7556 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7557 const gdb_byte *bkp_insn;
cca44b1b 7558
4db71c0b 7559 offset = 0;
cca44b1b
JB
7560 /* Poke modified instruction(s). */
7561 for (i = 0; i < dsc->numinsns; i++)
7562 {
7563 if (debug_displaced)
4db71c0b
YQ
7564 {
7565 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7566 if (size == 4)
7567 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7568 dsc->modinsn[i]);
7569 else if (size == 2)
7570 fprintf_unfiltered (gdb_stdlog, "%.4x",
7571 (unsigned short)dsc->modinsn[i]);
7572
7573 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7574 (unsigned long) to + offset);
7575
7576 }
7577 write_memory_unsigned_integer (to + offset, size,
7578 byte_order_for_code,
cca44b1b 7579 dsc->modinsn[i]);
4db71c0b
YQ
7580 offset += size;
7581 }
7582
7583 /* Choose the correct breakpoint instruction. */
7584 if (dsc->is_thumb)
7585 {
7586 bkp_insn = tdep->thumb_breakpoint;
7587 len = tdep->thumb_breakpoint_size;
7588 }
7589 else
7590 {
7591 bkp_insn = tdep->arm_breakpoint;
7592 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7593 }
7594
7595 /* Put breakpoint afterwards. */
4db71c0b 7596 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7597
7598 if (debug_displaced)
7599 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7600 paddress (gdbarch, from), paddress (gdbarch, to));
7601}
7602
7603/* Entry point for copying an instruction into scratch space for displaced
7604 stepping. */
7605
7606struct displaced_step_closure *
7607arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
7608 CORE_ADDR from, CORE_ADDR to,
7609 struct regcache *regs)
7610{
8d749320
SM
7611 struct displaced_step_closure *dsc = XNEW (struct displaced_step_closure);
7612
b434a28f 7613 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
cca44b1b
JB
7614 arm_displaced_init_closure (gdbarch, from, to, dsc);
7615
7616 return dsc;
7617}
7618
7619/* Entry point for cleaning things up after a displaced instruction has been
7620 single-stepped. */
7621
7622void
7623arm_displaced_step_fixup (struct gdbarch *gdbarch,
7624 struct displaced_step_closure *dsc,
7625 CORE_ADDR from, CORE_ADDR to,
7626 struct regcache *regs)
7627{
7628 if (dsc->cleanup)
7629 dsc->cleanup (gdbarch, regs, dsc);
7630
7631 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7632 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7633 dsc->insn_addr + dsc->insn_size);
7634
cca44b1b
JB
7635}
7636
7637#include "bfd-in2.h"
7638#include "libcoff.h"
7639
7640static int
7641gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7642{
9a3c8263 7643 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
9779414d
DJ
7644
7645 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7646 {
7647 static asymbol *asym;
7648 static combined_entry_type ce;
7649 static struct coff_symbol_struct csym;
7650 static struct bfd fake_bfd;
7651 static bfd_target fake_target;
7652
7653 if (csym.native == NULL)
7654 {
7655 /* Create a fake symbol vector containing a Thumb symbol.
7656 This is solely so that the code in print_insn_little_arm()
7657 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7658 the presence of a Thumb symbol and switch to decoding
7659 Thumb instructions. */
7660
7661 fake_target.flavour = bfd_target_coff_flavour;
7662 fake_bfd.xvec = &fake_target;
7663 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7664 csym.native = &ce;
7665 csym.symbol.the_bfd = &fake_bfd;
7666 csym.symbol.name = "fake";
7667 asym = (asymbol *) & csym;
7668 }
7669
7670 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7671 info->symbols = &asym;
7672 }
7673 else
7674 info->symbols = NULL;
7675
7676 if (info->endian == BFD_ENDIAN_BIG)
7677 return print_insn_big_arm (memaddr, info);
7678 else
7679 return print_insn_little_arm (memaddr, info);
7680}
7681
7682/* The following define instruction sequences that will cause ARM
7683 cpu's to take an undefined instruction trap. These are used to
7684 signal a breakpoint to GDB.
7685
7686 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7687 modes. A different instruction is required for each mode. The ARM
7688 cpu's can also be big or little endian. Thus four different
7689 instructions are needed to support all cases.
7690
7691 Note: ARMv4 defines several new instructions that will take the
7692 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7693 not in fact add the new instructions. The new undefined
7694 instructions in ARMv4 are all instructions that had no defined
7695 behaviour in earlier chips. There is no guarantee that they will
7696   raise an exception, but they may be treated as NOPs.  In practice, it
7697   may only be safe to rely on instructions matching:
7698
7699 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7700 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7701 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7702
0963b4bd 7703   Even this may only be true if the condition predicate is true.  The
cca44b1b
JB
7704 following use a condition predicate of ALWAYS so it is always TRUE.
7705
7706 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7707 and NetBSD all use a software interrupt rather than an undefined
7708   instruction to force a trap.  This can be handled by the
7709 abi-specific code during establishment of the gdbarch vector. */
7710
7711#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7712#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7713#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7714#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
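/* For example, the little-endian ARM breakpoint above is the word 0xe7ffdefe:
   condition 0xe (always), bits 27-25 equal to 011 and bit 4 set, so it
   matches the bit pattern described in the comment above.  */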
7715
948f8e3d
PA
7716static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7717static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7718static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7719static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b
JB
7720
7721/* Determine the type and size of breakpoint to insert at PCPTR. Uses
7722 the program counter value to determine whether a 16-bit or 32-bit
7723 breakpoint should be used. It returns a pointer to a string of
7724 bytes that encode a breakpoint instruction, stores the length of
7725 the string to *lenptr, and adjusts the program counter (if
7726 necessary) to point to the actual memory location where the
7727 breakpoint should be inserted. */
7728
7729static const unsigned char *
7730arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7731{
7732 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7733 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7734
9779414d 7735 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7736 {
7737 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7738
7739 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7740 check whether we are replacing a 32-bit instruction. */
7741 if (tdep->thumb2_breakpoint != NULL)
7742 {
7743 gdb_byte buf[2];
7744 if (target_read_memory (*pcptr, buf, 2) == 0)
7745 {
7746 unsigned short inst1;
7747 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7748 if (thumb_insn_size (inst1) == 4)
177321bd
DJ
7749 {
7750 *lenptr = tdep->thumb2_breakpoint_size;
7751 return tdep->thumb2_breakpoint;
7752 }
7753 }
7754 }
7755
cca44b1b
JB
7756 *lenptr = tdep->thumb_breakpoint_size;
7757 return tdep->thumb_breakpoint;
7758 }
7759 else
7760 {
7761 *lenptr = tdep->arm_breakpoint_size;
7762 return tdep->arm_breakpoint;
7763 }
7764}
7765
177321bd
DJ
7766static void
7767arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7768 int *kindptr)
7769{
177321bd
DJ
7770 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7771
9779414d 7772 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
177321bd
DJ
7773 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7774 that this is not confused with a 32-bit ARM breakpoint. */
7775 *kindptr = 3;
7776}
7777
cca44b1b
JB
7778/* Extract from an array REGBUF containing the (raw) register state a
7779 function return value of type TYPE, and copy that, in virtual
7780 format, into VALBUF. */
7781
7782static void
7783arm_extract_return_value (struct type *type, struct regcache *regs,
7784 gdb_byte *valbuf)
7785{
7786 struct gdbarch *gdbarch = get_regcache_arch (regs);
7787 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7788
7789 if (TYPE_CODE_FLT == TYPE_CODE (type))
7790 {
7791 switch (gdbarch_tdep (gdbarch)->fp_model)
7792 {
7793 case ARM_FLOAT_FPA:
7794 {
7795 /* The value is in register F0 in internal format. We need to
7796 extract the raw value and then convert it to the desired
7797 internal type. */
7798 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7799
7800 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7801 convert_from_extended (floatformat_from_type (type), tmpbuf,
7802 valbuf, gdbarch_byte_order (gdbarch));
7803 }
7804 break;
7805
7806 case ARM_FLOAT_SOFT_FPA:
7807 case ARM_FLOAT_SOFT_VFP:
7808	  /* ARM_FLOAT_VFP can arise if this is a variadic function, so it is
7809	     not using the VFP ABI code.  */
7810 case ARM_FLOAT_VFP:
7811 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7812 if (TYPE_LENGTH (type) > 4)
7813 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7814 valbuf + INT_REGISTER_SIZE);
7815 break;
7816
7817 default:
0963b4bd
MS
7818 internal_error (__FILE__, __LINE__,
7819 _("arm_extract_return_value: "
7820 "Floating point model not supported"));
cca44b1b
JB
7821 break;
7822 }
7823 }
7824 else if (TYPE_CODE (type) == TYPE_CODE_INT
7825 || TYPE_CODE (type) == TYPE_CODE_CHAR
7826 || TYPE_CODE (type) == TYPE_CODE_BOOL
7827 || TYPE_CODE (type) == TYPE_CODE_PTR
7828 || TYPE_CODE (type) == TYPE_CODE_REF
7829 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7830 {
b021a221
MS
7831 /* If the type is a plain integer, then the access is
7832 straight-forward. Otherwise we have to play around a bit
7833 more. */
cca44b1b
JB
7834 int len = TYPE_LENGTH (type);
7835 int regno = ARM_A1_REGNUM;
7836 ULONGEST tmp;
7837
7838 while (len > 0)
7839 {
7840 /* By using store_unsigned_integer we avoid having to do
7841 anything special for small big-endian values. */
7842 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7843 store_unsigned_integer (valbuf,
7844 (len > INT_REGISTER_SIZE
7845 ? INT_REGISTER_SIZE : len),
7846 byte_order, tmp);
7847 len -= INT_REGISTER_SIZE;
7848 valbuf += INT_REGISTER_SIZE;
7849 }
7850 }
7851 else
7852 {
7853 /* For a structure or union the behaviour is as if the value had
7854 been stored to word-aligned memory and then loaded into
7855 registers with 32-bit load instruction(s). */
7856 int len = TYPE_LENGTH (type);
7857 int regno = ARM_A1_REGNUM;
7858 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7859
7860 while (len > 0)
7861 {
7862 regcache_cooked_read (regs, regno++, tmpbuf);
7863 memcpy (valbuf, tmpbuf,
7864 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7865 len -= INT_REGISTER_SIZE;
7866 valbuf += INT_REGISTER_SIZE;
7867 }
7868 }
7869}
7870
7871
7872/* Will a function return an aggregate type in memory or in a
7873 register? Return 0 if an aggregate type can be returned in a
7874 register, 1 if it must be returned in memory. */
7875
7876static int
7877arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7878{
cca44b1b
JB
7879 enum type_code code;
7880
f168693b 7881 type = check_typedef (type);
cca44b1b 7882
b13c8ab2
YQ
7883 /* Simple, non-aggregate types (ie not including vectors and
7884 complex) are always returned in a register (or registers). */
7885 code = TYPE_CODE (type);
7886 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7887 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7888 return 0;
cca44b1b 7889
c4312b19
YQ
7890 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7891 {
7892 /* Vector values should be returned using ARM registers if they
7893 are not over 16 bytes. */
7894 return (TYPE_LENGTH (type) > 16);
7895 }
7896
b13c8ab2 7897 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 7898 {
b13c8ab2
YQ
7899 /* The AAPCS says all aggregates not larger than a word are returned
7900 in a register. */
7901 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7902 return 0;
7903
cca44b1b
JB
7904 return 1;
7905 }
b13c8ab2
YQ
7906 else
7907 {
7908 int nRc;
cca44b1b 7909
b13c8ab2
YQ
7910 /* All aggregate types that won't fit in a register must be returned
7911 in memory. */
7912 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7913 return 1;
cca44b1b 7914
b13c8ab2
YQ
7915 /* In the ARM ABI, "integer" like aggregate types are returned in
7916 registers. For an aggregate type to be integer like, its size
7917 must be less than or equal to INT_REGISTER_SIZE and the
7918 offset of each addressable subfield must be zero. Note that bit
7919 fields are not addressable, and all addressable subfields of
7920 unions always start at offset zero.
cca44b1b 7921
b13c8ab2
YQ
7922 This function is based on the behaviour of GCC 2.95.1.
7923 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 7924
b13c8ab2
YQ
7925 Note: All versions of GCC before GCC 2.95.2 do not set up the
7926 parameters correctly for a function returning the following
7927 structure: struct { float f;}; This should be returned in memory,
7928 not a register. Richard Earnshaw sent me a patch, but I do not
7929 know of any way to detect if a function like the above has been
7930 compiled with the correct calling convention. */
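      /* An illustrative example of the rule above: under the APCS,
	 struct { int i; } and struct { unsigned a : 8, b : 24; } are
	 "integer like" and are returned in r0, whereas
	 struct { char c; short s; } (an addressable field at a non-zero
	 offset) and struct { float f; } must be returned in memory.  */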
7931
7932 /* Assume all other aggregate types can be returned in a register.
7933 Run a check for structures, unions and arrays. */
7934 nRc = 0;
67255d04 7935
b13c8ab2
YQ
7936 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7937 {
7938 int i;
7939 /* Need to check if this struct/union is "integer" like. For
7940 this to be true, its size must be less than or equal to
7941 INT_REGISTER_SIZE and the offset of each addressable
7942 subfield must be zero. Note that bit fields are not
7943 addressable, and unions always start at offset zero. If any
7944 of the subfields is a floating point type, the struct/union
7945 cannot be an integer type. */
7946
7947 /* For each field in the object, check:
7948 1) Is it FP? --> yes, nRc = 1;
7949 2) Is it addressable (bitpos != 0) and
7950 not packed (bitsize == 0)?
7951 --> yes, nRc = 1
7952 */
7953
7954 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 7955 {
b13c8ab2
YQ
7956 enum type_code field_type_code;
7957
7958 field_type_code
7959 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7960 i)));
7961
7962 /* Is it a floating point type field? */
7963 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
7964 {
7965 nRc = 1;
7966 break;
7967 }
b13c8ab2
YQ
7968
7969 /* If bitpos != 0, then we have to care about it. */
7970 if (TYPE_FIELD_BITPOS (type, i) != 0)
7971 {
7972 /* Bitfields are not addressable. If the field bitsize is
7973 zero, then the field is not packed. Hence it cannot be
7974 a bitfield or any other packed type. */
7975 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7976 {
7977 nRc = 1;
7978 break;
7979 }
7980 }
67255d04
RE
7981 }
7982 }
67255d04 7983
b13c8ab2
YQ
7984 return nRc;
7985 }
67255d04
RE
7986}
7987
34e8f22d
RE
7988/* Write into appropriate registers a function return value of type
7989 TYPE, given in virtual format. */
7990
7991static void
b508a996 7992arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 7993 const gdb_byte *valbuf)
34e8f22d 7994{
be8626e0 7995 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 7996 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 7997
34e8f22d
RE
7998 if (TYPE_CODE (type) == TYPE_CODE_FLT)
7999 {
e362b510 8000 gdb_byte buf[MAX_REGISTER_SIZE];
34e8f22d 8001
be8626e0 8002 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8003 {
8004 case ARM_FLOAT_FPA:
8005
be8626e0
MD
8006 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8007 gdbarch_byte_order (gdbarch));
b508a996 8008 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8009 break;
8010
fd50bc42 8011 case ARM_FLOAT_SOFT_FPA:
08216dd7 8012 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8013	  /* ARM_FLOAT_VFP can arise if this is a variadic function, so it is
8014	     not using the VFP ABI code.  */
8015 case ARM_FLOAT_VFP:
b508a996
RE
8016 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8017 if (TYPE_LENGTH (type) > 4)
8018 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8019 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8020 break;
8021
8022 default:
9b20d036
MS
8023 internal_error (__FILE__, __LINE__,
8024 _("arm_store_return_value: Floating "
8025 "point model not supported"));
08216dd7
RE
8026 break;
8027 }
34e8f22d 8028 }
b508a996
RE
8029 else if (TYPE_CODE (type) == TYPE_CODE_INT
8030 || TYPE_CODE (type) == TYPE_CODE_CHAR
8031 || TYPE_CODE (type) == TYPE_CODE_BOOL
8032 || TYPE_CODE (type) == TYPE_CODE_PTR
8033 || TYPE_CODE (type) == TYPE_CODE_REF
8034 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8035 {
8036 if (TYPE_LENGTH (type) <= 4)
8037 {
8038 /* Values of one word or less are zero/sign-extended and
8039 returned in r0. */
7a5ea0d4 8040 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8041 LONGEST val = unpack_long (type, valbuf);
8042
e17a4113 8043 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8044 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8045 }
8046 else
8047 {
8048 /* Integral values greater than one word are stored in consecutive
8049 registers starting with r0. This will always be a multiple of
8050	     the register size.  */
8051 int len = TYPE_LENGTH (type);
8052 int regno = ARM_A1_REGNUM;
8053
8054 while (len > 0)
8055 {
8056 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8057 len -= INT_REGISTER_SIZE;
8058 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8059 }
8060 }
8061 }
34e8f22d 8062 else
b508a996
RE
8063 {
8064 /* For a structure or union the behaviour is as if the value had
8065 been stored to word-aligned memory and then loaded into
8066 registers with 32-bit load instruction(s). */
8067 int len = TYPE_LENGTH (type);
8068 int regno = ARM_A1_REGNUM;
7a5ea0d4 8069 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8070
8071 while (len > 0)
8072 {
8073 memcpy (tmpbuf, valbuf,
7a5ea0d4 8074 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8075 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8076 len -= INT_REGISTER_SIZE;
8077 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8078 }
8079 }
34e8f22d
RE
8080}
8081
2af48f68
PB
8082
8083/* Handle function return values. */
8084
8085static enum return_value_convention
6a3a010b 8086arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8087 struct type *valtype, struct regcache *regcache,
8088 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8089{
7c00367c 8090 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8091 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8092 enum arm_vfp_cprc_base_type vfp_base_type;
8093 int vfp_base_count;
8094
8095 if (arm_vfp_abi_for_function (gdbarch, func_type)
8096 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8097 {
8098 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8099 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8100 int i;
8101 for (i = 0; i < vfp_base_count; i++)
8102 {
58d6951d
DJ
8103 if (reg_char == 'q')
8104 {
8105 if (writebuf)
8106 arm_neon_quad_write (gdbarch, regcache, i,
8107 writebuf + i * unit_length);
8108
8109 if (readbuf)
8110 arm_neon_quad_read (gdbarch, regcache, i,
8111 readbuf + i * unit_length);
8112 }
8113 else
8114 {
8115 char name_buf[4];
8116 int regnum;
8117
8c042590 8118 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8119 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8120 strlen (name_buf));
8121 if (writebuf)
8122 regcache_cooked_write (regcache, regnum,
8123 writebuf + i * unit_length);
8124 if (readbuf)
8125 regcache_cooked_read (regcache, regnum,
8126 readbuf + i * unit_length);
8127 }
90445bd3
DJ
8128 }
8129 return RETURN_VALUE_REGISTER_CONVENTION;
8130 }
7c00367c 8131
2af48f68
PB
8132 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8133 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8134 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8135 {
7c00367c
MK
8136 if (tdep->struct_return == pcc_struct_return
8137 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8138 return RETURN_VALUE_STRUCT_CONVENTION;
8139 }
b13c8ab2
YQ
8140 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8141 {
8142 if (arm_return_in_memory (gdbarch, valtype))
8143 return RETURN_VALUE_STRUCT_CONVENTION;
8144 }
7052e42c 8145
2af48f68
PB
8146 if (writebuf)
8147 arm_store_return_value (valtype, regcache, writebuf);
8148
8149 if (readbuf)
8150 arm_extract_return_value (valtype, regcache, readbuf);
8151
8152 return RETURN_VALUE_REGISTER_CONVENTION;
8153}
8154
8155
9df628e0 8156static int
60ade65d 8157arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8158{
e17a4113
UW
8159 struct gdbarch *gdbarch = get_frame_arch (frame);
8160 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8161 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8162 CORE_ADDR jb_addr;
e362b510 8163 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8164
60ade65d 8165 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8166
8167 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8168 INT_REGISTER_SIZE))
9df628e0
RE
8169 return 0;
8170
e17a4113 8171 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8172 return 1;
8173}
8174
faa95490
DJ
8175/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8176 return the target PC. Otherwise return 0. */
c906108c
SS
8177
8178CORE_ADDR
52f729a7 8179arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8180{
2c02bd72 8181 const char *name;
faa95490 8182 int namelen;
c906108c
SS
8183 CORE_ADDR start_addr;
8184
8185 /* Find the starting address and name of the function containing the PC. */
8186 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8187 {
8188 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8189 check here. */
8190 start_addr = arm_skip_bx_reg (frame, pc);
8191 if (start_addr != 0)
8192 return start_addr;
8193
8194 return 0;
8195 }
c906108c 8196
faa95490
DJ
8197 /* If PC is in a Thumb call or return stub, return the address of the
8198 target PC, which is in a register. The thunk functions are called
8199   _call_via_xx, where xx is the register name.  The possible names
3d8d5e79
DJ
8200 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8201 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8202 if (startswith (name, "_call_via_")
8203 || startswith (name, "__ARM_call_via_"))
c906108c 8204 {
ed9a39eb
JM
8205 /* Use the name suffix to determine which register contains the
8206 target PC. */
c5aa993b
JM
8207 static char *table[15] =
8208 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8209 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8210 };
c906108c 8211 int regno;
faa95490 8212 int offset = strlen (name) - 2;
c906108c
SS
8213
8214 for (regno = 0; regno <= 14; regno++)
faa95490 8215 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8216 return get_frame_register_unsigned (frame, regno);
c906108c 8217 }
ed9a39eb 8218
faa95490
DJ
8219 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8220 non-interworking calls to foo. We could decode the stubs
8221 to find the target but it's easier to use the symbol table. */
8222 namelen = strlen (name);
8223 if (name[0] == '_' && name[1] == '_'
8224 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8225 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8226 || (namelen > 2 + strlen ("_from_arm")
61012eef 8227 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8228 {
8229 char *target_name;
8230 int target_len = namelen - 2;
3b7344d5 8231 struct bound_minimal_symbol minsym;
faa95490
DJ
8232 struct objfile *objfile;
8233 struct obj_section *sec;
8234
8235 if (name[namelen - 1] == 'b')
8236 target_len -= strlen ("_from_thumb");
8237 else
8238 target_len -= strlen ("_from_arm");
8239
224c3ddb 8240 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8241 memcpy (target_name, name + 2, target_len);
8242 target_name[target_len] = '\0';
8243
8244 sec = find_pc_section (pc);
8245 objfile = (sec == NULL) ? NULL : sec->objfile;
8246 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8247 if (minsym.minsym != NULL)
77e371c0 8248 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8249 else
8250 return 0;
8251 }
8252
c5aa993b 8253 return 0; /* not a stub */
c906108c
SS
8254}
8255
afd7eef0
RE
8256static void
8257set_arm_command (char *args, int from_tty)
8258{
edefbb7c
AC
8259 printf_unfiltered (_("\
8260\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8261 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8262}
8263
8264static void
8265show_arm_command (char *args, int from_tty)
8266{
26304000 8267 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8268}
8269
28e97307
DJ
8270static void
8271arm_update_current_architecture (void)
fd50bc42 8272{
28e97307 8273 struct gdbarch_info info;
fd50bc42 8274
28e97307 8275 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8276 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8277 return;
fd50bc42 8278
28e97307
DJ
8279 /* Update the architecture. */
8280 gdbarch_info_init (&info);
fd50bc42 8281
28e97307 8282 if (!gdbarch_update_p (info))
9b20d036 8283 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8284}
8285
8286static void
8287set_fp_model_sfunc (char *args, int from_tty,
8288 struct cmd_list_element *c)
8289{
570dc176 8290 int fp_model;
fd50bc42
RE
8291
8292 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8293 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8294 {
aead7601 8295 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8296 break;
8297 }
8298
8299 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8300 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8301 current_fp_model);
8302
28e97307 8303 arm_update_current_architecture ();
fd50bc42
RE
8304}
8305
8306static void
08546159
AC
8307show_fp_model (struct ui_file *file, int from_tty,
8308 struct cmd_list_element *c, const char *value)
fd50bc42 8309{
f5656ead 8310 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8311
28e97307 8312 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8313 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8314 fprintf_filtered (file, _("\
8315The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8316 fp_model_strings[tdep->fp_model]);
8317 else
8318 fprintf_filtered (file, _("\
8319The current ARM floating point model is \"%s\".\n"),
8320 fp_model_strings[arm_fp_model]);
8321}
8322
8323static void
8324arm_set_abi (char *args, int from_tty,
8325 struct cmd_list_element *c)
8326{
570dc176 8327 int arm_abi;
28e97307
DJ
8328
8329 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8330 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8331 {
aead7601 8332 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8333 break;
8334 }
8335
8336 if (arm_abi == ARM_ABI_LAST)
8337 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8338 arm_abi_string);
8339
8340 arm_update_current_architecture ();
8341}
8342
8343static void
8344arm_show_abi (struct ui_file *file, int from_tty,
8345 struct cmd_list_element *c, const char *value)
8346{
f5656ead 8347 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8348
8349 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8350 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8351 fprintf_filtered (file, _("\
8352The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8353 arm_abi_strings[tdep->arm_abi]);
8354 else
8355 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8356 arm_abi_string);
fd50bc42
RE
8357}
8358
0428b8f5
DJ
8359static void
8360arm_show_fallback_mode (struct ui_file *file, int from_tty,
8361 struct cmd_list_element *c, const char *value)
8362{
0963b4bd
MS
8363 fprintf_filtered (file,
8364 _("The current execution mode assumed "
8365 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8366 arm_fallback_mode_string);
8367}
8368
8369static void
8370arm_show_force_mode (struct ui_file *file, int from_tty,
8371 struct cmd_list_element *c, const char *value)
8372{
f5656ead 8373 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
0428b8f5 8374
0963b4bd
MS
8375 fprintf_filtered (file,
8376 _("The current execution mode assumed "
8377 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8378 arm_force_mode_string);
8379}
8380
afd7eef0
RE
8381/* If the user changes the register disassembly style used for info
8382 register and other commands, we have to also switch the style used
8383 in opcodes for disassembly output. This function is run in the "set
8384 arm disassembler" command, and does that. */
bc90b915
FN
8385
8386static void
afd7eef0 8387set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
8388 struct cmd_list_element *c)
8389{
afd7eef0 8390 set_disassembly_style ();
bc90b915
FN
8391}
8392\f
966fbf70 8393/* Return the ARM register name corresponding to register I. */
a208b0cb 8394static const char *
d93859e2 8395arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8396{
58d6951d
DJ
8397 const int num_regs = gdbarch_num_regs (gdbarch);
8398
8399 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8400 && i >= num_regs && i < num_regs + 32)
8401 {
8402 static const char *const vfp_pseudo_names[] = {
8403 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8404 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8405 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8406 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8407 };
8408
8409 return vfp_pseudo_names[i - num_regs];
8410 }
8411
8412 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8413 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8414 {
8415 static const char *const neon_pseudo_names[] = {
8416 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8417 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8418 };
8419
8420 return neon_pseudo_names[i - num_regs - 32];
8421 }
8422
ff6f572f
DJ
8423 if (i >= ARRAY_SIZE (arm_register_names))
8424 /* These registers are only supported on targets which supply
8425 an XML description. */
8426 return "";
8427
966fbf70
RE
8428 return arm_register_names[i];
8429}
8430
bc90b915 8431static void
afd7eef0 8432set_disassembly_style (void)
bc90b915 8433{
123dc839 8434 int current;
bc90b915 8435
123dc839
DJ
8436 /* Find the style that the user wants. */
8437 for (current = 0; current < num_disassembly_options; current++)
8438 if (disassembly_style == valid_disassembly_styles[current])
8439 break;
8440 gdb_assert (current < num_disassembly_options);
bc90b915 8441
94c30b78 8442 /* Synchronize the disassembler. */
bc90b915
FN
8443 set_arm_regname_option (current);
8444}
8445
082fc60d
RE
8446/* Test whether the coff symbol specific value corresponds to a Thumb
8447 function. */
8448
8449static int
8450coff_sym_is_thumb (int val)
8451{
f8bf5763
PM
8452 return (val == C_THUMBEXT
8453 || val == C_THUMBSTAT
8454 || val == C_THUMBEXTFUNC
8455 || val == C_THUMBSTATFUNC
8456 || val == C_THUMBLABEL);
082fc60d
RE
8457}
8458
8459/* arm_coff_make_msymbol_special()
8460 arm_elf_make_msymbol_special()
8461
8462 These functions test whether the COFF or ELF symbol corresponds to
8463 an address in thumb code, and set a "special" bit in a minimal
8464 symbol to indicate that it does. */
8465
34e8f22d 8466static void
082fc60d
RE
8467arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8468{
467d42c4
UW
8469 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
8470 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8471 MSYMBOL_SET_SPECIAL (msym);
8472}
8473
34e8f22d 8474static void
082fc60d
RE
8475arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8476{
8477 if (coff_sym_is_thumb (val))
8478 MSYMBOL_SET_SPECIAL (msym);
8479}
8480
60c5725c 8481static void
c1bd65d0 8482arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8483{
9a3c8263 8484 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8485 unsigned int i;
8486
8487 for (i = 0; i < objfile->obfd->section_count; i++)
8488 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8489}
8490
8491static void
8492arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8493 asymbol *sym)
8494{
8495 const char *name = bfd_asymbol_name (sym);
8496 struct arm_per_objfile *data;
8497 VEC(arm_mapping_symbol_s) **map_p;
8498 struct arm_mapping_symbol new_map_sym;
8499
8500 gdb_assert (name[0] == '$');
8501 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8502 return;
8503
9a3c8263
SM
8504 data = (struct arm_per_objfile *) objfile_data (objfile,
8505 arm_objfile_data_key);
60c5725c
DJ
8506 if (data == NULL)
8507 {
8508 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8509 struct arm_per_objfile);
8510 set_objfile_data (objfile, arm_objfile_data_key, data);
8511 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8512 objfile->obfd->section_count,
8513 VEC(arm_mapping_symbol_s) *);
8514 }
8515 map_p = &data->section_maps[bfd_get_section (sym)->index];
8516
8517 new_map_sym.value = sym->value;
8518 new_map_sym.type = name[1];
8519
8520 /* Assume that most mapping symbols appear in order of increasing
8521 value. If they were randomly distributed, it would be faster to
8522 always push here and then sort at first use. */
8523 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8524 {
8525 struct arm_mapping_symbol *prev_map_sym;
8526
8527 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8528 if (prev_map_sym->value >= sym->value)
8529 {
8530 unsigned int idx;
8531 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8532 arm_compare_mapping_symbols);
8533 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8534 return;
8535 }
8536 }
8537
8538 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8539}
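
/* Editorial sketch, not part of the original file: once the
   per-section vectors built above are sorted by VALUE, finding the
   mapping symbol ('a' = ARM, 't' = Thumb, 'd' = data) that governs a
   given section offset is a plain "last element <= offset" search.
   The helper below is a hypothetical, array-based version of that
   lookup; the consumers in this file use the VEC helpers instead.  */

static char
arm_example_mapping_state (const struct arm_mapping_symbol *syms,
			   unsigned int count, bfd_vma offset)
{
  unsigned int lo = 0, hi = count;

  /* Binary search for the first symbol with value > OFFSET.  */
  while (lo < hi)
    {
      unsigned int mid = (lo + hi) / 2;

      if (syms[mid].value <= offset)
	lo = mid + 1;
      else
	hi = mid;
    }

  /* No mapping symbol at or before OFFSET: state unknown.  */
  return lo == 0 ? '\0' : syms[lo - 1].type;
}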
8540
756fe439 8541static void
61a1198a 8542arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8543{
9779414d 8544 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8545 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8546
8547 /* If necessary, set the T bit. */
8548 if (arm_apcs_32)
8549 {
9779414d 8550 ULONGEST val, t_bit;
61a1198a 8551 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8552 t_bit = arm_psr_thumb_bit (gdbarch);
8553 if (arm_pc_is_thumb (gdbarch, pc))
8554 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8555 val | t_bit);
756fe439 8556 else
61a1198a 8557 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8558 val & ~t_bit);
756fe439
DJ
8559 }
8560}
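
/* Minimal sketch, not part of the original file, of the state change
   arm_write_pc performs: jumping to a Thumb address must also set
   the Thumb flag in the status register.  T_BIT stands for the mask
   returned by arm_psr_thumb_bit (an assumption of this example).  */

static ULONGEST
arm_example_new_psr (ULONGEST psr, int dest_is_thumb, ULONGEST t_bit)
{
  /* Set the T bit for Thumb destinations, clear it otherwise.  */
  return dest_is_thumb ? (psr | t_bit) : (psr & ~t_bit);
}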
123dc839 8561
58d6951d
DJ
8562/* Read the contents of a NEON quad register, by reading from two
8563 double registers. This is used to implement the quad pseudo
8564 registers, and for argument passing in case the quad registers are
8565 missing; vectors are passed in quad registers when using the VFP
8566 ABI, even if a NEON unit is not present. REGNUM is the index of
8567 the quad register, in [0, 15]. */
8568
05d1431c 8569static enum register_status
58d6951d
DJ
8570arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8571 int regnum, gdb_byte *buf)
8572{
8573 char name_buf[4];
8574 gdb_byte reg_buf[8];
8575 int offset, double_regnum;
05d1431c 8576 enum register_status status;
58d6951d 8577
8c042590 8578 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8579 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8580 strlen (name_buf));
8581
8582 /* d0 is always the least significant half of q0. */
8583 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8584 offset = 8;
8585 else
8586 offset = 0;
8587
05d1431c
PA
8588 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8589 if (status != REG_VALID)
8590 return status;
58d6951d
DJ
8591 memcpy (buf + offset, reg_buf, 8);
8592
8593 offset = 8 - offset;
05d1431c
PA
8594 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8595 if (status != REG_VALID)
8596 return status;
58d6951d 8597 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8598
8599 return REG_VALID;
58d6951d
DJ
8600}
8601
05d1431c 8602static enum register_status
58d6951d
DJ
8603arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8604 int regnum, gdb_byte *buf)
8605{
8606 const int num_regs = gdbarch_num_regs (gdbarch);
8607 char name_buf[4];
8608 gdb_byte reg_buf[8];
8609 int offset, double_regnum;
8610
8611 gdb_assert (regnum >= num_regs);
8612 regnum -= num_regs;
8613
8614 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8615 /* Quad-precision register. */
05d1431c 8616 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8617 else
8618 {
05d1431c
PA
8619 enum register_status status;
8620
58d6951d
DJ
8621 /* Single-precision register. */
8622 gdb_assert (regnum < 32);
8623
8624 /* s0 is always the least significant half of d0. */
8625 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8626 offset = (regnum & 1) ? 0 : 4;
8627 else
8628 offset = (regnum & 1) ? 4 : 0;
8629
8c042590 8630 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8631 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8632 strlen (name_buf));
8633
05d1431c
PA
8634 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8635 if (status == REG_VALID)
8636 memcpy (buf, reg_buf + offset, 4);
8637 return status;
58d6951d
DJ
8638 }
8639}
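
/* Editorial sketch of the pseudo-register layout assumed by the
   read/write routines here (an illustration, not code from the
   original file): sN is one 4-byte half of d(N/2), and qN is the
   pair d(2N)/d(2N+1) with d(2N) always the least significant half;
   only the byte offsets depend on target endianness.  */

static void
arm_example_vfp_pseudo_layout (int big_endian, int snum, int qnum,
			       int *s_dreg, int *s_offset,
			       int *q_low_dreg, int *q_low_offset)
{
  /* Single precision: which double register and which half.  */
  *s_dreg = snum / 2;
  if (big_endian)
    *s_offset = (snum & 1) ? 0 : 4;
  else
    *s_offset = (snum & 1) ? 4 : 0;

  /* Quad: the double register holding bits 0..63 and where that
     half sits in the 16-byte buffer.  */
  *q_low_dreg = qnum * 2;
  *q_low_offset = big_endian ? 8 : 0;
}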
8640
8641/* Store the contents of BUF to a NEON quad register, by writing to
8642 two double registers. This is used to implement the quad pseudo
8643 registers, and for argument passing in case the quad registers are
8644 missing; vectors are passed in quad registers when using the VFP
8645 ABI, even if a NEON unit is not present. REGNUM is the index
8646 of the quad register, in [0, 15]. */
8647
8648static void
8649arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8650 int regnum, const gdb_byte *buf)
8651{
8652 char name_buf[4];
58d6951d
DJ
8653 int offset, double_regnum;
8654
8c042590 8655 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8656 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8657 strlen (name_buf));
8658
8659 /* d0 is always the least significant half of q0. */
8660 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8661 offset = 8;
8662 else
8663 offset = 0;
8664
8665 regcache_raw_write (regcache, double_regnum, buf + offset);
8666 offset = 8 - offset;
8667 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8668}
8669
8670static void
8671arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8672 int regnum, const gdb_byte *buf)
8673{
8674 const int num_regs = gdbarch_num_regs (gdbarch);
8675 char name_buf[4];
8676 gdb_byte reg_buf[8];
8677 int offset, double_regnum;
8678
8679 gdb_assert (regnum >= num_regs);
8680 regnum -= num_regs;
8681
8682 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8683 /* Quad-precision register. */
8684 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8685 else
8686 {
8687 /* Single-precision register. */
8688 gdb_assert (regnum < 32);
8689
8690 /* s0 is always the least significant half of d0. */
8691 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8692 offset = (regnum & 1) ? 0 : 4;
8693 else
8694 offset = (regnum & 1) ? 4 : 0;
8695
8c042590 8696 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8697 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8698 strlen (name_buf));
8699
8700 regcache_raw_read (regcache, double_regnum, reg_buf);
8701 memcpy (reg_buf + offset, buf, 4);
8702 regcache_raw_write (regcache, double_regnum, reg_buf);
8703 }
8704}
8705
123dc839
DJ
8706static struct value *
8707value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8708{
9a3c8263 8709 const int *reg_p = (const int *) baton;
123dc839
DJ
8710 return value_of_register (*reg_p, frame);
8711}
97e03143 8712\f
70f80edf
JT
8713static enum gdb_osabi
8714arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8715{
2af48f68 8716 unsigned int elfosabi;
70f80edf 8717 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8718
70f80edf 8719 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8720
28e97307
DJ
8721 if (elfosabi == ELFOSABI_ARM)
8722 /* GNU tools use this value. Check note sections in this case,
8723 as well. */
8724 bfd_map_over_sections (abfd,
8725 generic_elf_osabi_sniff_abi_tag_sections,
8726 &osabi);
97e03143 8727
28e97307 8728 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8729 return osabi;
97e03143
RE
8730}
8731
54483882
YQ
8732static int
8733arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8734 struct reggroup *group)
8735{
2c291032
YQ
8736 /* The FPS register's type is INT, but it belongs to float_reggroup.  Besides
8737 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8738 all_reggroup, of course. */
54483882 8739 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8740 return (group == float_reggroup
8741 || group == save_reggroup
8742 || group == restore_reggroup
8743 || group == all_reggroup);
54483882
YQ
8744 else
8745 return default_register_reggroup_p (gdbarch, regnum, group);
8746}
8747
25f8c692
JL
8748\f
8749/* For backward-compatibility we allow two 'g' packet lengths with
8750 the remote protocol depending on whether FPA registers are
8751 supplied. M-profile targets do not have FPA registers, but some
8752 stubs already exist in the wild which use a 'g' packet which
8753 supplies them albeit with dummy values. The packet format which
8754 includes FPA registers should be considered deprecated for
8755 M-profile targets. */
8756
8757static void
8758arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8759{
8760 if (gdbarch_tdep (gdbarch)->is_m)
8761 {
8762 /* If we know from the executable this is an M-profile target,
8763 cater for remote targets whose register set layout is the
8764 same as the FPA layout. */
8765 register_remote_g_packet_guess (gdbarch,
03145bf4 8766 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8767 (16 * INT_REGISTER_SIZE)
8768 + (8 * FP_REGISTER_SIZE)
8769 + (2 * INT_REGISTER_SIZE),
8770 tdesc_arm_with_m_fpa_layout);
8771
8772 /* The regular M-profile layout. */
8773 register_remote_g_packet_guess (gdbarch,
8774 /* r0-r12,sp,lr,pc; xpsr */
8775 (16 * INT_REGISTER_SIZE)
8776 + INT_REGISTER_SIZE,
8777 tdesc_arm_with_m);
3184d3f9
JL
8778
8779 /* M-profile plus M4F VFP. */
8780 register_remote_g_packet_guess (gdbarch,
8781 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8782 (16 * INT_REGISTER_SIZE)
8783 + (16 * VFP_REGISTER_SIZE)
8784 + (2 * INT_REGISTER_SIZE),
8785 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8786 }
8787
8788 /* Otherwise we don't have a useful guess. */
8789}
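
/* Worked 'g' packet lengths for the three guesses registered above.
   The register sizes used here (INT_REGISTER_SIZE = 4,
   FP_REGISTER_SIZE = 12, VFP_REGISTER_SIZE = 8) are the usual GDB
   values for ARM and are an assumption of this editorial note.  */

enum arm_example_g_packet_sizes
{
  /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr - deprecated FPA layout.  */
  ARM_EXAMPLE_G_M_FPA = 16 * 4 + 8 * 12 + 2 * 4,	/* 168 bytes.  */
  /* r0-r12,sp,lr,pc; xpsr - the regular M-profile layout.  */
  ARM_EXAMPLE_G_M = 16 * 4 + 4,				/* 68 bytes.  */
  /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr - M-profile plus VFP.  */
  ARM_EXAMPLE_G_M_VFP_D16 = 16 * 4 + 16 * 8 + 2 * 4	/* 200 bytes.  */
};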
8790
70f80edf 8791\f
da3c6d4a
MS
8792/* Initialize the current architecture based on INFO. If possible,
8793 re-use an architecture from ARCHES, which is a list of
8794 architectures already created during this debugging session.
97e03143 8795
da3c6d4a
MS
8796 Called e.g. at program startup, when reading a core file, and when
8797 reading a binary file. */
97e03143 8798
39bbf761
RE
8799static struct gdbarch *
8800arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8801{
97e03143 8802 struct gdbarch_tdep *tdep;
39bbf761 8803 struct gdbarch *gdbarch;
28e97307
DJ
8804 struct gdbarch_list *best_arch;
8805 enum arm_abi_kind arm_abi = arm_abi_global;
8806 enum arm_float_model fp_model = arm_fp_model;
123dc839 8807 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8808 int i, is_m = 0;
330c6ca9 8809 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8810 int have_wmmx_registers = 0;
58d6951d 8811 int have_neon = 0;
ff6f572f 8812 int have_fpa_registers = 1;
9779414d
DJ
8813 const struct target_desc *tdesc = info.target_desc;
8814
8815 /* If we have an object to base this architecture on, try to determine
8816 its ABI. */
8817
8818 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8819 {
8820 int ei_osabi, e_flags;
8821
8822 switch (bfd_get_flavour (info.abfd))
8823 {
8824 case bfd_target_aout_flavour:
8825 /* Assume it's an old APCS-style ABI. */
8826 arm_abi = ARM_ABI_APCS;
8827 break;
8828
8829 case bfd_target_coff_flavour:
8830 /* Assume it's an old APCS-style ABI. */
8831 /* XXX WinCE? */
8832 arm_abi = ARM_ABI_APCS;
8833 break;
8834
8835 case bfd_target_elf_flavour:
8836 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8837 e_flags = elf_elfheader (info.abfd)->e_flags;
8838
8839 if (ei_osabi == ELFOSABI_ARM)
8840 {
8841 /* GNU tools used to use this value, but do not for EABI
8842 objects. There's nowhere to tag an EABI version
8843 anyway, so assume APCS. */
8844 arm_abi = ARM_ABI_APCS;
8845 }
d403db27 8846 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
8847 {
8848 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8849 int attr_arch, attr_profile;
8850
8851 switch (eabi_ver)
8852 {
8853 case EF_ARM_EABI_UNKNOWN:
8854 /* Assume GNU tools. */
8855 arm_abi = ARM_ABI_APCS;
8856 break;
8857
8858 case EF_ARM_EABI_VER4:
8859 case EF_ARM_EABI_VER5:
8860 arm_abi = ARM_ABI_AAPCS;
8861 /* EABI binaries default to VFP float ordering.
8862 They may also contain build attributes that can
8863 be used to identify if the VFP argument-passing
8864 ABI is in use. */
8865 if (fp_model == ARM_FLOAT_AUTO)
8866 {
8867#ifdef HAVE_ELF
8868 switch (bfd_elf_get_obj_attr_int (info.abfd,
8869 OBJ_ATTR_PROC,
8870 Tag_ABI_VFP_args))
8871 {
b35b0298 8872 case AEABI_VFP_args_base:
9779414d
DJ
8873 /* "The user intended FP parameter/result
8874 passing to conform to AAPCS, base
8875 variant". */
8876 fp_model = ARM_FLOAT_SOFT_VFP;
8877 break;
b35b0298 8878 case AEABI_VFP_args_vfp:
9779414d
DJ
8879 /* "The user intended FP parameter/result
8880 passing to conform to AAPCS, VFP
8881 variant". */
8882 fp_model = ARM_FLOAT_VFP;
8883 break;
b35b0298 8884 case AEABI_VFP_args_toolchain:
9779414d
DJ
8885 /* "The user intended FP parameter/result
8886 passing to conform to tool chain-specific
8887 conventions" - we don't know any such
8888 conventions, so leave it as "auto". */
8889 break;
b35b0298 8890 case AEABI_VFP_args_compatible:
5c294fee
TG
8891 /* "Code is compatible with both the base
8892 and VFP variants; the user did not permit
8893 non-variadic functions to pass FP
8894 parameters/results" - leave it as
8895 "auto". */
8896 break;
9779414d
DJ
8897 default:
8898 /* Attribute value not mentioned in the
5c294fee 8899 November 2012 ABI, so leave it as
9779414d
DJ
8900 "auto". */
8901 break;
8902 }
8903#else
8904 fp_model = ARM_FLOAT_SOFT_VFP;
8905#endif
8906 }
8907 break;
8908
8909 default:
8910 /* Leave it as "auto". */
8911 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8912 break;
8913 }
8914
8915#ifdef HAVE_ELF
8916 /* Detect M-profile programs. This only works if the
8917 executable file includes build attributes; GCC does
8918 copy them to the executable, but e.g. RealView does
8919 not. */
8920 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8921 Tag_CPU_arch);
0963b4bd
MS
8922 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8923 OBJ_ATTR_PROC,
9779414d
DJ
8924 Tag_CPU_arch_profile);
8925 /* GCC specifies the profile for v6-M; RealView only
8926 specifies the profile for architectures starting with
8927 V7 (as opposed to architectures with a tag
8928 numerically greater than TAG_CPU_ARCH_V7). */
8929 if (!tdesc_has_registers (tdesc)
8930 && (attr_arch == TAG_CPU_ARCH_V6_M
8931 || attr_arch == TAG_CPU_ARCH_V6S_M
8932 || attr_profile == 'M'))
25f8c692 8933 is_m = 1;
9779414d
DJ
8934#endif
8935 }
8936
8937 if (fp_model == ARM_FLOAT_AUTO)
8938 {
8939 int e_flags = elf_elfheader (info.abfd)->e_flags;
8940
8941 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8942 {
8943 case 0:
8944 /* Leave it as "auto". Strictly speaking this case
8945 means FPA, but almost nobody uses that now, and
8946 many toolchains fail to set the appropriate bits
8947 for the floating-point model they use. */
8948 break;
8949 case EF_ARM_SOFT_FLOAT:
8950 fp_model = ARM_FLOAT_SOFT_FPA;
8951 break;
8952 case EF_ARM_VFP_FLOAT:
8953 fp_model = ARM_FLOAT_VFP;
8954 break;
8955 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8956 fp_model = ARM_FLOAT_SOFT_VFP;
8957 break;
8958 }
8959 }
8960
8961 if (e_flags & EF_ARM_BE8)
8962 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8963
8964 break;
8965
8966 default:
8967 /* Leave it as "auto". */
8968 break;
8969 }
8970 }
123dc839
DJ
8971
8972 /* Check any target description for validity. */
9779414d 8973 if (tdesc_has_registers (tdesc))
123dc839
DJ
8974 {
8975 /* For most registers we require GDB's default names; but also allow
8976 the numeric names for sp / lr / pc, as a convenience. */
8977 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8978 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8979 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8980
8981 const struct tdesc_feature *feature;
58d6951d 8982 int valid_p;
123dc839 8983
9779414d 8984 feature = tdesc_find_feature (tdesc,
123dc839
DJ
8985 "org.gnu.gdb.arm.core");
8986 if (feature == NULL)
9779414d
DJ
8987 {
8988 feature = tdesc_find_feature (tdesc,
8989 "org.gnu.gdb.arm.m-profile");
8990 if (feature == NULL)
8991 return NULL;
8992 else
8993 is_m = 1;
8994 }
123dc839
DJ
8995
8996 tdesc_data = tdesc_data_alloc ();
8997
8998 valid_p = 1;
8999 for (i = 0; i < ARM_SP_REGNUM; i++)
9000 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9001 arm_register_names[i]);
9002 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9003 ARM_SP_REGNUM,
9004 arm_sp_names);
9005 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9006 ARM_LR_REGNUM,
9007 arm_lr_names);
9008 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9009 ARM_PC_REGNUM,
9010 arm_pc_names);
9779414d
DJ
9011 if (is_m)
9012 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9013 ARM_PS_REGNUM, "xpsr");
9014 else
9015 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9016 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9017
9018 if (!valid_p)
9019 {
9020 tdesc_data_cleanup (tdesc_data);
9021 return NULL;
9022 }
9023
9779414d 9024 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9025 "org.gnu.gdb.arm.fpa");
9026 if (feature != NULL)
9027 {
9028 valid_p = 1;
9029 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9030 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9031 arm_register_names[i]);
9032 if (!valid_p)
9033 {
9034 tdesc_data_cleanup (tdesc_data);
9035 return NULL;
9036 }
9037 }
ff6f572f
DJ
9038 else
9039 have_fpa_registers = 0;
9040
9779414d 9041 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9042 "org.gnu.gdb.xscale.iwmmxt");
9043 if (feature != NULL)
9044 {
9045 static const char *const iwmmxt_names[] = {
9046 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9047 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9048 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9049 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9050 };
9051
9052 valid_p = 1;
9053 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9054 valid_p
9055 &= tdesc_numbered_register (feature, tdesc_data, i,
9056 iwmmxt_names[i - ARM_WR0_REGNUM]);
9057
9058 /* Check for the control registers, but do not fail if they
9059 are missing. */
9060 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9061 tdesc_numbered_register (feature, tdesc_data, i,
9062 iwmmxt_names[i - ARM_WR0_REGNUM]);
9063
9064 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9065 valid_p
9066 &= tdesc_numbered_register (feature, tdesc_data, i,
9067 iwmmxt_names[i - ARM_WR0_REGNUM]);
9068
9069 if (!valid_p)
9070 {
9071 tdesc_data_cleanup (tdesc_data);
9072 return NULL;
9073 }
a56cc1ce
YQ
9074
9075 have_wmmx_registers = 1;
ff6f572f 9076 }
58d6951d
DJ
9077
9078 /* If we have a VFP unit, check whether the single precision registers
9079 are present. If not, then we will synthesize them as pseudo
9080 registers. */
9779414d 9081 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9082 "org.gnu.gdb.arm.vfp");
9083 if (feature != NULL)
9084 {
9085 static const char *const vfp_double_names[] = {
9086 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9087 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9088 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9089 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9090 };
9091
9092 /* Require the double precision registers. There must be either
9093 16 or 32. */
9094 valid_p = 1;
9095 for (i = 0; i < 32; i++)
9096 {
9097 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9098 ARM_D0_REGNUM + i,
9099 vfp_double_names[i]);
9100 if (!valid_p)
9101 break;
9102 }
2b9e5ea6
UW
9103 if (!valid_p && i == 16)
9104 valid_p = 1;
58d6951d 9105
2b9e5ea6
UW
9106 /* Also require FPSCR. */
9107 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9108 ARM_FPSCR_REGNUM, "fpscr");
9109 if (!valid_p)
58d6951d
DJ
9110 {
9111 tdesc_data_cleanup (tdesc_data);
9112 return NULL;
9113 }
9114
9115 if (tdesc_unnumbered_register (feature, "s0") == 0)
9116 have_vfp_pseudos = 1;
9117
330c6ca9 9118 vfp_register_count = i;
58d6951d
DJ
9119
9120 /* If we have VFP, also check for NEON. The architecture allows
9121 NEON without VFP (integer vector operations only), but GDB
9122 does not support that. */
9779414d 9123 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9124 "org.gnu.gdb.arm.neon");
9125 if (feature != NULL)
9126 {
9127 /* NEON requires 32 double-precision registers. */
9128 if (i != 32)
9129 {
9130 tdesc_data_cleanup (tdesc_data);
9131 return NULL;
9132 }
9133
9134 /* If there are quad registers defined by the stub, use
9135 their type; otherwise (normally) provide them with
9136 the default type. */
9137 if (tdesc_unnumbered_register (feature, "q0") == 0)
9138 have_neon_pseudos = 1;
9139
9140 have_neon = 1;
9141 }
9142 }
123dc839 9143 }
39bbf761 9144
28e97307
DJ
9145 /* If there is already a candidate, use it. */
9146 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9147 best_arch != NULL;
9148 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9149 {
b8926edc
DJ
9150 if (arm_abi != ARM_ABI_AUTO
9151 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9152 continue;
9153
b8926edc
DJ
9154 if (fp_model != ARM_FLOAT_AUTO
9155 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9156 continue;
9157
58d6951d
DJ
9158 /* There are various other properties in tdep that we do not
9159 need to check here: those derived from a target description,
9160 since gdbarches with a different target description are
9161 automatically disqualified. */
9162
9779414d
DJ
9163 /* Do check is_m, though, since it might come from the binary. */
9164 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9165 continue;
9166
28e97307
DJ
9167 /* Found a match. */
9168 break;
9169 }
97e03143 9170
28e97307 9171 if (best_arch != NULL)
123dc839
DJ
9172 {
9173 if (tdesc_data != NULL)
9174 tdesc_data_cleanup (tdesc_data);
9175 return best_arch->gdbarch;
9176 }
28e97307 9177
8d749320 9178 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9179 gdbarch = gdbarch_alloc (&info, tdep);
9180
28e97307
DJ
9181 /* Record additional information about the architecture we are defining.
9182 These are gdbarch discriminators, like the OSABI. */
9183 tdep->arm_abi = arm_abi;
9184 tdep->fp_model = fp_model;
9779414d 9185 tdep->is_m = is_m;
ff6f572f 9186 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9187 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9188 gdb_assert (vfp_register_count == 0
9189 || vfp_register_count == 16
9190 || vfp_register_count == 32);
9191 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9192 tdep->have_vfp_pseudos = have_vfp_pseudos;
9193 tdep->have_neon_pseudos = have_neon_pseudos;
9194 tdep->have_neon = have_neon;
08216dd7 9195
25f8c692
JL
9196 arm_register_g_packet_guesses (gdbarch);
9197
08216dd7 9198 /* Breakpoints. */
9d4fde75 9199 switch (info.byte_order_for_code)
67255d04
RE
9200 {
9201 case BFD_ENDIAN_BIG:
66e810cd
RE
9202 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9203 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9204 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9205 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9206
67255d04
RE
9207 break;
9208
9209 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9210 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9211 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9212 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9213 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9214
67255d04
RE
9215 break;
9216
9217 default:
9218 internal_error (__FILE__, __LINE__,
edefbb7c 9219 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9220 }
9221
d7b486e7
RE
9222 /* On ARM targets char defaults to unsigned. */
9223 set_gdbarch_char_signed (gdbarch, 0);
9224
cca44b1b
JB
9225 /* Note: for displaced stepping, this includes the breakpoint, and one word
9226 of additional scratch space. This setting isn't used for anything beside
9227 displaced stepping at present. */
9228 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9229
9df628e0 9230 /* This should be low enough for everything. */
97e03143 9231 tdep->lowest_pc = 0x20;
94c30b78 9232 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9233
7c00367c
MK
9234 /* The default, for both APCS and AAPCS, is to return small
9235 structures in registers. */
9236 tdep->struct_return = reg_struct_return;
9237
2dd604e7 9238 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9239 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9240
756fe439
DJ
9241 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9242
148754e5 9243 /* Frame handling. */
a262aec2 9244 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9245 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9246 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9247
eb5492fa 9248 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9249
34e8f22d 9250 /* Address manipulation. */
34e8f22d
RE
9251 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9252
34e8f22d
RE
9253 /* Advance PC across function entry code. */
9254 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9255
c9cf6e20
MG
9256 /* Detect whether PC is at a point where the stack has been destroyed. */
9257 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9258
190dce09
UW
9259 /* Skip trampolines. */
9260 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9261
34e8f22d
RE
9262 /* The stack grows downward. */
9263 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9264
9265 /* Breakpoint manipulation. */
9266 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
177321bd
DJ
9267 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9268 arm_remote_breakpoint_from_pc);
34e8f22d
RE
9269
9270 /* Information about registers, etc. */
34e8f22d
RE
9271 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9272 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9273 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9274 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9275 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9276
ff6f572f
DJ
9277 /* This "info float" is FPA-specific. Use the generic version if we
9278 do not have FPA. */
9279 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9280 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9281
26216b98 9282 /* Internal <-> external register number maps. */
ff6f572f 9283 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9284 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9285
34e8f22d
RE
9286 set_gdbarch_register_name (gdbarch, arm_register_name);
9287
9288 /* Returning results. */
2af48f68 9289 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9290
03d48a7d
RE
9291 /* Disassembly. */
9292 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9293
34e8f22d
RE
9294 /* Minsymbol frobbing. */
9295 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9296 set_gdbarch_coff_make_msymbol_special (gdbarch,
9297 arm_coff_make_msymbol_special);
60c5725c 9298 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9299
f9d67f43
DJ
9300 /* Thumb-2 IT block support. */
9301 set_gdbarch_adjust_breakpoint_address (gdbarch,
9302 arm_adjust_breakpoint_address);
9303
0d5de010
DJ
9304 /* Virtual tables. */
9305 set_gdbarch_vbit_in_delta (gdbarch, 1);
9306
97e03143 9307 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9308 gdbarch_init_osabi (info, gdbarch);
97e03143 9309
b39cc962
DJ
9310 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9311
eb5492fa 9312 /* Add some default predicates. */
2ae28aa9
YQ
9313 if (is_m)
9314 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9315 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9316 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9317 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
a262aec2 9318 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9319
97e03143
RE
9320 /* Now we have tuned the configuration, set a few final things,
9321 based on what the OS ABI has told us. */
9322
b8926edc
DJ
9323 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9324 binaries are always marked. */
9325 if (tdep->arm_abi == ARM_ABI_AUTO)
9326 tdep->arm_abi = ARM_ABI_APCS;
9327
e3039479
UW
9328 /* Watchpoints are not steppable. */
9329 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9330
b8926edc
DJ
9331 /* We used to default to FPA for generic ARM, but almost nobody
9332 uses that now, and we now provide a way for the user to force
9333 the model. So default to the most useful variant. */
9334 if (tdep->fp_model == ARM_FLOAT_AUTO)
9335 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9336
9df628e0
RE
9337 if (tdep->jb_pc >= 0)
9338 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9339
08216dd7 9340 /* Floating point sizes and format. */
8da61cc4 9341 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9342 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9343 {
8da61cc4
DJ
9344 set_gdbarch_double_format
9345 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9346 set_gdbarch_long_double_format
9347 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9348 }
9349 else
9350 {
9351 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9352 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9353 }
9354
58d6951d
DJ
9355 if (have_vfp_pseudos)
9356 {
9357 /* NOTE: These are the only pseudo registers used by
9358 the ARM target at the moment. If more are added, a
9359 little more care in numbering will be needed. */
9360
9361 int num_pseudos = 32;
9362 if (have_neon_pseudos)
9363 num_pseudos += 16;
9364 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9365 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9366 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9367 }
9368
123dc839 9369 if (tdesc_data)
58d6951d
DJ
9370 {
9371 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9372
9779414d 9373 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9374
9375 /* Override tdesc_register_type to adjust the types of VFP
9376 registers for NEON. */
9377 set_gdbarch_register_type (gdbarch, arm_register_type);
9378 }
123dc839
DJ
9379
9380 /* Add standard register aliases. We add aliases even for those
9381 names which are used by the current architecture - it's simpler,
9382 and does no harm, since nothing ever lists user registers. */
9383 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9384 user_reg_add (gdbarch, arm_register_aliases[i].name,
9385 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9386
39bbf761
RE
9387 return gdbarch;
9388}
9389
97e03143 9390static void
2af46ca0 9391arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9392{
2af46ca0 9393 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9394
9395 if (tdep == NULL)
9396 return;
9397
edefbb7c 9398 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9399 (unsigned long) tdep->lowest_pc);
9400}
9401
a78f21af
AC
9402extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9403
c906108c 9404void
ed9a39eb 9405_initialize_arm_tdep (void)
c906108c 9406{
bc90b915
FN
9407 struct ui_file *stb;
9408 long length;
26304000 9409 struct cmd_list_element *new_set, *new_show;
53904c9e
AC
9410 const char *setname;
9411 const char *setdesc;
4bd7b427 9412 const char *const *regnames;
bc90b915
FN
9413 int numregs, i, j;
9414 static char *helptext;
edefbb7c
AC
9415 char regdesc[1024], *rdptr = regdesc;
9416 size_t rest = sizeof (regdesc);
085dd6e6 9417
42cf1509 9418 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9419
60c5725c 9420 arm_objfile_data_key
c1bd65d0 9421 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9422
0e9e9abd
UW
9423 /* Add ourselves to objfile event chain. */
9424 observer_attach_new_objfile (arm_exidx_new_objfile);
9425 arm_exidx_data_key
9426 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9427
70f80edf
JT
9428 /* Register an ELF OS ABI sniffer for ARM binaries. */
9429 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9430 bfd_target_elf_flavour,
9431 arm_elf_osabi_sniffer);
9432
9779414d
DJ
9433 /* Initialize the standard target descriptions. */
9434 initialize_tdesc_arm_with_m ();
25f8c692 9435 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9436 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9437 initialize_tdesc_arm_with_iwmmxt ();
9438 initialize_tdesc_arm_with_vfpv2 ();
9439 initialize_tdesc_arm_with_vfpv3 ();
9440 initialize_tdesc_arm_with_neon ();
9779414d 9441
94c30b78 9442 /* Get the number of possible sets of register names defined in opcodes. */
afd7eef0
RE
9443 num_disassembly_options = get_arm_regname_num_options ();
9444
9445 /* Add root prefix command for all "set arm"/"show arm" commands. */
9446 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9447 _("Various ARM-specific commands."),
afd7eef0
RE
9448 &setarmcmdlist, "set arm ", 0, &setlist);
9449
9450 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9451 _("Various ARM-specific commands."),
afd7eef0 9452 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9453
94c30b78 9454 /* Sync the opcode insn printer with our register viewer. */
bc90b915 9455 parse_arm_disassembler_option ("reg-names-std");
c5aa993b 9456
eefe576e
AC
9457 /* Initialize the array that will be passed to
9458 add_setshow_enum_cmd(). */
8d749320
SM
9459 valid_disassembly_styles = XNEWVEC (const char *,
9460 num_disassembly_options + 1);
afd7eef0 9461 for (i = 0; i < num_disassembly_options; i++)
bc90b915
FN
9462 {
9463 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
afd7eef0 9464 valid_disassembly_styles[i] = setname;
edefbb7c
AC
9465 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9466 rdptr += length;
9467 rest -= length;
123dc839
DJ
9468 /* When we find the default names, tell the disassembler to use
9469 them. */
bc90b915
FN
9470 if (!strcmp (setname, "std"))
9471 {
afd7eef0 9472 disassembly_style = setname;
bc90b915
FN
9473 set_arm_regname_option (i);
9474 }
9475 }
94c30b78 9476 /* Mark the end of valid options. */
afd7eef0 9477 valid_disassembly_styles[num_disassembly_options] = NULL;
c906108c 9478
edefbb7c
AC
9479 /* Create the help text. */
9480 stb = mem_fileopen ();
9481 fprintf_unfiltered (stb, "%s%s%s",
9482 _("The valid values are:\n"),
9483 regdesc,
9484 _("The default is \"std\"."));
759ef836 9485 helptext = ui_file_xstrdup (stb, NULL);
bc90b915 9486 ui_file_delete (stb);
ed9a39eb 9487
edefbb7c
AC
9488 add_setshow_enum_cmd("disassembler", no_class,
9489 valid_disassembly_styles, &disassembly_style,
9490 _("Set the disassembly style."),
9491 _("Show the disassembly style."),
9492 helptext,
2c5b56ce 9493 set_disassembly_style_sfunc,
0963b4bd
MS
9494 NULL, /* FIXME: i18n: The disassembly style is
9495 \"%s\". */
7376b4c2 9496 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9497
9498 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9499 _("Set usage of ARM 32-bit mode."),
9500 _("Show usage of ARM 32-bit mode."),
9501 _("When off, a 26-bit PC will be used."),
2c5b56ce 9502 NULL,
0963b4bd
MS
9503 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9504 mode is %s. */
26304000 9505 &setarmcmdlist, &showarmcmdlist);
c906108c 9506
fd50bc42 9507 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9508 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9509 _("Set the floating point type."),
9510 _("Show the floating point type."),
9511 _("auto - Determine the FP typefrom the OS-ABI.\n\
9512softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9513fpa - FPA co-processor (GCC compiled).\n\
9514softvfp - Software FP with pure-endian doubles.\n\
9515vfp - VFP co-processor."),
edefbb7c 9516 set_fp_model_sfunc, show_fp_model,
7376b4c2 9517 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9518
28e97307
DJ
9519 /* Add a command to allow the user to force the ABI. */
9520 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9521 _("Set the ABI."),
9522 _("Show the ABI."),
9523 NULL, arm_set_abi, arm_show_abi,
9524 &setarmcmdlist, &showarmcmdlist);
9525
0428b8f5
DJ
9526 /* Add two commands to allow the user to force the assumed
9527 execution mode. */
9528 add_setshow_enum_cmd ("fallback-mode", class_support,
9529 arm_mode_strings, &arm_fallback_mode_string,
9530 _("Set the mode assumed when symbols are unavailable."),
9531 _("Show the mode assumed when symbols are unavailable."),
9532 NULL, NULL, arm_show_fallback_mode,
9533 &setarmcmdlist, &showarmcmdlist);
9534 add_setshow_enum_cmd ("force-mode", class_support,
9535 arm_mode_strings, &arm_force_mode_string,
9536 _("Set the mode assumed even when symbols are available."),
9537 _("Show the mode assumed even when symbols are available."),
9538 NULL, NULL, arm_show_force_mode,
9539 &setarmcmdlist, &showarmcmdlist);
9540
6529d2dd 9541 /* Debugging flag. */
edefbb7c
AC
9542 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9543 _("Set ARM debugging."),
9544 _("Show ARM debugging."),
9545 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9546 NULL,
7915a72c 9547 NULL, /* FIXME: i18n: "ARM debugging is %s." */
26304000 9548 &setdebuglist, &showdebuglist);
c906108c 9549}
72508ac0
PO
9550
9551/* ARM-reversible process record data structures. */
9552
9553#define ARM_INSN_SIZE_BYTES 4
9554#define THUMB_INSN_SIZE_BYTES 2
9555#define THUMB2_INSN_SIZE_BYTES 4
9556
9557
71e396f9
LM
9558/* Position of the bit within a 32-bit ARM instruction
9559 that defines whether the instruction is a load or store. */
72508ac0
PO
9560#define INSN_S_L_BIT_NUM 20
9561
9562#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9563 do \
9564 { \
9565 unsigned int reg_len = LENGTH; \
9566 if (reg_len) \
9567 { \
9568 REGS = XNEWVEC (uint32_t, reg_len); \
9569 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9570 } \
9571 } \
9572 while (0)
9573
9574#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9575 do \
9576 { \
9577 unsigned int mem_len = LENGTH; \
9578 if (mem_len) \
9579 { \
9580 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9581 memcpy(&MEMS->len, &RECORD_BUF[0], \
9582 sizeof(struct arm_mem_r) * LENGTH); \
9583 } \
9584 } \
9585 while (0)
9586
9587/* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
9588#define INSN_RECORDED(ARM_RECORD) \
9589 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9590
9591/* ARM memory record structure. */
9592struct arm_mem_r
9593{
9594 uint32_t len; /* Record length. */
bfbbec00 9595 uint32_t addr; /* Memory address. */
72508ac0
PO
9596};
9597
9598/* ARM instruction record contains opcode of current insn
9599 and execution state (before entry to decode_insn()),
9600 contains list of to-be-modified registers and
9601 memory blocks (on return from decode_insn()). */
9602
9603typedef struct insn_decode_record_t
9604{
9605 struct gdbarch *gdbarch;
9606 struct regcache *regcache;
9607 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9608 uint32_t arm_insn; /* Should accommodate thumb. */
9609 uint32_t cond; /* Condition code. */
9610 uint32_t opcode; /* Insn opcode. */
9611 uint32_t decode; /* Insn decode bits. */
9612 uint32_t mem_rec_count; /* Number of memory records. */
9613 uint32_t reg_rec_count; /* Number of register records. */
9614 uint32_t *arm_regs; /* Registers to be saved for this record. */
9615 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9616} insn_decode_record;
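
/* Hypothetical usage sketch, not part of the original file: a decode
   routine fills small scratch buffers describing the registers and
   memory an insn will modify, sets the two counters, and then copies
   the buffers into the record with REG_ALLOC / MEM_ALLOC, exactly as
   the decoders below do.  Recording a 4-byte store to ADDR with a
   write-back to register RN would look like this.  */

static void
arm_example_fill_record (insn_decode_record *r, uint32_t rn, uint32_t addr)
{
  uint32_t record_buf[1], record_buf_mem[2];

  record_buf[0] = rn;		/* RN is modified by the write-back.  */
  r->reg_rec_count = 1;

  record_buf_mem[0] = 4;	/* Length of the memory write.  */
  record_buf_mem[1] = addr;	/* Target address.  */
  r->mem_rec_count = 1;

  REG_ALLOC (r->arm_regs, r->reg_rec_count, record_buf);
  MEM_ALLOC (r->arm_mems, r->mem_rec_count, record_buf_mem);
}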
9617
9618
9619/* Checks ARM SBZ and SBO mandatory fields. */
9620
9621static int
9622sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9623{
9624 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9625
9626 if (!len)
9627 return 1;
9628
9629 if (!sbo)
9630 ones = ~ones;
9631
9632 while (ones)
9633 {
9634 if (!(ones & sbo))
9635 {
9636 return 0;
9637 }
9638 ones = ones >> 1;
9639 }
9640 return 1;
9641}
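
/* Hedged usage sketch, not part of the original file: BIT_NUM above
   is 1-based, so validating a 4-bit should-be-one field that
   occupies bits 12..15 of an instruction is written as below; the
   field boundaries here are purely illustrative.  */

static int
arm_example_check_sbo_field (uint32_t insn)
{
  /* bit_num = 13 selects bit 12 as the lowest bit of the field;
     the final argument 1 asks for the should-be-one check.  */
  return sbo_sbz (insn, 13, 4, 1);
}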
9642
c6ec2b30
OJ
9643enum arm_record_result
9644{
9645 ARM_RECORD_SUCCESS = 0,
9646 ARM_RECORD_FAILURE = 1
9647};
9648
72508ac0
PO
9649typedef enum
9650{
9651 ARM_RECORD_STRH=1,
9652 ARM_RECORD_STRD
9653} arm_record_strx_t;
9654
9655typedef enum
9656{
9657 ARM_RECORD=1,
9658 THUMB_RECORD,
9659 THUMB2_RECORD
9660} record_type_t;
9661
9662
9663static int
9664arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9665 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9666{
9667
9668 struct regcache *reg_cache = arm_insn_r->regcache;
9669 ULONGEST u_regval[2]= {0};
9670
9671 uint32_t reg_src1 = 0, reg_src2 = 0;
9672 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9673 uint32_t opcode1 = 0;
9674
9675 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9676 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9677 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
9678
9679
9680 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9681 {
9682 /* 1) Handle misc store, immediate offset. */
9683 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9684 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9685 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9686 regcache_raw_read_unsigned (reg_cache, reg_src1,
9687 &u_regval[0]);
9688 if (ARM_PC_REGNUM == reg_src1)
9689 {
9690 /* If R15 was used as Rn, its value is the current PC+8. */
9691 u_regval[0] = u_regval[0] + 8;
9692 }
9693 offset_8 = (immed_high << 4) | immed_low;
9694 /* Calculate target store address. */
9695 if (14 == arm_insn_r->opcode)
9696 {
9697 tgt_mem_addr = u_regval[0] + offset_8;
9698 }
9699 else
9700 {
9701 tgt_mem_addr = u_regval[0] - offset_8;
9702 }
9703 if (ARM_RECORD_STRH == str_type)
9704 {
9705 record_buf_mem[0] = 2;
9706 record_buf_mem[1] = tgt_mem_addr;
9707 arm_insn_r->mem_rec_count = 1;
9708 }
9709 else if (ARM_RECORD_STRD == str_type)
9710 {
9711 record_buf_mem[0] = 4;
9712 record_buf_mem[1] = tgt_mem_addr;
9713 record_buf_mem[2] = 4;
9714 record_buf_mem[3] = tgt_mem_addr + 4;
9715 arm_insn_r->mem_rec_count = 2;
9716 }
9717 }
9718 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9719 {
9720 /* 2) Store, register offset. */
9721 /* Get Rm. */
9722 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9723 /* Get Rn. */
9724 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9725 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9726 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9727 if (15 == reg_src2)
9728 {
9729 /* If R15 was used as Rn, its value is the current PC+8. */
9730 u_regval[0] = u_regval[0] + 8;
9731 }
9732 /* Calculate target store address, Rn +/- Rm, register offset. */
9733 if (12 == arm_insn_r->opcode)
9734 {
9735 tgt_mem_addr = u_regval[0] + u_regval[1];
9736 }
9737 else
9738 {
9739 tgt_mem_addr = u_regval[1] - u_regval[0];
9740 }
9741 if (ARM_RECORD_STRH == str_type)
9742 {
9743 record_buf_mem[0] = 2;
9744 record_buf_mem[1] = tgt_mem_addr;
9745 arm_insn_r->mem_rec_count = 1;
9746 }
9747 else if (ARM_RECORD_STRD == str_type)
9748 {
9749 record_buf_mem[0] = 4;
9750 record_buf_mem[1] = tgt_mem_addr;
9751 record_buf_mem[2] = 4;
9752 record_buf_mem[3] = tgt_mem_addr + 4;
9753 arm_insn_r->mem_rec_count = 2;
9754 }
9755 }
9756 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9757 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9758 {
9759 /* 3) Store, immediate pre-indexed. */
9760 /* 5) Store, immediate post-indexed. */
9761 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9762 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9763 offset_8 = (immed_high << 4) | immed_low;
9764 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9765 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9766 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9767 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9768 {
9769 tgt_mem_addr = u_regval[0] + offset_8;
9770 }
9771 else
9772 {
9773 tgt_mem_addr = u_regval[0] - offset_8;
9774 }
9775 if (ARM_RECORD_STRH == str_type)
9776 {
9777 record_buf_mem[0] = 2;
9778 record_buf_mem[1] = tgt_mem_addr;
9779 arm_insn_r->mem_rec_count = 1;
9780 }
9781 else if (ARM_RECORD_STRD == str_type)
9782 {
9783 record_buf_mem[0] = 4;
9784 record_buf_mem[1] = tgt_mem_addr;
9785 record_buf_mem[2] = 4;
9786 record_buf_mem[3] = tgt_mem_addr + 4;
9787 arm_insn_r->mem_rec_count = 2;
9788 }
9789 /* Record Rn also as it changes. */
9790 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9791 arm_insn_r->reg_rec_count = 1;
9792 }
9793 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9794 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9795 {
9796 /* 4) Store, register pre-indexed. */
9797 /* 6) Store, register post-indexed. */
9798 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9799 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9800 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9801 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9802 /* Calculate target store address, Rn +/- Rm, register offset. */
9803 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9804 {
9805 tgt_mem_addr = u_regval[0] + u_regval[1];
9806 }
9807 else
9808 {
9809 tgt_mem_addr = u_regval[1] - u_regval[0];
9810 }
9811 if (ARM_RECORD_STRH == str_type)
9812 {
9813 record_buf_mem[0] = 2;
9814 record_buf_mem[1] = tgt_mem_addr;
9815 arm_insn_r->mem_rec_count = 1;
9816 }
9817 else if (ARM_RECORD_STRD == str_type)
9818 {
9819 record_buf_mem[0] = 4;
9820 record_buf_mem[1] = tgt_mem_addr;
9821 record_buf_mem[2] = 4;
9822 record_buf_mem[3] = tgt_mem_addr + 4;
9823 arm_insn_r->mem_rec_count = 2;
9824 }
9825 /* Record Rn also as it changes. */
9826 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9827 arm_insn_r->reg_rec_count = 1;
9828 }
9829 return 0;
9830}
9831
9832/* Handling ARM extension space insns. */
9833
9834static int
9835arm_record_extension_space (insn_decode_record *arm_insn_r)
9836{
9837 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
9838 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9839 uint32_t record_buf[8], record_buf_mem[8];
9840 uint32_t reg_src1 = 0;
9841 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9842 struct regcache *reg_cache = arm_insn_r->regcache;
9843 ULONGEST u_regval = 0;
9844
9845 gdb_assert (!INSN_RECORDED(arm_insn_r));
9846 /* Handle unconditional insn extension space. */
9847
9848 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9849 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9850 if (arm_insn_r->cond)
9851 {
9852 /* PLD has no effect on architectural state, it just affects
9853 the caches. */
9854 if (5 == ((opcode1 & 0xE0) >> 5))
9855 {
9856 /* BLX(1) */
9857 record_buf[0] = ARM_PS_REGNUM;
9858 record_buf[1] = ARM_LR_REGNUM;
9859 arm_insn_r->reg_rec_count = 2;
9860 }
9861 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9862 }
9863
9864
9865 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9866 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9867 {
9868 ret = -1;
9869 /* Undefined instruction on ARM V5; need to handle if later
9870 versions define it. */
9871 }
9872
9873 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9874 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9875 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9876
9877 /* Handle arithmetic insn extension space. */
9878 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9879 && !INSN_RECORDED(arm_insn_r))
9880 {
9881 /* Handle MLA(S) and MUL(S). */
9882 if (0 <= insn_op1 && 3 >= insn_op1)
9883 {
9884 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9885 record_buf[1] = ARM_PS_REGNUM;
9886 arm_insn_r->reg_rec_count = 2;
9887 }
9888 else if (4 <= insn_op1 && 15 >= insn_op1)
9889 {
9890 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9891 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9892 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9893 record_buf[2] = ARM_PS_REGNUM;
9894 arm_insn_r->reg_rec_count = 3;
9895 }
9896 }
9897
9898 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9899 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9900 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9901
9902 /* Handle control insn extension space. */
9903
9904 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9905 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9906 {
9907 if (!bit (arm_insn_r->arm_insn,25))
9908 {
9909 if (!bits (arm_insn_r->arm_insn, 4, 7))
9910 {
9911 if ((0 == insn_op1) || (2 == insn_op1))
9912 {
9913 /* MRS. */
9914 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9915 arm_insn_r->reg_rec_count = 1;
9916 }
9917 else if (1 == insn_op1)
9918 {
9919 /* CPSR is going to be changed. */
9920 record_buf[0] = ARM_PS_REGNUM;
9921 arm_insn_r->reg_rec_count = 1;
9922 }
9923 else if (3 == insn_op1)
9924 {
9925 /* SPSR is going to be changed. */
9926 /* We need to get SPSR value, which is yet to be done. */
9927 printf_unfiltered (_("Process record does not support "
9928 "instruction 0x%0x at address %s.\n"),
9929 arm_insn_r->arm_insn,
9930 paddress (arm_insn_r->gdbarch,
9931 arm_insn_r->this_addr));
9932 return -1;
9933 }
9934 }
9935 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9936 {
9937 if (1 == insn_op1)
9938 {
9939 /* BX. */
9940 record_buf[0] = ARM_PS_REGNUM;
9941 arm_insn_r->reg_rec_count = 1;
9942 }
9943 else if (3 == insn_op1)
9944 {
9945 /* CLZ. */
9946 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9947 arm_insn_r->reg_rec_count = 1;
9948 }
9949 }
9950 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9951 {
9952 /* BLX. */
9953 record_buf[0] = ARM_PS_REGNUM;
9954 record_buf[1] = ARM_LR_REGNUM;
9955 arm_insn_r->reg_rec_count = 2;
9956 }
9957 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
9958 {
9959 /* QADD, QSUB, QDADD, QDSUB */
9960 record_buf[0] = ARM_PS_REGNUM;
9961 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9962 arm_insn_r->reg_rec_count = 2;
9963 }
9964 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
9965 {
9966 /* BKPT. */
9967 record_buf[0] = ARM_PS_REGNUM;
9968 record_buf[1] = ARM_LR_REGNUM;
9969 arm_insn_r->reg_rec_count = 2;
9970
9971 /* Save SPSR also; how? */
9972 printf_unfiltered (_("Process record does not support "
9973 "instruction 0x%0x at address %s.\n"),
9974 arm_insn_r->arm_insn,
9975 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
9976 return -1;
9977 }
9978 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
9979 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
9980 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
9981 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
9982 )
9983 {
9984 if (0 == insn_op1 || 1 == insn_op1)
9985 {
9986 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
9987 /* We don't do optimization for SMULW<y>, where we
9988 need only Rd. */
9989 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9990 record_buf[1] = ARM_PS_REGNUM;
9991 arm_insn_r->reg_rec_count = 2;
9992 }
9993 else if (2 == insn_op1)
9994 {
9995 /* SMLAL<x><y>. */
9996 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9997 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
9998 arm_insn_r->reg_rec_count = 2;
9999 }
10000 else if (3 == insn_op1)
10001 {
10002 /* SMUL<x><y>. */
10003 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10004 arm_insn_r->reg_rec_count = 1;
10005 }
10006 }
10007 }
10008 else
10009 {
10010 /* MSR : immediate form. */
10011 if (1 == insn_op1)
10012 {
10013 /* CPSR is going to be changed. */
10014 record_buf[0] = ARM_PS_REGNUM;
10015 arm_insn_r->reg_rec_count = 1;
10016 }
10017 else if (3 == insn_op1)
10018 {
10019 /* SPSR is going to be changed. */
10020 /* We need to get SPSR value, which is yet to be done. */
10021 printf_unfiltered (_("Process record does not support "
10022 "instruction 0x%0x at address %s.\n"),
10023 arm_insn_r->arm_insn,
10024 paddress (arm_insn_r->gdbarch,
10025 arm_insn_r->this_addr));
10026 return -1;
10027 }
10028 }
10029 }
10030
10031 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10032 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10033 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
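      /* Bits 7 and 4 both set (with bits 25-27 == 000) select the
         miscellaneous load/store encodings.  insn_op1 (bits 6-5) then picks
         the form: 0 selects SWP/SWPB, 1 the halfword transfers, and 2-3 the
         doubleword/signed transfers, combined with the L bit (bit 20) as
         decoded below.  */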
10034
10035 /* Handle load/store insn extension space. */
10036
10037 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10038 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10039      && !INSN_RECORDED (arm_insn_r))
10040 {
10041 /* SWP/SWPB. */
10042 if (0 == insn_op1)
10043 {
10044          /* These insns change both a register and memory.  */
10045 /* SWP or SWPB insn. */
10046 /* Get memory address given by Rn. */
10047 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10048 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10049          /* SWP insn swaps a word.  */
10050 if (8 == arm_insn_r->opcode)
10051 {
10052 record_buf_mem[0] = 4;
10053 }
10054 else
10055 {
10056 /* SWPB insn, swaps only byte. */
10057 record_buf_mem[0] = 1;
10058 }
10059 record_buf_mem[1] = u_regval;
10060 arm_insn_r->mem_rec_count = 1;
10061 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10062 arm_insn_r->reg_rec_count = 1;
10063 }
10064 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10065 {
10066 /* STRH. */
10067          arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10068 ARM_RECORD_STRH);
10069 }
10070 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10071 {
10072 /* LDRD. */
10073 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10074 record_buf[1] = record_buf[0] + 1;
10075 arm_insn_r->reg_rec_count = 2;
10076 }
10077 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10078 {
10079 /* STRD. */
10080          arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10081 ARM_RECORD_STRD);
10082 }
10083 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10084 {
10085 /* LDRH, LDRSB, LDRSH. */
10086 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10087 arm_insn_r->reg_rec_count = 1;
10088 }
10089
10090 }
10091
10092 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10093 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10094      && !INSN_RECORDED (arm_insn_r))
10095 {
10096 ret = -1;
10097 /* Handle coprocessor insn extension space. */
10098 }
10099
10100 /* To be done for ARMv5 and later; as of now we return -1. */
10101 if (-1 == ret)
10102    printf_unfiltered (_("Process record does not support instruction 0x%0x "
10103                         "at address %s.\n"), arm_insn_r->arm_insn,
10104 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10105
10106
10107 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10108 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10109
10110 return ret;
10111}
10112
10113/* Handling opcode 000 insns. */
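
/* A note on the recording scheme used by the handlers below: each handler
   lists in record_buf[] the numbers of the registers the instruction is
   about to modify, and in record_buf_mem[] the sizes and addresses of the
   memory it is about to overwrite.  The REG_ALLOC and MEM_ALLOC macros then
   copy those lists into the insn_decode_record so that process record can
   save the old values before the instruction executes.  */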
10114
10115static int
10116arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10117{
10118 struct regcache *reg_cache = arm_insn_r->regcache;
10119 uint32_t record_buf[8], record_buf_mem[8];
10120 ULONGEST u_regval[2] = {0};
10121
10122 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10123 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10124 uint32_t opcode1 = 0;
10125
10126 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10127 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10128 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10129
10130 /* Data processing insn /multiply insn. */
10131 if (9 == arm_insn_r->decode
10132 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10133 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10134 {
10135 /* Handle multiply instructions. */
10136 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10137 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10138 {
10139 /* Handle MLA and MUL. */
10140 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10141 record_buf[1] = ARM_PS_REGNUM;
10142 arm_insn_r->reg_rec_count = 2;
10143 }
10144 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10145 {
10146 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10147 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10148 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10149 record_buf[2] = ARM_PS_REGNUM;
10150 arm_insn_r->reg_rec_count = 3;
10151 }
10152 }
10153 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10154 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10155 {
10156 /* Handle misc load insns, as 20th bit (L = 1). */
10157      /* The LDR insn can branch: if MOV LR, PC precedes an LDR insn
10158         that has R15 as its destination, the pair emulates a branch
10159         and link insn, and hence we need to save CPSR and PC as well.
10160         It is not clear this is the right place for that; the
10161         opcode = 010 LDR insn triggers the same behaviour whenever R15
10162         is the destination.  */
10163 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10164 if (15 != reg_dest)
10165 {
10166 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10167 arm_insn_r->reg_rec_count = 1;
10168 }
10169 else
10170 {
10171 record_buf[0] = reg_dest;
10172 record_buf[1] = ARM_PS_REGNUM;
10173 arm_insn_r->reg_rec_count = 2;
10174 }
10175 }
10176 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10177 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10178 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10179 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10180 {
10181 /* Handle MSR insn. */
10182 if (9 == arm_insn_r->opcode)
10183 {
10184          /* CPSR is going to be changed.  */
10185 record_buf[0] = ARM_PS_REGNUM;
10186 arm_insn_r->reg_rec_count = 1;
10187 }
10188 else
10189 {
10190 /* SPSR is going to be changed. */
10191 /* How to read SPSR value? */
10192 printf_unfiltered (_("Process record does not support instruction "
10193 "0x%0x at address %s.\n"),
10194 arm_insn_r->arm_insn,
10195 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10196 return -1;
10197 }
10198 }
10199 else if (9 == arm_insn_r->decode
10200 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10201 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10202 {
10203 /* Handling SWP, SWPB. */
10204      /* These insns change both a register and memory.  */
10205 /* SWP or SWPB insn. */
10206
10207 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10208 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10209      /* SWP insn swaps a word.  */
10210 if (8 == arm_insn_r->opcode)
10211 {
10212 record_buf_mem[0] = 4;
10213 }
10214 else
10215 {
10216 /* SWPB insn, swaps only byte. */
10217 record_buf_mem[0] = 1;
10218 }
10219 record_buf_mem[1] = u_regval[0];
10220 arm_insn_r->mem_rec_count = 1;
10221 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10222 arm_insn_r->reg_rec_count = 1;
10223 }
10224 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10225 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10226 {
10227 /* Handle BLX, branch and link/exchange. */
10228 if (9 == arm_insn_r->opcode)
10229 {
10230          /* The new instruction set state is chosen by copying bit[0] of Rm
10231             into the T bit of CPSR, and R14 stores the return address.  */
10232 record_buf[0] = ARM_PS_REGNUM;
10233 record_buf[1] = ARM_LR_REGNUM;
10234 arm_insn_r->reg_rec_count = 2;
10235 }
10236 }
10237 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10238 {
10239 /* Handle enhanced software breakpoint insn, BKPT. */
10240      /* CPSR is changed so execution continues in ARM state with normal
10241         interrupts disabled, entering Abort mode.  */
10242      /* PC is set according to the high vector configuration.  */
10243      /* If the user hits the breakpoint and then reverse-steps, we
10244         need to go back with the previous CPSR and
10245         Program Counter.  */
10246 record_buf[0] = ARM_PS_REGNUM;
10247 record_buf[1] = ARM_LR_REGNUM;
10248 arm_insn_r->reg_rec_count = 2;
10249
10250 /* Save SPSR also; how? */
10251 printf_unfiltered (_("Process record does not support instruction "
10252 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10253 paddress (arm_insn_r->gdbarch,
10254 arm_insn_r->this_addr));
10255 return -1;
10256 }
10257 else if (11 == arm_insn_r->decode
10258 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10259 {
10260 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10261
10262      /* Handle STR(x) insn.  */
10263      arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10264 ARM_RECORD_STRH);
10265 }
10266 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10267 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10268 {
10269 /* Handle BX, branch and link/exchange. */
10270      /* The new instruction set state is chosen by copying bit[0] of Rm
             into the T bit of CPSR.  */
10271 record_buf[0] = ARM_PS_REGNUM;
10272 arm_insn_r->reg_rec_count = 1;
10273 }
10274 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10275 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10276 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10277 {
10278 /* Count leading zeros: CLZ. */
10279 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10280 arm_insn_r->reg_rec_count = 1;
10281 }
10282 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10283 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10284 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10285 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10286 )
10287 {
10288 /* Handle MRS insn. */
10289 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10290 arm_insn_r->reg_rec_count = 1;
10291 }
10292 else if (arm_insn_r->opcode <= 15)
10293 {
10294 /* Normal data processing insns. */
10295      /* In all 11 shifter-operand modes, the insn modifies the destination
10296         register, which is specified by bits 12-15.  */
10297 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10298 record_buf[1] = ARM_PS_REGNUM;
10299 arm_insn_r->reg_rec_count = 2;
10300 }
10301 else
10302 {
10303 return -1;
10304 }
10305
10306 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10307 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10308 return 0;
10309}
10310
10311/* Handling opcode 001 insns. */
10312
10313static int
10314arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10315{
10316 uint32_t record_buf[8], record_buf_mem[8];
10317
10318 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10319 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10320
10321 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10322 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10323 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10324 )
10325 {
10326 /* Handle MSR insn. */
10327 if (9 == arm_insn_r->opcode)
10328 {
10329          /* CPSR is going to be changed.  */
10330 record_buf[0] = ARM_PS_REGNUM;
10331 arm_insn_r->reg_rec_count = 1;
10332 }
10333 else
10334 {
10335 /* SPSR is going to be changed. */
10336 }
10337 }
10338 else if (arm_insn_r->opcode <= 15)
10339 {
10340 /* Normal data processing insns. */
10341      /* In all 11 shifter-operand modes, the insn modifies the destination
10342         register, which is specified by bits 12-15.  */
10343 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10344 record_buf[1] = ARM_PS_REGNUM;
10345 arm_insn_r->reg_rec_count = 2;
10346 }
10347 else
10348 {
10349 return -1;
10350 }
10351
10352 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10353 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10354 return 0;
10355}
10356
10357/* Handle ARM mode instructions with opcode 010.  */
10358
10359static int
10360arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10361{
10362 struct regcache *reg_cache = arm_insn_r->regcache;
10363
10364  uint32_t reg_base, reg_dest;
10365 uint32_t offset_12, tgt_mem_addr;
10366  uint32_t record_buf[8], record_buf_mem[8];
10367 unsigned char wback;
10368 ULONGEST u_regval;
10369
10370 /* Calculate wback. */
10371 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10372 || (bit (arm_insn_r->arm_insn, 21) == 1);
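  /* For single data transfers, P (bit 24) == 0 selects post-indexed
     addressing, which always writes the updated address back to Rn, while
     W (bit 21) == 1 requests write-back in the pre-indexed form; in either
     case the base register will be modified and must be recorded.  */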
10373
10374 arm_insn_r->reg_rec_count = 0;
10375 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10376
10377 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10378 {
10379 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10380 and LDRT. */
10381
10382      reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10383 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10384
10385      /* The LDR instruction is capable of doing branching.  If MOV LR, PC
10386         precedes an LDR instruction that has R15 as reg_dest, it
10387         emulates a branch and link instruction, and hence we need to save
10388         CPSR and PC as well.  */
10389 if (ARM_PC_REGNUM == reg_dest)
10390 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10391
10392 /* If wback is true, also save the base register, which is going to be
10393 written to. */
10394 if (wback)
10395 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10396 }
10397 else
10398 {
10399 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10400
10401      offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10402 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10403
10404 /* Handle bit U. */
10405      if (bit (arm_insn_r->arm_insn, 23))
10406 {
10407 /* U == 1: Add the offset. */
10408 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10409 }
10410      else
10411 {
10412 /* U == 0: subtract the offset. */
10413 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10414 }
10415
10416 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10417 bytes. */
10418 if (bit (arm_insn_r->arm_insn, 22))
10419 {
10420 /* STRB and STRBT: 1 byte. */
10421 record_buf_mem[0] = 1;
10422 }
10423 else
10424 {
10425 /* STR and STRT: 4 bytes. */
10426 record_buf_mem[0] = 4;
10427 }
10428
10429 /* Handle bit P. */
10430 if (bit (arm_insn_r->arm_insn, 24))
10431 record_buf_mem[1] = tgt_mem_addr;
10432 else
10433 record_buf_mem[1] = (uint32_t) u_regval;
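      /* For the pre-indexed form (P == 1) the data is stored at
         base +/- offset, whereas the post-indexed form (P == 0) stores at
         the unmodified base address, so the recorded address differs
         accordingly.  */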
10434
10435 arm_insn_r->mem_rec_count = 1;
10436
10437 /* If wback is true, also save the base register, which is going to be
10438 written to. */
10439 if (wback)
10440 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10441 }
10442
10443 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10444 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10445 return 0;
10446}
10447
10448/* Handling opcode 011 insns. */
10449
10450static int
10451arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10452{
10453 struct regcache *reg_cache = arm_insn_r->regcache;
10454
10455 uint32_t shift_imm = 0;
10456 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10457 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10458 uint32_t record_buf[8], record_buf_mem[8];
10459
10460 LONGEST s_word;
10461 ULONGEST u_regval[2];
10462
10463 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10464 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10465
10466  /* Handle enhanced store insns and the LDRD DSP insn; the cases below
10467     are ordered according to the addressing modes of the store insns,
10468     beginning with STRH.  */
10469
10470 /* LDR or STR? */
10471 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10472 {
10473 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10474      /* The LDR insn can branch: if MOV LR, PC precedes an LDR insn
10475         that has R15 as its destination, the pair emulates a branch
10476         and link insn, and hence we need to save
10477         CPSR and PC as well.  */
10478 if (15 != reg_dest)
10479 {
10480 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10481 arm_insn_r->reg_rec_count = 1;
10482 }
10483 else
10484 {
10485 record_buf[0] = reg_dest;
10486 record_buf[1] = ARM_PS_REGNUM;
10487 arm_insn_r->reg_rec_count = 2;
10488 }
10489 }
10490 else
10491 {
10492 if (! bits (arm_insn_r->arm_insn, 4, 11))
10493 {
10494 /* Store insn, register offset and register pre-indexed,
10495 register post-indexed. */
10496 /* Get Rm. */
10497 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10498 /* Get Rn. */
10499 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10500          regcache_raw_read_unsigned (reg_cache, reg_src1,
10501                                      &u_regval[0]);
10502          regcache_raw_read_unsigned (reg_cache, reg_src2,
10503                                      &u_regval[1]);
10504 if (15 == reg_src2)
10505 {
10506              /* If R15 was used as Rn, the value is the current PC+8.  */
10507              /* Pre-indexed mode doesn't reach here; that would be an illegal insn.  */
10508 u_regval[0] = u_regval[0] + 8;
10509 }
10510 /* Calculate target store address, Rn +/- Rm, register offset. */
10511 /* U == 1. */
10512 if (bit (arm_insn_r->arm_insn, 23))
10513 {
10514 tgt_mem_addr = u_regval[0] + u_regval[1];
10515 }
10516 else
10517 {
10518 tgt_mem_addr = u_regval[1] - u_regval[0];
10519 }
10520
10521 switch (arm_insn_r->opcode)
10522 {
10523 /* STR. */
10524 case 8:
10525 case 12:
10526 /* STR. */
10527 case 9:
10528 case 13:
10529 /* STRT. */
10530 case 1:
10531 case 5:
10532 /* STR. */
10533 case 0:
10534 case 4:
10535 record_buf_mem[0] = 4;
10536 break;
10537
10538 /* STRB. */
10539 case 10:
10540 case 14:
10541 /* STRB. */
10542 case 11:
10543 case 15:
10544 /* STRBT. */
10545 case 3:
10546 case 7:
10547 /* STRB. */
10548 case 2:
10549 case 6:
10550 record_buf_mem[0] = 1;
10551 break;
10552
10553 default:
10554 gdb_assert_not_reached ("no decoding pattern found");
10555 break;
10556 }
10557 record_buf_mem[1] = tgt_mem_addr;
10558 arm_insn_r->mem_rec_count = 1;
10559
10560 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10561 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10562 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10563 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10564 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10565 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10566 )
10567 {
10568 /* Rn is going to be changed in pre-indexed mode and
10569 post-indexed mode as well. */
10570 record_buf[0] = reg_src2;
10571 arm_insn_r->reg_rec_count = 1;
10572 }
10573 }
10574 else
10575 {
10576 /* Store insn, scaled register offset; scaled pre-indexed. */
10577 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10578 /* Get Rm. */
10579 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10580 /* Get Rn. */
10581 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10582 /* Get shift_imm. */
10583 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10584 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10585 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10586 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10587 /* Offset_12 used as shift. */
10588 switch (offset_12)
10589 {
10590 case 0:
10591 /* Offset_12 used as index. */
10592 offset_12 = u_regval[0] << shift_imm;
10593 break;
10594
10595 case 1:
10596              offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10597 break;
10598
10599 case 2:
10600 if (!shift_imm)
10601 {
10602 if (bit (u_regval[0], 31))
10603 {
10604 offset_12 = 0xFFFFFFFF;
10605 }
10606 else
10607 {
10608 offset_12 = 0;
10609 }
10610 }
10611 else
10612 {
10613 /* This is arithmetic shift. */
10614 offset_12 = s_word >> shift_imm;
10615 }
10616 break;
10617
10618 case 3:
10619 if (!shift_imm)
10620 {
10621 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10622 &u_regval[1]);
10623 /* Get C flag value and shift it by 31. */
10624 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10625 | (u_regval[0]) >> 1);
10626 }
10627 else
10628 {
10629                  offset_12 = (u_regval[0] >> shift_imm)
10630                              | (u_regval[0] <<
10631                                 (8 * sizeof (uint32_t) - shift_imm));
10632 }
10633 break;
10634
10635 default:
10636 gdb_assert_not_reached ("no decoding pattern found");
10637 break;
10638 }
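          /* The switch above reproduces the addressing-mode-2 scaled
             register offset: the shift type is in bits 6-5 and the amount in
             bits 11-7, with shift_imm == 0 selecting the special forms
             LSR #32, ASR #32 and RRX.  For example, with Rm == 0x80000001
             and ROR #4 the computed offset is 0x18000000.  */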
10639
10640 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10641 /* bit U set. */
10642 if (bit (arm_insn_r->arm_insn, 23))
10643 {
10644 tgt_mem_addr = u_regval[1] + offset_12;
10645 }
10646 else
10647 {
10648 tgt_mem_addr = u_regval[1] - offset_12;
10649 }
10650
10651 switch (arm_insn_r->opcode)
10652 {
10653 /* STR. */
10654 case 8:
10655 case 12:
10656 /* STR. */
10657 case 9:
10658 case 13:
10659 /* STRT. */
10660 case 1:
10661 case 5:
10662 /* STR. */
10663 case 0:
10664 case 4:
10665 record_buf_mem[0] = 4;
10666 break;
10667
10668 /* STRB. */
10669 case 10:
10670 case 14:
10671 /* STRB. */
10672 case 11:
10673 case 15:
10674 /* STRBT. */
10675 case 3:
10676 case 7:
10677 /* STRB. */
10678 case 2:
10679 case 6:
10680 record_buf_mem[0] = 1;
10681 break;
10682
10683 default:
10684 gdb_assert_not_reached ("no decoding pattern found");
10685 break;
10686 }
10687 record_buf_mem[1] = tgt_mem_addr;
10688 arm_insn_r->mem_rec_count = 1;
10689
10690 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10691 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10692 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10693 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10694 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10695 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10696 )
10697 {
10698 /* Rn is going to be changed in register scaled pre-indexed
10699                 mode, and scaled post-indexed mode.  */
10700 record_buf[0] = reg_src2;
10701 arm_insn_r->reg_rec_count = 1;
10702 }
10703 }
10704 }
10705
10706 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10707 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10708 return 0;
10709}
10710
10711/* Handle ARM mode instructions with opcode 100.  */
10712
10713static int
10714arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10715{
10716 struct regcache *reg_cache = arm_insn_r->regcache;
10717 uint32_t register_count = 0, register_bits;
10718 uint32_t reg_base, addr_mode;
10719  uint32_t record_buf[24], record_buf_mem[48];
10720 uint32_t wback;
10721 ULONGEST u_regval;
10722
10723 /* Fetch the list of registers. */
10724 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10725 arm_insn_r->reg_rec_count = 0;
10726
10727 /* Fetch the base register that contains the address we are loading data
10728 to. */
10729 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10730
10731 /* Calculate wback. */
10732 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10733
10734 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10735 {
10736      /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB.  */
10737
10738      /* Find out which registers are going to be loaded from memory.  */
10739      while (register_bits)
10740 {
10741 if (register_bits & 0x00000001)
10742 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10743 register_bits = register_bits >> 1;
10744 register_count++;
10745 }
10746
10747
10748 /* If wback is true, also save the base register, which is going to be
10749 written to. */
10750 if (wback)
10751 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10752
10753 /* Save the CPSR register. */
10754 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10755 }
10756 else
10757 {
10758      /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA).  */
10759
10760 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10761
10762 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10763
10764 /* Find out how many registers are going to be stored to memory. */
10765      while (register_bits)
10766 {
10767 if (register_bits & 0x00000001)
10768 register_count++;
10769 register_bits = register_bits >> 1;
10770 }
10771
10772 switch (addr_mode)
10773 {
10774 /* STMDA (STMED): Decrement after. */
10775 case 0:
10776 record_buf_mem[1] = (uint32_t) u_regval
10777 - register_count * INT_REGISTER_SIZE + 4;
10778 break;
10779 /* STM (STMIA, STMEA): Increment after. */
10780 case 1:
10781 record_buf_mem[1] = (uint32_t) u_regval;
10782 break;
10783 /* STMDB (STMFD): Decrement before. */
10784 case 2:
10785 record_buf_mem[1] = (uint32_t) u_regval
10786 - register_count * INT_REGISTER_SIZE;
10787 break;
10788 /* STMIB (STMFA): Increment before. */
10789 case 3:
10790 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10791 break;
10792 default:
10793 gdb_assert_not_reached ("no decoding pattern found");
10794 break;
10795 }
10796
10797 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10798 arm_insn_r->mem_rec_count = 1;
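      /* Example: STMDB r13!, {r0-r3} with r13 == 0x1000 stores four words,
         so record_buf_mem describes 16 bytes starting at 0x0ff0, and r13
         itself is recorded below because of the write-back.  */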
10799
10800 /* If wback is true, also save the base register, which is going to be
10801 written to. */
10802 if (wback)
10803 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10804 }
10805
10806 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10807 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10808 return 0;
10809}
10810
10811/* Handling opcode 101 insns. */
10812
10813static int
10814arm_record_b_bl (insn_decode_record *arm_insn_r)
10815{
10816 uint32_t record_buf[8];
10817
10818 /* Handle B, BL, BLX(1) insns. */
10819 /* B simply branches so we do nothing here. */
10820  /* Note: BLX(1) doesn't fall here but instead it falls into
10821 extension space. */
10822 if (bit (arm_insn_r->arm_insn, 24))
10823 {
10824 record_buf[0] = ARM_LR_REGNUM;
10825 arm_insn_r->reg_rec_count = 1;
10826 }
10827
10828 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10829
10830 return 0;
10831}
10832
10833/* Record handler for instructions that process record does not support.  */
10834
10835static int
10836arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10837{
10838 printf_unfiltered (_("Process record does not support instruction "
10839 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10840 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10841
10842 return -1;
10843}
10844
10845/* Record handler for vector data transfer instructions. */
10846
10847static int
10848arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
10849{
10850 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
10851 uint32_t record_buf[4];
10852
10853 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10854 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
10855 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
10856 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
10857 bit_l = bit (arm_insn_r->arm_insn, 20);
10858 bit_c = bit (arm_insn_r->arm_insn, 8);
10859
10860 /* Handle VMOV instruction. */
10861 if (bit_l && bit_c)
10862 {
10863 record_buf[0] = reg_t;
10864 arm_insn_r->reg_rec_count = 1;
10865 }
10866 else if (bit_l && !bit_c)
10867 {
10868 /* Handle VMOV instruction. */
10869 if (bits_a == 0x00)
10870 {
10871 if (bit (arm_insn_r->arm_insn, 20))
10872 record_buf[0] = reg_t;
10873 else
10874 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10875 (reg_v << 1));
10876
10877 arm_insn_r->reg_rec_count = 1;
10878 }
10879 /* Handle VMRS instruction. */
10880 else if (bits_a == 0x07)
10881 {
10882 if (reg_t == 15)
10883 reg_t = ARM_PS_REGNUM;
10884
10885 record_buf[0] = reg_t;
10886 arm_insn_r->reg_rec_count = 1;
10887 }
10888 }
10889 else if (!bit_l && !bit_c)
10890 {
10891 /* Handle VMOV instruction. */
10892 if (bits_a == 0x00)
10893 {
10894 if (bit (arm_insn_r->arm_insn, 20))
10895 record_buf[0] = reg_t;
10896 else
10897 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10898 (reg_v << 1));
10899
10900 arm_insn_r->reg_rec_count = 1;
10901 }
10902 /* Handle VMSR instruction. */
10903 else if (bits_a == 0x07)
10904 {
10905 record_buf[0] = ARM_FPSCR_REGNUM;
10906 arm_insn_r->reg_rec_count = 1;
10907 }
10908 }
10909 else if (!bit_l && bit_c)
10910 {
10911 /* Handle VMOV instruction. */
10912 if (!(bits_a & 0x04))
10913 {
10914 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
10915 + ARM_D0_REGNUM;
10916 arm_insn_r->reg_rec_count = 1;
10917 }
10918 /* Handle VDUP instruction. */
10919 else
10920 {
10921 if (bit (arm_insn_r->arm_insn, 21))
10922 {
10923 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10924 record_buf[0] = reg_v + ARM_D0_REGNUM;
10925 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
10926 arm_insn_r->reg_rec_count = 2;
10927 }
10928 else
10929 {
10930 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10931 record_buf[0] = reg_v + ARM_D0_REGNUM;
10932 arm_insn_r->reg_rec_count = 1;
10933 }
10934 }
10935 }
10936
10937 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10938 return 0;
10939}
10940
10941/* Record handler for extension register load/store instructions. */
10942
10943static int
10944arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
10945{
10946 uint32_t opcode, single_reg;
10947 uint8_t op_vldm_vstm;
10948 uint32_t record_buf[8], record_buf_mem[128];
10949 ULONGEST u_regval = 0;
10950
10951 struct regcache *reg_cache = arm_insn_r->regcache;
10952 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10953
10954 opcode = bits (arm_insn_r->arm_insn, 20, 24);
10955 single_reg = bit (arm_insn_r->arm_insn, 8);
10956 op_vldm_vstm = opcode & 0x1b;
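  /* opcode holds bits 24-20 of the instruction (P, U, D, W and L for the
     extension register load/store forms); masking with 0x1b discards the
     D bit so that the VSTM/VLDM/VPUSH/VPOP variants can be matched
     irrespective of which half of the register bank is addressed.  */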
10957
10958 /* Handle VMOV instructions. */
10959 if ((opcode & 0x1e) == 0x04)
10960 {
10961 if (bit (arm_insn_r->arm_insn, 4))
10962 {
10963 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10964 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10965 arm_insn_r->reg_rec_count = 2;
10966 }
10967 else
10968 {
10969 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
10970 | bit (arm_insn_r->arm_insn, 5);
10971
10972 if (!single_reg)
10973 {
10974 record_buf[0] = num_regs + reg_m;
10975 record_buf[1] = num_regs + reg_m + 1;
10976 arm_insn_r->reg_rec_count = 2;
10977 }
10978 else
10979 {
10980 record_buf[0] = reg_m + ARM_D0_REGNUM;
10981 arm_insn_r->reg_rec_count = 1;
10982 }
10983 }
10984 }
10985 /* Handle VSTM and VPUSH instructions. */
10986 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
10987 || op_vldm_vstm == 0x12)
10988 {
10989 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
10990 uint32_t memory_index = 0;
10991
10992 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
10993 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
10994 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
10995      imm_off32 = imm_off8 << 2;
10996 memory_count = imm_off8;
10997
10998 if (bit (arm_insn_r->arm_insn, 23))
10999 start_address = u_regval;
11000 else
11001 start_address = u_regval - imm_off32;
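      /* For these forms imm8 encodes both the number of words transferred
         and the offset (imm8 * 4 bytes) from the base register; with U == 0
         (the VPUSH/decrement case) the lowest address written is
         Rn - imm8 * 4.  */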
11002
11003 if (bit (arm_insn_r->arm_insn, 21))
11004 {
11005 record_buf[0] = reg_rn;
11006 arm_insn_r->reg_rec_count = 1;
11007 }
11008
11009 while (memory_count > 0)
11010 {
11011 if (!single_reg)
11012 {
11013 record_buf_mem[memory_index] = start_address;
11014 record_buf_mem[memory_index + 1] = 4;
11015 start_address = start_address + 4;
11016 memory_index = memory_index + 2;
11017 }
11018 else
11019 {
11020 record_buf_mem[memory_index] = start_address;
11021 record_buf_mem[memory_index + 1] = 4;
11022 record_buf_mem[memory_index + 2] = start_address + 4;
11023 record_buf_mem[memory_index + 3] = 4;
11024 start_address = start_address + 8;
11025 memory_index = memory_index + 4;
11026 }
11027 memory_count--;
11028 }
11029 arm_insn_r->mem_rec_count = (memory_index >> 1);
11030 }
11031 /* Handle VLDM instructions. */
11032 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11033 || op_vldm_vstm == 0x13)
11034 {
11035 uint32_t reg_count, reg_vd;
11036 uint32_t reg_index = 0;
11037
11038 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11039 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11040
11041 if (single_reg)
11042 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11043 else
11044 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11045
11046 if (bit (arm_insn_r->arm_insn, 21))
11047 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11048
11049 while (reg_count > 0)
11050 {
11051 if (single_reg)
11052 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
11053 else
11054 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11055
11056 reg_count--;
11057 }
11058 arm_insn_r->reg_rec_count = reg_index;
11059 }
11060 /* VSTR Vector store register. */
11061 else if ((opcode & 0x13) == 0x10)
11062 {
11063 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11064 uint32_t memory_index = 0;
11065
11066 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11067 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11068 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11069      imm_off32 = imm_off8 << 2;
11070 memory_count = imm_off8;
11071
11072 if (bit (arm_insn_r->arm_insn, 23))
11073 start_address = u_regval + imm_off32;
11074 else
11075 start_address = u_regval - imm_off32;
11076
11077 if (single_reg)
11078 {
11079 record_buf_mem[memory_index] = start_address;
11080 record_buf_mem[memory_index + 1] = 4;
11081 arm_insn_r->mem_rec_count = 1;
11082 }
11083 else
11084 {
11085 record_buf_mem[memory_index] = start_address;
11086 record_buf_mem[memory_index + 1] = 4;
11087 record_buf_mem[memory_index + 2] = start_address + 4;
11088 record_buf_mem[memory_index + 3] = 4;
11089 arm_insn_r->mem_rec_count = 2;
11090 }
11091 }
11092 /* VLDR Vector load register. */
11093 else if ((opcode & 0x13) == 0x11)
11094 {
11095 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11096
11097 if (!single_reg)
11098 {
11099 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11100 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11101 }
11102 else
11103 {
11104 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11105 record_buf[0] = num_regs + reg_vd;
11106 }
11107 arm_insn_r->reg_rec_count = 1;
11108 }
11109
11110 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11111 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11112 return 0;
11113}
11114
11115/* Record handler for arm/thumb mode VFP data processing instructions. */
11116
11117static int
11118arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11119{
11120 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11121 uint32_t record_buf[4];
11122 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11123 enum insn_types curr_insn_type = INSN_INV;
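  /* The INSN_T* classes below only describe what gets clobbered:
     INSN_T0 records a pair of consecutive D registers, INSN_T1 a single
     D register (D:Vd), INSN_T2 a single-precision register (Vd:D), and
     INSN_T3 only FPSCR (used for the compare instructions).  */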
11124
11125 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11126 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11127 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11128 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11129 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11130 bit_d = bit (arm_insn_r->arm_insn, 22);
11131  opc1 = opc1 & ~0x04;
11132
11133 /* Handle VMLA, VMLS. */
11134 if (opc1 == 0x00)
11135 {
11136 if (bit (arm_insn_r->arm_insn, 10))
11137 {
11138 if (bit (arm_insn_r->arm_insn, 6))
11139 curr_insn_type = INSN_T0;
11140 else
11141 curr_insn_type = INSN_T1;
11142 }
11143 else
11144 {
11145 if (dp_op_sz)
11146 curr_insn_type = INSN_T1;
11147 else
11148 curr_insn_type = INSN_T2;
11149 }
11150 }
11151 /* Handle VNMLA, VNMLS, VNMUL. */
11152 else if (opc1 == 0x01)
11153 {
11154 if (dp_op_sz)
11155 curr_insn_type = INSN_T1;
11156 else
11157 curr_insn_type = INSN_T2;
11158 }
11159 /* Handle VMUL. */
11160 else if (opc1 == 0x02 && !(opc3 & 0x01))
11161 {
11162 if (bit (arm_insn_r->arm_insn, 10))
11163 {
11164 if (bit (arm_insn_r->arm_insn, 6))
11165 curr_insn_type = INSN_T0;
11166 else
11167 curr_insn_type = INSN_T1;
11168 }
11169 else
11170 {
11171 if (dp_op_sz)
11172 curr_insn_type = INSN_T1;
11173 else
11174 curr_insn_type = INSN_T2;
11175 }
11176 }
11177 /* Handle VADD, VSUB. */
11178 else if (opc1 == 0x03)
11179 {
11180 if (!bit (arm_insn_r->arm_insn, 9))
11181 {
11182 if (bit (arm_insn_r->arm_insn, 6))
11183 curr_insn_type = INSN_T0;
11184 else
11185 curr_insn_type = INSN_T1;
11186 }
11187 else
11188 {
11189 if (dp_op_sz)
11190 curr_insn_type = INSN_T1;
11191 else
11192 curr_insn_type = INSN_T2;
11193 }
11194 }
11195 /* Handle VDIV. */
11196  else if (opc1 == 0x08)
11197 {
11198 if (dp_op_sz)
11199 curr_insn_type = INSN_T1;
11200 else
11201 curr_insn_type = INSN_T2;
11202 }
11203 /* Handle all other vfp data processing instructions. */
11204 else if (opc1 == 0x0b)
11205 {
11206 /* Handle VMOV. */
11207 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11208 {
11209 if (bit (arm_insn_r->arm_insn, 4))
11210 {
11211 if (bit (arm_insn_r->arm_insn, 6))
11212 curr_insn_type = INSN_T0;
11213 else
11214 curr_insn_type = INSN_T1;
11215 }
11216 else
11217 {
11218 if (dp_op_sz)
11219 curr_insn_type = INSN_T1;
11220 else
11221 curr_insn_type = INSN_T2;
11222 }
11223 }
11224 /* Handle VNEG and VABS. */
11225 else if ((opc2 == 0x01 && opc3 == 0x01)
11226 || (opc2 == 0x00 && opc3 == 0x03))
11227 {
11228 if (!bit (arm_insn_r->arm_insn, 11))
11229 {
11230 if (bit (arm_insn_r->arm_insn, 6))
11231 curr_insn_type = INSN_T0;
11232 else
11233 curr_insn_type = INSN_T1;
11234 }
11235 else
11236 {
11237 if (dp_op_sz)
11238 curr_insn_type = INSN_T1;
11239 else
11240 curr_insn_type = INSN_T2;
11241 }
11242 }
11243 /* Handle VSQRT. */
11244 else if (opc2 == 0x01 && opc3 == 0x03)
11245 {
11246 if (dp_op_sz)
11247 curr_insn_type = INSN_T1;
11248 else
11249 curr_insn_type = INSN_T2;
11250 }
11251 /* Handle VCVT. */
11252 else if (opc2 == 0x07 && opc3 == 0x03)
11253 {
11254 if (!dp_op_sz)
11255 curr_insn_type = INSN_T1;
11256 else
11257 curr_insn_type = INSN_T2;
11258 }
11259 else if (opc3 & 0x01)
11260 {
11261 /* Handle VCVT. */
11262 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11263 {
11264 if (!bit (arm_insn_r->arm_insn, 18))
11265 curr_insn_type = INSN_T2;
11266 else
11267 {
11268 if (dp_op_sz)
11269 curr_insn_type = INSN_T1;
11270 else
11271 curr_insn_type = INSN_T2;
11272 }
11273 }
11274 /* Handle VCVT. */
11275 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11276 {
11277 if (dp_op_sz)
11278 curr_insn_type = INSN_T1;
11279 else
11280 curr_insn_type = INSN_T2;
11281 }
11282 /* Handle VCVTB, VCVTT. */
11283 else if ((opc2 & 0x0e) == 0x02)
11284 curr_insn_type = INSN_T2;
11285 /* Handle VCMP, VCMPE. */
11286 else if ((opc2 & 0x0e) == 0x04)
11287 curr_insn_type = INSN_T3;
11288 }
11289 }
11290
11291 switch (curr_insn_type)
11292 {
11293 case INSN_T0:
11294 reg_vd = reg_vd | (bit_d << 4);
11295 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11296 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11297 arm_insn_r->reg_rec_count = 2;
11298 break;
11299
11300 case INSN_T1:
11301 reg_vd = reg_vd | (bit_d << 4);
11302 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11303 arm_insn_r->reg_rec_count = 1;
11304 break;
11305
11306 case INSN_T2:
11307 reg_vd = (reg_vd << 1) | bit_d;
11308 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11309 arm_insn_r->reg_rec_count = 1;
11310 break;
11311
11312 case INSN_T3:
11313 record_buf[0] = ARM_FPSCR_REGNUM;
11314 arm_insn_r->reg_rec_count = 1;
11315 break;
11316
11317 default:
11318 gdb_assert_not_reached ("no decoding pattern found");
11319 break;
11320 }
11321
11322 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11323 return 0;
11324}
11325
11326/* Handling opcode 110 insns. */
11327
11328static int
11329arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11330{
11331 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
11332
11333 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11334 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11335 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11336
11337 if ((coproc & 0x0e) == 0x0a)
11338 {
11339 /* Handle extension register ld/st instructions. */
11340 if (!(op1 & 0x20))
11341        return arm_record_exreg_ld_st_insn (arm_insn_r);
11342
11343 /* 64-bit transfers between arm core and extension registers. */
11344 if ((op1 & 0x3e) == 0x04)
11345        return arm_record_exreg_ld_st_insn (arm_insn_r);
11346 }
11347 else
11348 {
11349 /* Handle coprocessor ld/st instructions. */
11350 if (!(op1 & 0x3a))
11351 {
11352 /* Store. */
11353 if (!op1_ebit)
11354 return arm_record_unsupported_insn (arm_insn_r);
11355 else
11356 /* Load. */
11357 return arm_record_unsupported_insn (arm_insn_r);
11358 }
11359
11360 /* Move to coprocessor from two arm core registers. */
11361 if (op1 == 0x4)
11362 return arm_record_unsupported_insn (arm_insn_r);
11363
11364 /* Move to two arm core registers from coprocessor. */
11365 if (op1 == 0x5)
11366 {
11367 uint32_t reg_t[2];
11368
11369 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11370 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11371 arm_insn_r->reg_rec_count = 2;
11372
11373 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11374 return 0;
11375 }
11376 }
11377 return arm_record_unsupported_insn (arm_insn_r);
11378}
11379
11380/* Handling opcode 111 insns. */
11381
11382static int
11383arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11384{
11385  uint32_t op, op1_sbit, op1_ebit, coproc;
11386 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11387 struct regcache *reg_cache = arm_insn_r->regcache;
11388  ULONGEST u_regval = 0;
11389
11390 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11391 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11392 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11393 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11394 op = bit (arm_insn_r->arm_insn, 4);
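  /* Dispatch for the coprocessor space: bit 24 set means SWI/SVC, a
     coprocessor field of 101x selects the VFP/Advanced SIMD handlers, and
     everything else is treated as a generic coprocessor operation, of which
     only MRC (which writes an ARM core register) is recorded below.  */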
11395
11396 /* Handle arm SWI/SVC system call instructions. */
11397  if (op1_sbit)
11398 {
11399 if (tdep->arm_syscall_record != NULL)
11400 {
11401 ULONGEST svc_operand, svc_number;
11402
11403 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11404
11405 if (svc_operand) /* OABI. */
11406 svc_number = svc_operand - 0x900000;
11407 else /* EABI. */
11408 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11409
11410          return tdep->arm_syscall_record (reg_cache, svc_number);
11411 }
11412 else
11413 {
11414 printf_unfiltered (_("no syscall record support\n"));
11415          return -1;
11416 }
11417 }
11418
11419 if ((coproc & 0x0e) == 0x0a)
11420 {
11421 /* VFP data-processing instructions. */
11422 if (!op1_sbit && !op)
11423        return arm_record_vfp_data_proc_insn (arm_insn_r);
11424
11425 /* Advanced SIMD, VFP instructions. */
11426 if (!op1_sbit && op)
11427        return arm_record_vdata_transfer_insn (arm_insn_r);
11428    }
11429 else
11430 {
11431 /* Coprocessor data operations. */
11432 if (!op1_sbit && !op)
11433 return arm_record_unsupported_insn (arm_insn_r);
11434
11435 /* Move to Coprocessor from ARM core register. */
11436 if (!op1_sbit && !op1_ebit && op)
11437 return arm_record_unsupported_insn (arm_insn_r);
11438
11439 /* Move to arm core register from coprocessor. */
11440 if (!op1_sbit && op1_ebit && op)
11441 {
11442 uint32_t record_buf[1];
11443
11444 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11445 if (record_buf[0] == 15)
11446 record_buf[0] = ARM_PS_REGNUM;
11447
11448 arm_insn_r->reg_rec_count = 1;
11449 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11450 record_buf);
11451 return 0;
11452 }
11453    }
11454
11455  return arm_record_unsupported_insn (arm_insn_r);
11456}
11457
11458/* Handling opcode 000 insns. */
11459
11460static int
11461thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11462{
11463 uint32_t record_buf[8];
11464 uint32_t reg_src1 = 0;
11465
11466 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11467
11468 record_buf[0] = ARM_PS_REGNUM;
11469 record_buf[1] = reg_src1;
11470 thumb_insn_r->reg_rec_count = 2;
11471
11472 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11473
11474 return 0;
11475}
11476
11477
11478/* Handling opcode 001 insns. */
11479
11480static int
11481thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11482{
11483 uint32_t record_buf[8];
11484 uint32_t reg_src1 = 0;
11485
11486 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11487
11488 record_buf[0] = ARM_PS_REGNUM;
11489 record_buf[1] = reg_src1;
11490 thumb_insn_r->reg_rec_count = 2;
11491
11492 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11493
11494 return 0;
11495}
11496
11497/* Handling opcode 010 insns. */
11498
11499static int
11500thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11501{
11502 struct regcache *reg_cache = thumb_insn_r->regcache;
11503 uint32_t record_buf[8], record_buf_mem[8];
11504
11505 uint32_t reg_src1 = 0, reg_src2 = 0;
11506 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11507
11508 ULONGEST u_regval[2] = {0};
11509
11510 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11511
11512 if (bit (thumb_insn_r->arm_insn, 12))
11513 {
11514 /* Handle load/store register offset. */
11515      opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11516      if (opcode2 >= 11 && opcode2 <= 15)
11517        {
11518          /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH.  */
11519          reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11520 record_buf[0] = reg_src1;
11521 thumb_insn_r->reg_rec_count = 1;
11522 }
11523 else if (opcode2 >= 8 && opcode2 <= 10)
11524 {
11525 /* STR(2), STRB(2), STRH(2) . */
11526 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11527 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11528 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11529 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11530 if (8 == opcode2)
11531 record_buf_mem[0] = 4; /* STR (2). */
11532 else if (10 == opcode2)
11533 record_buf_mem[0] = 1; /* STRB (2). */
11534 else if (9 == opcode2)
11535 record_buf_mem[0] = 2; /* STRH (2). */
11536 record_buf_mem[1] = u_regval[0] + u_regval[1];
11537 thumb_insn_r->mem_rec_count = 1;
11538 }
11539 }
11540 else if (bit (thumb_insn_r->arm_insn, 11))
11541 {
11542 /* Handle load from literal pool. */
11543 /* LDR(3). */
11544 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11545 record_buf[0] = reg_src1;
11546 thumb_insn_r->reg_rec_count = 1;
11547 }
11548 else if (opcode1)
11549 {
11550 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11551 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11552 if ((3 == opcode2) && (!opcode3))
11553 {
11554 /* Branch with exchange. */
11555 record_buf[0] = ARM_PS_REGNUM;
11556 thumb_insn_r->reg_rec_count = 1;
11557 }
11558 else
11559 {
11560 /* Format 8; special data processing insns. */
11561 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11562 record_buf[0] = ARM_PS_REGNUM;
11563 record_buf[1] = reg_src1;
11564 thumb_insn_r->reg_rec_count = 2;
11565 }
11566 }
11567 else
11568 {
11569 /* Format 5; data processing insns. */
11570 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11571 if (bit (thumb_insn_r->arm_insn, 7))
11572 {
11573 reg_src1 = reg_src1 + 8;
11574 }
11575 record_buf[0] = ARM_PS_REGNUM;
11576 record_buf[1] = reg_src1;
11577 thumb_insn_r->reg_rec_count = 2;
11578 }
11579
11580 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11581 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11582 record_buf_mem);
11583
11584 return 0;
11585}
11586
11587/* Handling opcode 001 insns. */
11588
11589static int
11590thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11591{
11592 struct regcache *reg_cache = thumb_insn_r->regcache;
11593 uint32_t record_buf[8], record_buf_mem[8];
11594
11595 uint32_t reg_src1 = 0;
11596 uint32_t opcode = 0, immed_5 = 0;
11597
11598 ULONGEST u_regval = 0;
11599
11600 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11601
11602 if (opcode)
11603 {
11604 /* LDR(1). */
11605 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11606 record_buf[0] = reg_src1;
11607 thumb_insn_r->reg_rec_count = 1;
11608 }
11609 else
11610 {
11611 /* STR(1). */
11612 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11613 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11614 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11615 record_buf_mem[0] = 4;
11616 record_buf_mem[1] = u_regval + (immed_5 * 4);
11617 thumb_insn_r->mem_rec_count = 1;
11618 }
11619
11620 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11621 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11622 record_buf_mem);
11623
11624 return 0;
11625}
11626
11627/* Handling opcode 100 insns. */
11628
11629static int
11630thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11631{
11632 struct regcache *reg_cache = thumb_insn_r->regcache;
11633 uint32_t record_buf[8], record_buf_mem[8];
11634
11635 uint32_t reg_src1 = 0;
11636 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11637
11638 ULONGEST u_regval = 0;
11639
11640 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11641
11642 if (3 == opcode)
11643 {
11644 /* LDR(4). */
11645 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11646 record_buf[0] = reg_src1;
11647 thumb_insn_r->reg_rec_count = 1;
11648 }
11649 else if (1 == opcode)
11650 {
11651 /* LDRH(1). */
11652 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11653 record_buf[0] = reg_src1;
11654 thumb_insn_r->reg_rec_count = 1;
11655 }
11656 else if (2 == opcode)
11657 {
11658 /* STR(3). */
11659 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11660 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11661 record_buf_mem[0] = 4;
11662 record_buf_mem[1] = u_regval + (immed_8 * 4);
11663 thumb_insn_r->mem_rec_count = 1;
11664 }
11665 else if (0 == opcode)
11666 {
11667 /* STRH(1). */
11668 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11669 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11670 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11671 record_buf_mem[0] = 2;
11672 record_buf_mem[1] = u_regval + (immed_5 * 2);
11673 thumb_insn_r->mem_rec_count = 1;
11674 }
11675
11676 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11677 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11678 record_buf_mem);
11679
11680 return 0;
11681}
11682
11683/* Handling opcode 101 insns. */
11684
11685static int
11686thumb_record_misc (insn_decode_record *thumb_insn_r)
11687{
11688 struct regcache *reg_cache = thumb_insn_r->regcache;
11689
11690 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11691 uint32_t register_bits = 0, register_count = 0;
11692 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
11693 uint32_t record_buf[24], record_buf_mem[48];
11694 uint32_t reg_src1;
11695
11696 ULONGEST u_regval = 0;
11697
11698 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11699 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11700 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11701
11702 if (14 == opcode2)
11703 {
11704 /* POP. */
11705 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11706 while (register_bits)
11707 {
11708 if (register_bits & 0x00000001)
11709 record_buf[index++] = register_count;
11710 register_bits = register_bits >> 1;
11711 register_count++;
11712 }
11713 record_buf[index++] = ARM_PS_REGNUM;
11714 record_buf[index++] = ARM_SP_REGNUM;
11715 thumb_insn_r->reg_rec_count = index;
11716 }
11717 else if (10 == opcode2)
11718 {
11719 /* PUSH. */
11720 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11721      regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11722 while (register_bits)
11723 {
11724 if (register_bits & 0x00000001)
11725 register_count++;
11726 register_bits = register_bits >> 1;
11727 }
11728 start_address = u_regval - \
11729 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
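      /* Example: PUSH {r0-r2, lr} with SP == 0x2000: three registers are in
         the list and the M bit (bit 8) adds LR, so the lowest address
         written is 0x2000 - 4 * (1 + 3) = 0x1ff0; the register_count slots
         starting there are recorded, along with SP, which is decremented.  */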
11730 thumb_insn_r->mem_rec_count = register_count;
11731 while (register_count)
11732 {
11733 record_buf_mem[(register_count * 2) - 1] = start_address;
11734 record_buf_mem[(register_count * 2) - 2] = 4;
11735 start_address = start_address + 4;
11736 register_count--;
11737 }
11738 record_buf[0] = ARM_SP_REGNUM;
11739 thumb_insn_r->reg_rec_count = 1;
11740 }
11741 else if (0x1E == opcode1)
11742 {
11743 /* BKPT insn. */
11744 /* Handle enhanced software breakpoint insn, BKPT. */
11745      /* CPSR is changed so execution continues in ARM state with normal
11746         interrupts disabled, entering Abort mode.  */
11747      /* PC is set according to the high vector configuration.  */
11748      /* If the user hits the breakpoint and then reverse-steps, we need to go back
11749         with the previous CPSR and Program Counter.  */
11750 record_buf[0] = ARM_PS_REGNUM;
11751 record_buf[1] = ARM_LR_REGNUM;
11752 thumb_insn_r->reg_rec_count = 2;
11753 /* We need to save SPSR value, which is not yet done. */
11754 printf_unfiltered (_("Process record does not support instruction "
11755 "0x%0x at address %s.\n"),
11756 thumb_insn_r->arm_insn,
11757 paddress (thumb_insn_r->gdbarch,
11758 thumb_insn_r->this_addr));
11759 return -1;
11760 }
11761 else if ((0 == opcode) || (1 == opcode))
11762 {
11763 /* ADD(5), ADD(6). */
11764 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11765 record_buf[0] = reg_src1;
11766 thumb_insn_r->reg_rec_count = 1;
11767 }
11768 else if (2 == opcode)
11769 {
11770 /* ADD(7), SUB(4). */
11771 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11772 record_buf[0] = ARM_SP_REGNUM;
11773 thumb_insn_r->reg_rec_count = 1;
11774 }
11775
11776 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11777 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11778 record_buf_mem);
11779
11780 return 0;
11781}
11782
11783/* Handling opcode 110 insns. */
11784
11785static int
11786thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11787{
11788 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11789 struct regcache *reg_cache = thumb_insn_r->regcache;
11790
11791  uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success.  */
11792 uint32_t reg_src1 = 0;
11793 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
11794 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
11795 uint32_t record_buf[24], record_buf_mem[48];
11796
11797 ULONGEST u_regval = 0;
11798
11799 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11800 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
11801
11802 if (1 == opcode2)
11803 {
11804
11805 /* LDMIA. */
11806 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11807 /* Get Rn. */
11808 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11809 while (register_bits)
11810 {
11811 if (register_bits & 0x00000001)
11812            record_buf[index++] = register_count;
11813          register_bits = register_bits >> 1;
11814          register_count++;
11815        }
11816 record_buf[index++] = reg_src1;
11817 thumb_insn_r->reg_rec_count = index;
11818 }
11819 else if (0 == opcode2)
11820 {
11821      /* Handle STMIA.  */
11822 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11823 /* Get Rn. */
11824 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11825 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11826 while (register_bits)
11827 {
11828 if (register_bits & 0x00000001)
11829 register_count++;
11830 register_bits = register_bits >> 1;
11831 }
11832 start_address = u_regval;
11833 thumb_insn_r->mem_rec_count = register_count;
11834 while (register_count)
11835 {
11836 record_buf_mem[(register_count * 2) - 1] = start_address;
11837 record_buf_mem[(register_count * 2) - 2] = 4;
11838 start_address = start_address + 4;
11839 register_count--;
11840 }
11841 }
11842 else if (0x1F == opcode1)
11843 {
11844 /* Handle arm syscall insn. */
11845      if (tdep->arm_syscall_record != NULL)
11846        {
11847 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
11848 ret = tdep->arm_syscall_record (reg_cache, u_regval);
11849 }
11850 else
11851 {
11852 printf_unfiltered (_("no syscall record support\n"));
11853 return -1;
11854 }
11855 }
11856
11857  /* B (1), conditional branch is automatically taken care of in process_record,
11858 as PC is saved there. */
11859
11860 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11861 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11862 record_buf_mem);
11863
11864 return ret;
11865}
11866
11867/* Handling opcode 111 insns. */
11868
11869static int
11870thumb_record_branch (insn_decode_record *thumb_insn_r)
11871{
11872 uint32_t record_buf[8];
11873 uint32_t bits_h = 0;
11874
11875 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
11876
11877 if (2 == bits_h || 3 == bits_h)
11878 {
11879 /* BL */
11880 record_buf[0] = ARM_LR_REGNUM;
11881 thumb_insn_r->reg_rec_count = 1;
11882 }
11883 else if (1 == bits_h)
11884 {
11885 /* BLX(1). */
11886 record_buf[0] = ARM_PS_REGNUM;
11887 record_buf[1] = ARM_LR_REGNUM;
11888 thumb_insn_r->reg_rec_count = 2;
11889 }
11890
11891  /* B(2) is automatically taken care of in process_record, as PC is
11892 saved there. */
11893
11894 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11895
11896 return 0;
11897}
11898
11899/* Handler for thumb2 load/store multiple instructions. */
11900
11901static int
11902thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
11903{
11904 struct regcache *reg_cache = thumb2_insn_r->regcache;
11905
11906 uint32_t reg_rn, op;
11907 uint32_t register_bits = 0, register_count = 0;
11908 uint32_t index = 0, start_address = 0;
11909 uint32_t record_buf[24], record_buf_mem[48];
11910
11911 ULONGEST u_regval = 0;
11912
11913 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
11914 op = bits (thumb2_insn_r->arm_insn, 23, 24);
11915
11916 if (0 == op || 3 == op)
11917 {
11918 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11919 {
11920 /* Handle RFE instruction. */
11921 record_buf[0] = ARM_PS_REGNUM;
11922 thumb2_insn_r->reg_rec_count = 1;
11923 }
11924 else
11925 {
11926 /* SRS would need the banked SP, so it is not supported here. */
11927 return arm_record_unsupported_insn (thumb2_insn_r);
11928 }
11929 }
11930 else if (1 == op || 2 == op)
11931 {
11932 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11933 {
11934 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
11935 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11936 while (register_bits)
11937 {
11938 if (register_bits & 0x00000001)
11939 record_buf[index++] = register_count;
11940
11941 register_count++;
11942 register_bits = register_bits >> 1;
11943 }
11944 record_buf[index++] = reg_rn;
11945 record_buf[index++] = ARM_PS_REGNUM;
11946 thumb2_insn_r->reg_rec_count = index;
11947 }
11948 else
11949 {
11950 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
11951 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11952 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11953 while (register_bits)
11954 {
11955 if (register_bits & 0x00000001)
11956 register_count++;
11957
11958 register_bits = register_bits >> 1;
11959 }
11960
11961 if (1 == op)
11962 {
11963 /* Start address calculation for STM/STMIA/STMEA (increment after). */
11964 start_address = u_regval;
11965 }
11966 else if (2 == op)
11967 {
11968 /* Start address calculation for STMDB/STMFD (decrement before). */
11969 start_address = u_regval - register_count * 4;
11970 }
11971
11972 thumb2_insn_r->mem_rec_count = register_count;
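 /* Record one 4-byte (length, address) pair per stored register, walking up
    from the lowest address written by the store.  */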
11973 while (register_count)
11974 {
11975 record_buf_mem[register_count * 2 - 1] = start_address;
11976 record_buf_mem[register_count * 2 - 2] = 4;
11977 start_address = start_address + 4;
11978 register_count--;
11979 }
11980 record_buf[0] = reg_rn;
11981 record_buf[1] = ARM_PS_REGNUM;
11982 thumb2_insn_r->reg_rec_count = 2;
11983 }
11984 }
11985
11986 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
11987 record_buf_mem);
11988 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
11989 record_buf);
11990 return ARM_RECORD_SUCCESS;
11991}
11992
11993/* Handler for thumb2 load/store (dual/exclusive) and table branch
11994 instructions. */
11995
11996static int
11997thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
11998{
11999 struct regcache *reg_cache = thumb2_insn_r->regcache;
12000
12001 uint32_t reg_rd, reg_rn, offset_imm;
12002 uint32_t reg_dest1, reg_dest2;
12003 uint32_t address, offset_addr;
12004 uint32_t record_buf[8], record_buf_mem[8];
12005 uint32_t op1, op2, op3;
12006 LONGEST s_word;
12007
12008 ULONGEST u_regval[2];
12009
12010 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12011 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12012 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12013
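 /* Bit INSN_S_L_BIT_NUM is the load/store bit: the load forms only change
    registers, while the store forms below also change memory (and, for the
    exclusive stores, the result register Rd).  */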
12014 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12015 {
12016 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12017 {
12018 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12019 record_buf[0] = reg_dest1;
12020 record_buf[1] = ARM_PS_REGNUM;
12021 thumb2_insn_r->reg_rec_count = 2;
12022 }
12023
12024 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12025 {
12026 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12027 record_buf[2] = reg_dest2;
12028 thumb2_insn_r->reg_rec_count = 3;
12029 }
12030 }
12031 else
12032 {
12033 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12034 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12035
12036 if (0 == op1 && 0 == op2)
12037 {
12038 /* Handle STREX. */
12039 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12040 address = u_regval[0] + (offset_imm * 4);
12041 record_buf_mem[0] = 4;
12042 record_buf_mem[1] = address;
12043 thumb2_insn_r->mem_rec_count = 1;
12044 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12045 record_buf[0] = reg_rd;
12046 thumb2_insn_r->reg_rec_count = 1;
12047 }
12048 else if (1 == op1 && 0 == op2)
12049 {
12050 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12051 record_buf[0] = reg_rd;
12052 thumb2_insn_r->reg_rec_count = 1;
12053 address = u_regval[0];
12054 record_buf_mem[1] = address;
12055
12056 if (4 == op3)
12057 {
12058 /* Handle STREXB. */
12059 record_buf_mem[0] = 1;
12060 thumb2_insn_r->mem_rec_count = 1;
12061 }
12062 else if (5 == op3)
12063 {
12064 /* Handle STREXH. */
12065 record_buf_mem[0] = 2;
12066 thumb2_insn_r->mem_rec_count = 1;
12067 }
12068 else if (7 == op3)
12069 {
12070 /* Handle STREXD. */
12071 address = u_regval[0];
12072 record_buf_mem[0] = 4;
12073 record_buf_mem[2] = 4;
12074 record_buf_mem[3] = address + 4;
12075 thumb2_insn_r->mem_rec_count = 2;
12076 }
12077 }
12078 else
12079 {
12080 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12081
12082 if (bit (thumb2_insn_r->arm_insn, 24))
12083 {
12084 if (bit (thumb2_insn_r->arm_insn, 23))
12085 offset_addr = u_regval[0] + (offset_imm * 4);
12086 else
12087 offset_addr = u_regval[0] - (offset_imm * 4);
12088
12089 address = offset_addr;
12090 }
12091 else
12092 address = u_regval[0];
12093
12094 record_buf_mem[0] = 4;
12095 record_buf_mem[1] = address;
12096 record_buf_mem[2] = 4;
12097 record_buf_mem[3] = address + 4;
12098 thumb2_insn_r->mem_rec_count = 2;
12099 record_buf[0] = reg_rn;
12100 thumb2_insn_r->reg_rec_count = 1;
12101 }
12102 }
12103
12104 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12105 record_buf);
12106 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12107 record_buf_mem);
12108 return ARM_RECORD_SUCCESS;
12109}
12110
12111/* Handler for thumb2 data processing (shift register and modified immediate)
12112 instructions. */
12113
12114static int
12115thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12116{
12117 uint32_t reg_rd, op;
12118 uint32_t record_buf[8];
12119
12120 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12121 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12122
12123 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12124 {
12125 record_buf[0] = ARM_PS_REGNUM;
12126 thumb2_insn_r->reg_rec_count = 1;
12127 }
12128 else
12129 {
12130 record_buf[0] = reg_rd;
12131 record_buf[1] = ARM_PS_REGNUM;
12132 thumb2_insn_r->reg_rec_count = 2;
12133 }
12134
12135 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12136 record_buf);
12137 return ARM_RECORD_SUCCESS;
12138}
12139
12140 /* Generic handler for thumb2 instructions which affect the destination
12141 and PS registers. */
12142
12143static int
12144thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12145{
12146 uint32_t reg_rd;
12147 uint32_t record_buf[8];
12148
12149 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12150
12151 record_buf[0] = reg_rd;
12152 record_buf[1] = ARM_PS_REGNUM;
12153 thumb2_insn_r->reg_rec_count = 2;
12154
12155 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12156 record_buf);
12157 return ARM_RECORD_SUCCESS;
12158}
12159
12160/* Handler for thumb2 branch and miscellaneous control instructions. */
12161
12162static int
12163thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12164{
12165 uint32_t op, op1, op2;
12166 uint32_t record_buf[8];
12167
12168 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12169 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12170 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12171
12172 /* Handle MSR insn. */
12173 if (!(op1 & 0x2) && 0x38 == op)
12174 {
12175 if (!(op2 & 0x3))
12176 {
12177 /* CPSR is going to be changed. */
12178 record_buf[0] = ARM_PS_REGNUM;
12179 thumb2_insn_r->reg_rec_count = 1;
12180 }
12181 else
12182 {
12183 arm_record_unsupported_insn (thumb2_insn_r);
12184 return -1;
12185 }
12186 }
12187 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12188 {
12189 /* BLX. */
12190 record_buf[0] = ARM_PS_REGNUM;
12191 record_buf[1] = ARM_LR_REGNUM;
12192 thumb2_insn_r->reg_rec_count = 2;
12193 }
12194
12195 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12196 record_buf);
12197 return ARM_RECORD_SUCCESS;
12198}
12199
12200/* Handler for thumb2 store single data item instructions. */
12201
12202static int
12203thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12204{
12205 struct regcache *reg_cache = thumb2_insn_r->regcache;
12206
12207 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12208 uint32_t address, offset_addr;
12209 uint32_t record_buf[8], record_buf_mem[8];
12210 uint32_t op1, op2;
12211
12212 ULONGEST u_regval[2];
12213
12214 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12215 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12216 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12217 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12218
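 /* Work out the effective address the store writes to from the
    addressing-mode bits, then record the bytes written there.  */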
12219 if (bit (thumb2_insn_r->arm_insn, 23))
12220 {
12221 /* T2 encoding. */
12222 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12223 offset_addr = u_regval[0] + offset_imm;
12224 address = offset_addr;
12225 }
12226 else
12227 {
12228 /* T3 encoding. */
12229 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12230 {
12231 /* Handle STRB (register). */
12232 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12233 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12234 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12235 offset_addr = u_regval[1] << shift_imm;
12236 address = u_regval[0] + offset_addr;
12237 }
12238 else
12239 {
12240 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12241 if (bit (thumb2_insn_r->arm_insn, 10))
12242 {
12243 if (bit (thumb2_insn_r->arm_insn, 9))
12244 offset_addr = u_regval[0] + offset_imm;
12245 else
12246 offset_addr = u_regval[0] - offset_imm;
12247
12248 address = offset_addr;
12249 }
12250 else
12251 address = u_regval[0];
12252 }
12253 }
12254
12255 switch (op1)
12256 {
12257 /* Store byte instructions. */
12258 case 4:
12259 case 0:
12260 record_buf_mem[0] = 1;
12261 break;
12262 /* Store half word instructions. */
12263 case 1:
12264 case 5:
12265 record_buf_mem[0] = 2;
12266 break;
12267 /* Store word instructions. */
12268 case 2:
12269 case 6:
12270 record_buf_mem[0] = 4;
12271 break;
12272
12273 default:
12274 gdb_assert_not_reached ("no decoding pattern found");
12275 break;
12276 }
12277
12278 record_buf_mem[1] = address;
12279 thumb2_insn_r->mem_rec_count = 1;
12280 record_buf[0] = reg_rn;
12281 thumb2_insn_r->reg_rec_count = 1;
12282
12283 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12284 record_buf);
12285 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12286 record_buf_mem);
12287 return ARM_RECORD_SUCCESS;
12288}
12289
12290/* Handler for thumb2 load memory hints instructions. */
12291
12292static int
12293thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12294{
12295 uint32_t record_buf[8];
12296 uint32_t reg_rt, reg_rn;
12297
12298 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12299 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12300
12301 if (ARM_PC_REGNUM != reg_rt)
12302 {
12303 record_buf[0] = reg_rt;
12304 record_buf[1] = reg_rn;
12305 record_buf[2] = ARM_PS_REGNUM;
12306 thumb2_insn_r->reg_rec_count = 3;
12307
12308 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12309 record_buf);
12310 return ARM_RECORD_SUCCESS;
12311 }
12312
12313 return ARM_RECORD_FAILURE;
12314}
12315
12316/* Handler for thumb2 load word instructions. */
12317
12318static int
12319thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12320{
12321 uint32_t opcode1 = 0, opcode2 = 0;
12322 uint32_t record_buf[8];
12323
12324 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12325 record_buf[1] = ARM_PS_REGNUM;
12326 thumb2_insn_r->reg_rec_count = 2;
12327
12328 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12329 record_buf);
12330 return ARM_RECORD_SUCCESS;
12331}
12332
12333/* Handler for thumb2 long multiply, long multiply accumulate, and
12334 divide instructions. */
12335
12336static int
12337thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12338{
12339 uint32_t opcode1 = 0, opcode2 = 0;
12340 uint32_t record_buf[8];
12341 uint32_t reg_src1 = 0;
12342
12343 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12344 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12345
12346 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12347 {
12348 /* Handle SMULL(S), UMULL(S), SMLAL(S) and UMLAL(S). */
12350 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12351 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12352 record_buf[2] = ARM_PS_REGNUM;
12353 thumb2_insn_r->reg_rec_count = 3;
12354 }
12355 else if (1 == opcode1 || 3 == opcode1)
12356 {
12357 /* Handle SDIV and UDIV. */
12358 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12359 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12360 record_buf[2] = ARM_PS_REGNUM;
12361 thumb2_insn_r->reg_rec_count = 3;
12362 }
12363 else
12364 return ARM_RECORD_FAILURE;
12365
12366 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12367 record_buf);
12368 return ARM_RECORD_SUCCESS;
12369}
12370
12371/* Record handler for thumb32 coprocessor instructions. */
12372
12373static int
12374thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12375{
12376 if (bit (thumb2_insn_r->arm_insn, 25))
12377 return arm_record_coproc_data_proc (thumb2_insn_r);
12378 else
12379 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12380}
12381
12382 /* Record handler for Advanced SIMD structure load/store instructions. */
12383
12384static int
12385thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12386{
12387 struct regcache *reg_cache = thumb2_insn_r->regcache;
12388 uint32_t l_bit, a_bit, b_bits;
12389 uint32_t record_buf[128], record_buf_mem[128];
12390 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
12391 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12392 uint8_t f_ebytes;
12393
12394 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12395 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12396 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12397 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12398 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12399 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12400 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12401 f_esize = 8 * f_ebytes;
12402 f_elem = 8 / f_ebytes;
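 /* f_ebytes is the element size in bytes taken from the size field;
    f_elem is the number of such elements in one 64-bit D register.  */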
12403
12404 if (!l_bit)
12405 {
12406 ULONGEST u_regval = 0;
12407 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12408 address = u_regval;
12409
12410 if (!a_bit)
12411 {
12412 /* Handle VST1. */
12413 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12414 {
12415 if (b_bits == 0x07)
12416 bf_regs = 1;
12417 else if (b_bits == 0x0a)
12418 bf_regs = 2;
12419 else if (b_bits == 0x06)
12420 bf_regs = 3;
12421 else if (b_bits == 0x02)
12422 bf_regs = 4;
12423 else
12424 bf_regs = 0;
12425
12426 for (index_r = 0; index_r < bf_regs; index_r++)
12427 {
12428 for (index_e = 0; index_e < f_elem; index_e++)
12429 {
12430 record_buf_mem[index_m++] = f_ebytes;
12431 record_buf_mem[index_m++] = address;
12432 address = address + f_ebytes;
12433 thumb2_insn_r->mem_rec_count += 1;
12434 }
12435 }
12436 }
12437 /* Handle VST2. */
12438 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12439 {
12440 if (b_bits == 0x09 || b_bits == 0x08)
12441 bf_regs = 1;
12442 else if (b_bits == 0x03)
12443 bf_regs = 2;
12444 else
12445 bf_regs = 0;
12446
12447 for (index_r = 0; index_r < bf_regs; index_r++)
12448 for (index_e = 0; index_e < f_elem; index_e++)
12449 {
12450 for (loop_t = 0; loop_t < 2; loop_t++)
12451 {
12452 record_buf_mem[index_m++] = f_ebytes;
12453 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12454 thumb2_insn_r->mem_rec_count += 1;
12455 }
12456 address = address + (2 * f_ebytes);
12457 }
12458 }
12459 /* Handle VST3. */
12460 else if ((b_bits & 0x0e) == 0x04)
12461 {
12462 for (index_e = 0; index_e < f_elem; index_e++)
12463 {
12464 for (loop_t = 0; loop_t < 3; loop_t++)
12465 {
12466 record_buf_mem[index_m++] = f_ebytes;
12467 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12468 thumb2_insn_r->mem_rec_count += 1;
12469 }
12470 address = address + (3 * f_ebytes);
12471 }
12472 }
12473 /* Handle VST4. */
12474 else if (!(b_bits & 0x0e))
12475 {
12476 for (index_e = 0; index_e < f_elem; index_e++)
12477 {
12478 for (loop_t = 0; loop_t < 4; loop_t++)
12479 {
12480 record_buf_mem[index_m++] = f_ebytes;
12481 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12482 thumb2_insn_r->mem_rec_count += 1;
12483 }
12484 address = address + (4 * f_ebytes);
12485 }
12486 }
12487 }
12488 else
12489 {
12490 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12491
12492 if (bft_size == 0x00)
12493 f_ebytes = 1;
12494 else if (bft_size == 0x01)
12495 f_ebytes = 2;
12496 else if (bft_size == 0x02)
12497 f_ebytes = 4;
12498 else
12499 f_ebytes = 0;
12500
12501 /* Handle VST1. */
12502 if (!(b_bits & 0x0b) || b_bits == 0x08)
12503 thumb2_insn_r->mem_rec_count = 1;
12504 /* Handle VST2. */
12505 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12506 thumb2_insn_r->mem_rec_count = 2;
12507 /* Handle VST3. */
12508 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12509 thumb2_insn_r->mem_rec_count = 3;
12510 /* Handle VST4. */
12511 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12512 thumb2_insn_r->mem_rec_count = 4;
12513
12514 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12515 {
12516 record_buf_mem[index_m * 2] = f_ebytes;
12517 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12518 }
12519 }
12520 }
12521 else
12522 {
12523 if (!a_bit)
12524 {
12525 /* Handle VLD1. */
12526 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12527 thumb2_insn_r->reg_rec_count = 1;
12528 /* Handle VLD2. */
12529 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12530 thumb2_insn_r->reg_rec_count = 2;
12531 /* Handle VLD3. */
12532 else if ((b_bits & 0x0e) == 0x04)
12533 thumb2_insn_r->reg_rec_count = 3;
12534 /* Handle VLD4. */
12535 else if (!(b_bits & 0x0e))
12536 thumb2_insn_r->reg_rec_count = 4;
12537 }
12538 else
12539 {
12540 /* Handle VLD1. */
12541 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12542 thumb2_insn_r->reg_rec_count = 1;
12543 /* Handle VLD2. */
12544 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12545 thumb2_insn_r->reg_rec_count = 2;
12546 /* Handle VLD3. */
12547 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12548 thumb2_insn_r->reg_rec_count = 3;
12549 /* Handle VLD4. */
12550 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12551 thumb2_insn_r->reg_rec_count = 4;
12552
12553 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12554 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12555 }
12556 }
12557
12558 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12559 {
12560 record_buf[index_r] = reg_rn;
12561 thumb2_insn_r->reg_rec_count += 1;
12562 }
12563
12564 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12565 record_buf);
12566 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12567 record_buf_mem);
12568 return 0;
12569}
12570
12571/* Decodes thumb2 instruction type and invokes its record handler. */
12572
12573static unsigned int
12574thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12575{
12576 uint32_t op, op1, op2;
12577
12578 op = bit (thumb2_insn_r->arm_insn, 15);
12579 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12580 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12581
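 /* op1 (bits 28:27 of the combined 32-bit insn) selects the major Thumb-2
    encoding group; op and op2 narrow it down to the instruction classes
    dispatched below.  */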
12582 if (op1 == 0x01)
12583 {
12584 if (!(op2 & 0x64))
12585 {
12586 /* Load/store multiple instruction. */
12587 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12588 }
12589 else if (!((op2 & 0x64) ^ 0x04))
12590 {
12591 /* Load/store (dual/exclusive) and table branch instruction. */
12592 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12593 }
12594 else if (!((op2 & 0x20) ^ 0x20))
12595 {
12596 /* Data-processing (shifted register). */
12597 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12598 }
12599 else if (op2 & 0x40)
12600 {
12601 /* Co-processor instructions. */
60cc5e93 12602 return thumb2_record_coproc_insn (thumb2_insn_r);
12603 }
12604 }
12605 else if (op1 == 0x02)
12606 {
12607 if (op)
12608 {
12609 /* Branches and miscellaneous control instructions. */
12610 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12611 }
12612 else if (op2 & 0x20)
12613 {
12614 /* Data-processing (plain binary immediate) instruction. */
12615 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12616 }
12617 else
12618 {
12619 /* Data-processing (modified immediate). */
12620 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12621 }
12622 }
12623 else if (op1 == 0x03)
12624 {
12625 if (!(op2 & 0x71))
12626 {
12627 /* Store single data item. */
12628 return thumb2_record_str_single_data (thumb2_insn_r);
12629 }
12630 else if (!((op2 & 0x71) ^ 0x10))
12631 {
12632 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12633 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12634 }
12635 else if (!((op2 & 0x67) ^ 0x01))
12636 {
12637 /* Load byte, memory hints instruction. */
12638 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12639 }
12640 else if (!((op2 & 0x67) ^ 0x03))
12641 {
12642 /* Load halfword, memory hints instruction. */
12643 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12644 }
12645 else if (!((op2 & 0x67) ^ 0x05))
12646 {
12647 /* Load word instruction. */
12648 return thumb2_record_ld_word (thumb2_insn_r);
12649 }
12650 else if (!((op2 & 0x70) ^ 0x20))
12651 {
12652 /* Data-processing (register) instruction. */
12653 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12654 }
12655 else if (!((op2 & 0x78) ^ 0x30))
12656 {
12657 /* Multiply, multiply accumulate, abs diff instruction. */
12658 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12659 }
12660 else if (!((op2 & 0x78) ^ 0x38))
12661 {
12662 /* Long multiply, long multiply accumulate, and divide. */
12663 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12664 }
12665 else if (op2 & 0x40)
12666 {
12667 /* Co-processor instructions. */
60cc5e93 12668 return thumb2_record_coproc_insn (thumb2_insn_r);
12669 }
12670 }
12671
12672 return -1;
12673}
12674
12675 /* Extract an arm/thumb/thumb2 insn depending on its size. Return 0 on
12676 success and a positive value on failure. */
12677
12678static int
12679extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12680{
12681 gdb_byte buf[insn_size];
12682
12683 memset (&buf[0], 0, insn_size);
12684
12685 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12686 return 1;
12687 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12688 insn_size,
2959fed9 12689 gdbarch_byte_order_for_code (insn_record->gdbarch));
12690 return 0;
12691}
12692
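/* Signature of the per-class record handlers dispatched from decode_insn.  */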
12693typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12694
12695 /* Decode an arm/thumb insn depending on its condition codes and opcodes, and
12696 dispatch it to the matching record handler. */
12697
12698static int
12699decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12700 uint32_t insn_size)
12701{
12702
12703 /* Bits 25, 26, 27 (counting from 0) decode the type of an ARM instruction. */
0fa9c223 12704 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12705 {
12706 arm_record_data_proc_misc_ld_str, /* 000. */
12707 arm_record_data_proc_imm, /* 001. */
12708 arm_record_ld_st_imm_offset, /* 010. */
12709 arm_record_ld_st_reg_offset, /* 011. */
12710 arm_record_ld_st_multiple, /* 100. */
12711 arm_record_b_bl, /* 101. */
60cc5e93 12712 arm_record_asimd_vfp_coproc, /* 110. */
12713 arm_record_coproc_data_proc /* 111. */
12714 };
12715
12716 /* Bits 13, 14, 15 (counting from 0) decode the type of a Thumb instruction. */
0fa9c223 12717 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12718 {
12719 thumb_record_shift_add_sub, /* 000. */
12720 thumb_record_add_sub_cmp_mov, /* 001. */
12721 thumb_record_ld_st_reg_offset, /* 010. */
12722 thumb_record_ld_st_imm_offset, /* 011. */
12723 thumb_record_ld_st_stack, /* 100. */
12724 thumb_record_misc, /* 101. */
12725 thumb_record_ldm_stm_swi, /* 110. */
12726 thumb_record_branch /* 111. */
12727 };
12728
12729 uint32_t ret = 0; /* Return value: negative: failure, 0: success. */
12730 uint32_t insn_id = 0;
12731
12732 if (extract_arm_insn (arm_record, insn_size))
12733 {
12734 if (record_debug)
12735 {
12736 printf_unfiltered (_("Process record: error reading memory at "
12737 "addr %s len = %d.\n"),
12738 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
12739 }
12740 return -1;
12741 }
12742 else if (ARM_RECORD == record_type)
12743 {
12744 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12745 insn_id = bits (arm_record->arm_insn, 25, 27);
12746 ret = arm_record_extension_space (arm_record);
12747 /* If this insn has fallen into extension space
12748 then we need not decode it anymore. */
12749 if (ret != -1 && !INSN_RECORDED(arm_record))
12750 {
12751 ret = arm_handle_insn[insn_id] (arm_record);
12752 }
12753 }
12754 else if (THUMB_RECORD == record_type)
12755 {
12756 /* Thumb instructions do not have condition codes, so set cond to -1. */
12757 arm_record->cond = -1;
12758 insn_id = bits (arm_record->arm_insn, 13, 15);
12759 ret = thumb_handle_insn[insn_id] (arm_record);
12760 }
12761 else if (THUMB2_RECORD == record_type)
12762 {
12763 /* Thumb-2 instructions do not have condition codes either, so set cond to -1. */
12764 arm_record->cond = -1;
12765
12766 /* Swap the halfwords so the leading halfword of the 32-bit Thumb insn sits in the upper 16 bits, as the thumb2 decode handlers expect. */
12767 arm_record->arm_insn
12768 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
12769
12770 insn_id = thumb2_record_decode_insn_handler (arm_record);
12771
12772 if (insn_id != ARM_RECORD_SUCCESS)
12773 {
12774 arm_record_unsupported_insn (arm_record);
12775 ret = -1;
12776 }
12777 }
12778 else
12779 {
12780 /* Unknown record type; this should be unreachable. */
12781 gdb_assert_not_reached ("not a valid instruction, could not decode");
12782 }
12783
12784 return ret;
12785}
12786
12787
12788/* Cleans up local record registers and memory allocations. */
12789
12790static void
12791deallocate_reg_mem (insn_decode_record *record)
12792{
12793 xfree (record->arm_regs);
12794 xfree (record->arm_mems);
12795}
12796
12797
12798 /* Parse the current instruction, and record the values of the registers and
12799 memory that will be changed by the current instruction to "record_arch_list".
12800 Return -1 if something is wrong. */
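/* This is the gdbarch "process_record" hook for ARM (installed via
   set_gdbarch_process_record), so it is called once per executed
   instruction while recording.  */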
12801
12802int
12803arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12804 CORE_ADDR insn_addr)
12805{
12806
12807 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
12808 uint32_t no_of_rec = 0;
12809 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
12810 ULONGEST t_bit = 0, insn_id = 0;
12811
12812 ULONGEST u_regval = 0;
12813
12814 insn_decode_record arm_record;
12815
12816 memset (&arm_record, 0, sizeof (insn_decode_record));
12817 arm_record.regcache = regcache;
12818 arm_record.this_addr = insn_addr;
12819 arm_record.gdbarch = gdbarch;
12820
12821
12822 if (record_debug > 1)
12823 {
12824 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12825 "addr = %s\n",
12826 paddress (gdbarch, arm_record.this_addr));
12827 }
12828
12829 if (extract_arm_insn (&arm_record, 2))
12830 {
12831 if (record_debug)
12832 {
12833 printf_unfiltered (_("Process record: error reading memory at "
12834 "addr %s len = %d.\n"),
12835 paddress (arm_record.gdbarch,
12836 arm_record.this_addr), 2);
12837 }
12838 return -1;
12839 }
12840
12841 /* Check whether the insn is a Thumb or an ARM one. */
12842
12843 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12844 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
12845
12846
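 /* The T bit of the CPSR says whether the processor is currently executing
    ARM or Thumb code.  */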
12847 if (!(u_regval & t_bit))
12848 {
12849 /* We are decoding arm insn. */
12850 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
12851 }
12852 else
12853 {
12854 insn_id = bits (arm_record.arm_insn, 11, 15);
12855 /* Is it a 32-bit Thumb-2 insn? Its leading halfword starts with 0b11101, 0b11110 or 0b11111. */
12856 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
12857 {
12858 ret = decode_insn (&arm_record, THUMB2_RECORD,
12859 THUMB2_INSN_SIZE_BYTES);
12860 }
12861 else
12862 {
12863 /* We are decoding thumb insn. */
12864 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
12865 }
12866 }
12867
12868 if (0 == ret)
12869 {
12870 /* Record registers. */
25ea693b 12871 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
12872 if (arm_record.arm_regs)
12873 {
12874 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
12875 {
12876 if (record_full_arch_list_add_reg
12877 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
12878 ret = -1;
12879 }
12880 }
12881 /* Record memories. */
12882 if (arm_record.arm_mems)
12883 {
12884 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
12885 {
25ea693b 12886 if (record_full_arch_list_add_mem
72508ac0 12887 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 12888 arm_record.arm_mems[no_of_rec].len))
12889 ret = -1;
12890 }
12891 }
12892
25ea693b 12893 if (record_full_arch_list_add_end ())
12894 ret = -1;
12895 }
12896
12897
12898 deallocate_reg_mem (&arm_record);
12899
12900 return ret;
12901}
12902