/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2017 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"

#include <ctype.h>		/* XXX for isupper ().  */

#include "frame.h"
#include "inferior.h"
#include "infrun.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "dis-asm.h"		/* For register styles.  */
#include "disasm.h"
#include "regcache.h"
#include "reggroups.h"
#include "doublest.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2-frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "remote.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observer.h"

#include "arch/arm.h"
#include "arch/arm-get-next-pcs.h"
#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "vec.h"

#include "record.h"
#include "record-full.h"
#include <algorithm>

#include "features/arm/arm-with-m.c"
#include "features/arm/arm-with-m-fpa-layout.c"
#include "features/arm/arm-with-m-vfp-d16.c"
#include "features/arm/arm-with-iwmmxt.c"
#include "features/arm/arm-with-vfpv2.c"
#include "features/arm/arm-with-vfpv3.c"
#include "features/arm/arm-with-neon.c"

#if GDB_SELF_TEST
#include "selftest.h"
#endif

static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

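/* ELF mapping symbols ("$a", "$t" and "$d") mark where ARM code, Thumb
   code and literal data start within a section.  In the records kept
   below, VALUE is the symbol's offset within its section and TYPE is
   the distinguishing character ('a', 't' or 'd').  */
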
struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
  {
    "auto",
    "arm",
    "thumb",
    NULL
  };

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Holds the current set of options to be passed to the disassembler.  */
static char *arm_disassembler_options;

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (char *, int,
					 struct cmd_list_element *);
static void show_disassembly_style_sfunc (struct ui_file *, int,
					  struct cmd_list_element *,
					  const char *);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
  arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

int
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if the processor is currently executing in Thumb mode.  */

int
arm_is_thumb (struct regcache *regcache)
{
  ULONGEST cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Callback for VEC_lower_bound.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  return 0;
}

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure *dsc
    = get_displaced_step_closure_by_addr (memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}

/* Determine if the address specified equals any of these magic return
   values, called EXC_RETURN, defined by the ARM v6-M and v7-M
   architectures.

   From ARMv6-M Reference Manual B1.5.8
   Table B1-5 Exception return behavior

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   From ARMv7-M Reference Manual B1.5.8
   Table B1-8 EXC_RETURN definition of exception return behavior, no FP

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   Table B1-9 EXC_RETURN definition of exception return behavior, with
   FP

   EXC_RETURN    Return To        Return Stack    Frame Type
   0xFFFFFFE1    Handler mode     Main            Extended
   0xFFFFFFE9    Thread mode      Main            Extended
   0xFFFFFFED    Thread mode      Process         Extended
   0xFFFFFFF1    Handler mode     Main            Basic
   0xFFFFFFF9    Thread mode      Main            Basic
   0xFFFFFFFD    Thread mode      Process         Basic

   For more details see "B1.5.8 Exception return behavior"
   in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */

static int
arm_m_addr_is_magic (CORE_ADDR addr)
{
  switch (addr)
    {
      /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
	 the exception return behavior.  */
      case 0xffffffe1:
      case 0xffffffe9:
      case 0xffffffed:
      case 0xfffffff1:
      case 0xfffffff9:
      case 0xfffffffd:
	/* Address is magic.  */
	return 1;

      default:
	/* Address is not magic.  */
	return 0;
    }
}

/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}

530
0d39a070 531/* Return 1 if PC is the start of a compiler helper function which
e0634ccf
UW
532 can be safely ignored during prologue skipping. IS_THUMB is true
533 if the function is known to be a Thumb function due to the way it
534 is being called. */
0d39a070 535static int
e0634ccf 536skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
0d39a070 537{
e0634ccf 538 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7cbd4a93 539 struct bound_minimal_symbol msym;
0d39a070
DJ
540
541 msym = lookup_minimal_symbol_by_pc (pc);
7cbd4a93 542 if (msym.minsym != NULL
77e371c0 543 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
efd66ac6 544 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
e0634ccf 545 {
efd66ac6 546 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
0d39a070 547
e0634ccf
UW
548 /* The GNU linker's Thumb call stub to foo is named
549 __foo_from_thumb. */
550 if (strstr (name, "_from_thumb") != NULL)
551 name += 2;
0d39a070 552
e0634ccf
UW
553 /* On soft-float targets, __truncdfsf2 is called to convert promoted
554 arguments to their argument types in non-prototyped
555 functions. */
61012eef 556 if (startswith (name, "__truncdfsf2"))
e0634ccf 557 return 1;
61012eef 558 if (startswith (name, "__aeabi_d2f"))
e0634ccf 559 return 1;
0d39a070 560
e0634ccf 561 /* Internal functions related to thread-local storage. */
61012eef 562 if (startswith (name, "__tls_get_addr"))
e0634ccf 563 return 1;
61012eef 564 if (startswith (name, "__aeabi_read_tp"))
e0634ccf
UW
565 return 1;
566 }
567 else
568 {
569 /* If we run against a stripped glibc, we may be unable to identify
570 special functions by name. Check for one important case,
571 __aeabi_read_tp, by comparing the *code* against the default
572 implementation (this is hand-written ARM assembler in glibc). */
573
574 if (!is_thumb
198cd59d 575 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
e0634ccf 576 == 0xe3e00a0f /* mov r0, #0xffff0fff */
198cd59d 577 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
e0634ccf
UW
578 == 0xe240f01f) /* sub pc, r0, #31 */
579 return 1;
580 }
ec3d575a 581
0d39a070
DJ
582 return 0;
583}
584
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))

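/* As an illustration of the layout these macros assume: the ARM encoding
   of "movw r0, #0x1234" is 0xe3010234, where imm4 (bits 16-19) is 0x1 and
   imm12 (bits 0-11) is 0x234, so EXTRACT_MOVW_MOVT_IMM_A recovers 0x1234.
   Encoding T splits the same 16-bit value as imm4:i:imm3:imm8 across two
   halfwords, which is what EXTRACT_MOVW_MOVT_IMM_T reassembles.  */
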
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}

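/* Two worked examples of the expansion above: the 12-bit field 0x155 has
   count = 2, so case 1 replicates the low byte into both halfwords and
   yields 0x00550055; the field 0x4ff has count = 9 (>= 8), so the value
   0x80 | 0x7f = 0xff is rotated into place as 0xff << (32 - 9),
   i.e. 0x7f800000.  */
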
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		 /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	 /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}

/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

static CORE_ADDR
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int i;
  pv_t regs[16];
  CORE_ADDR offset;
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));

  while (start < limit)
    {
      unsigned short insn;

      insn = read_code_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	{
	  int regno;
	  int mask;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						       -4);
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	{
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						 -offset);
	}
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
						    (insn & 0xff) << 2);
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
						   bits (insn, 6, 8));
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
						    bits (insn, 0, 7));
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	{
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
	}
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	{
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
	}
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	{
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;
	  pv_t addr;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (stack.store_would_trash (addr))
	    break;

	  stack.store (addr, 4, regs[regno]);
	}
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	{
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);
	  pv_t addr;

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (stack.store_would_trash (addr))
	    break;

	  stack.store (addr, 4, regs[rd]);
	}
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
	;
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	;
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
	;
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
	;
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	   on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	{
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;
	  CORE_ADDR loc;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
	}
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	{
	  unsigned short inst2;

	  inst2 = read_code_unsigned_integer (start + 2, 2,
					      byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	    {
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      CORE_ADDR nextpc;
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
		break;
	    }

	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      pv_t addr = regs[bits (insn, 0, 3)];
	      int regno;

	      if (stack.store_would_trash (addr))
		break;

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		  {
		    addr = pv_add_constant (addr, -4);
		    stack.store (addr, 4, regs[regno]);
		  }

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (insn & 0x0080)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno1]);
	      stack.store (pv_add_constant (addr, 4),
			   4, regs[regno2]);

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (inst2 & 0x0200)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno]);

	      if (inst2 & 0x0100)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr;

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno]);
	    }

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	    ;

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	    }

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	    }

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	    {
	      unsigned int imm
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	    }

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	    {
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	    }

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 11);
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	    }

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 7) << 2;
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	    }

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	    {
	      /* Don't scan past anything that might change control flow.  */
	      break;
	    }
	  else
	    {
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
	    }

	  start += 2;
	}
      else if (thumb_instruction_changes_pc (insn))
	{
	  /* Don't scan past anything that might change control flow.  */
	  break;
	}
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
	}

      start += 2;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  if (cache == NULL)
    return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
    }
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
    }
  else
    {
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
    }

  for (i = 0; i < 16; i++)
    if (stack.find_reg (gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  return unrecognized_pc;
}

/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *DESTREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if instructions are
   not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
				  unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)		/* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}

/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary from different ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		      "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}

/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
   stmfd	sp!, {...,fp,ip,lr,pc}
   [stfe	f7, [sp, #-12]!]
   [stfe	f6, [sp, #-12]!]
   [stfe	f5, [sp, #-12]!]
   [stfe	f4, [sp, #-12]!]
   sub		fp, ip, #nn	@@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}

/* *INDENT-OFF* */
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->       0  local variables (16 bytes)
     SP ->     -12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */
/* *INDENT-ON* */

static void
thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
{
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  prologue_end = std::min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}

1443 otherwise. */
1444
1445static int
1446arm_instruction_restores_sp (unsigned int insn)
1447{
1448 if (bits (insn, 28, 31) != INST_NV)
1449 {
1450 if ((insn & 0x0df0f000) == 0x0080d000
1451 /* ADD SP (register or immediate). */
1452 || (insn & 0x0df0f000) == 0x0040d000
1453 /* SUB SP (register or immediate). */
1454 || (insn & 0x0ffffff0) == 0x01a0d000
1455 /* MOV SP. */
1456 || (insn & 0x0fff0000) == 0x08bd0000
1457 /* POP (LDMIA). */
1458 || (insn & 0x0fff0000) == 0x049d0000)
1459 /* POP of a single register. */
1460 return 1;
1461 }
1462
1463 return 0;
1464}
1465
0d39a070
DJ
1466/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1467 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1468 fill it in. Return the first address not recognized as a prologue
1469 instruction.
eb5492fa 1470
0d39a070
DJ
1471 We recognize all the instructions typically found in ARM prologues,
1472 plus harmless instructions which can be skipped (either for analysis
1473 purposes, or a more restrictive set that can be skipped when finding
1474 the end of the prologue). */
1475
1476static CORE_ADDR
1477arm_analyze_prologue (struct gdbarch *gdbarch,
1478 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1479 struct arm_prologue_cache *cache)
1480{
0d39a070
DJ
1481 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1482 int regno;
1483 CORE_ADDR offset, current_pc;
1484 pv_t regs[ARM_FPS_REGNUM];
0d39a070
DJ
1485 CORE_ADDR unrecognized_pc = 0;
1486
1487 /* Search the prologue looking for instructions that set up the
96baa820 1488 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1489
96baa820
JM
1490 Be careful, however, and if it doesn't look like a prologue,
1491 don't try to scan it. If, for instance, a frameless function
1492 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1493 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1494 and other operations that rely on a knowledge of the stack
0d39a070 1495 traceback. */
d4473757 1496
4be43953
DJ
1497 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1498 regs[regno] = pv_register (regno, 0);
f7b7ed97 1499 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953 1500
94c30b78
MS
1501 for (current_pc = prologue_start;
1502 current_pc < prologue_end;
f43845b3 1503 current_pc += 4)
96baa820 1504 {
e17a4113 1505 unsigned int insn
198cd59d 1506 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1507
94c30b78 1508 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1509 {
4be43953 1510 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1511 continue;
1512 }
0d39a070
DJ
1513 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1514 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1515 {
1516 unsigned imm = insn & 0xff; /* immediate value */
1517 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1518 int rd = bits (insn, 12, 15);
28cd8767 1519 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1520 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1521 continue;
1522 }
0d39a070
DJ
1523 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1524 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1525 {
1526 unsigned imm = insn & 0xff; /* immediate value */
1527 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1528 int rd = bits (insn, 12, 15);
28cd8767 1529 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1530 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1531 continue;
1532 }
0963b4bd
MS
1533 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1534 [sp, #-4]! */
f43845b3 1535 {
f7b7ed97 1536 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953
DJ
1537 break;
1538 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
f7b7ed97
TT
1539 stack.store (regs[ARM_SP_REGNUM], 4,
1540 regs[bits (insn, 12, 15)]);
f43845b3
MS
1541 continue;
1542 }
1543 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1544 /* stmfd sp!, {..., fp, ip, lr, pc}
1545 or
1546 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1547 {
d4473757 1548 int mask = insn & 0xffff;
ed9a39eb 1549
f7b7ed97 1550 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953
DJ
1551 break;
1552
94c30b78 1553 /* Calculate offsets of saved registers. */
34e8f22d 1554 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1555 if (mask & (1 << regno))
1556 {
0963b4bd
MS
1557 regs[ARM_SP_REGNUM]
1558 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
f7b7ed97 1559 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1560 }
1561 }
0d39a070
DJ
1562 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1563 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1564 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1565 {
1566 /* No need to add this to saved_regs -- it's just an arg reg. */
1567 continue;
1568 }
0d39a070
DJ
1569 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1570 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1571 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1572 {
1573 /* No need to add this to saved_regs -- it's just an arg reg. */
1574 continue;
1575 }
0963b4bd
MS
1576 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1577 { registers } */
0d39a070
DJ
1578 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1579 {
1580 /* No need to add this to saved_regs -- it's just arg regs. */
1581 continue;
1582 }
d4473757
KB
1583 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1584 {
94c30b78
MS
1585 unsigned imm = insn & 0xff; /* immediate value */
1586 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1587 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1588 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1589 }
1590 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1591 {
94c30b78
MS
1592 unsigned imm = insn & 0xff; /* immediate value */
1593 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1594 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1595 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1596 }
0963b4bd
MS
1597 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1598 [sp, -#c]! */
2af46ca0 1599 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1600 {
f7b7ed97 1601 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953
DJ
1602 break;
1603
1604 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1605 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
f7b7ed97 1606 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1607 }
0963b4bd
MS
1608 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1609 [sp!] */
2af46ca0 1610 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1611 {
1612 int n_saved_fp_regs;
1613 unsigned int fp_start_reg, fp_bound_reg;
1614
f7b7ed97 1615 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953
DJ
1616 break;
1617
94c30b78 1618 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1619 {
d4473757
KB
1620 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1621 n_saved_fp_regs = 3;
1622 else
1623 n_saved_fp_regs = 1;
96baa820 1624 }
d4473757 1625 else
96baa820 1626 {
d4473757
KB
1627 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1628 n_saved_fp_regs = 2;
1629 else
1630 n_saved_fp_regs = 4;
96baa820 1631 }
d4473757 1632
34e8f22d 1633 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1634 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1635 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1636 {
4be43953 1637 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
f7b7ed97
TT
1638 stack.store (regs[ARM_SP_REGNUM], 12,
1639 regs[fp_start_reg++]);
96baa820 1640 }
c906108c 1641 }
0d39a070
DJ
1642 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1643 {
1644 /* Allow some special function calls when skipping the
1645 prologue; GCC generates these before storing arguments to
1646 the stack. */
1647 CORE_ADDR dest = BranchDest (current_pc, insn);
1648
e0634ccf 1649 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1650 continue;
1651 else
1652 break;
1653 }
d4473757 1654 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1655 break; /* Condition not true, exit early. */
0d39a070
DJ
1656 else if (arm_instruction_changes_pc (insn))
1657 /* Don't scan past anything that might change control flow. */
1658 break;
f303bc3e
YQ
1659 else if (arm_instruction_restores_sp (insn))
1660 {
1661 /* Don't scan past the epilogue. */
1662 break;
1663 }
d19f7eee
UW
1664 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1665 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1666 /* Ignore block loads from the stack, potentially copying
1667 parameters from memory. */
1668 continue;
1669 else if ((insn & 0xfc500000) == 0xe4100000
1670 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1671 /* Similarly ignore single loads from the stack. */
1672 continue;
0d39a070
DJ
1673 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1674 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1675 register instead of the stack. */
d4473757 1676 continue;
0d39a070
DJ
1677 else
1678 {
21daaaaf
YQ
 1679	      /* The optimizer might shove anything into the prologue.  If
 1680	         we are building up the cache (cache != NULL) while scanning
 1681	         the prologue, just skip what we don't recognize and keep
 1682	         scanning, to make the cache as complete as possible.  If we
 1683	         are merely skipping the prologue, however, stop immediately
 1684	         at the first unrecognized instruction.  */
0d39a070 1685 unrecognized_pc = current_pc;
21daaaaf
YQ
1686 if (cache != NULL)
1687 continue;
1688 else
1689 break;
0d39a070 1690 }
c906108c
SS
1691 }
1692
0d39a070
DJ
1693 if (unrecognized_pc == 0)
1694 unrecognized_pc = current_pc;
1695
0d39a070
DJ
1696 if (cache)
1697 {
4072f920
YQ
1698 int framereg, framesize;
1699
1700 /* The frame size is just the distance from the frame register
1701 to the original stack pointer. */
1702 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1703 {
1704 /* Frame pointer is fp. */
1705 framereg = ARM_FP_REGNUM;
1706 framesize = -regs[ARM_FP_REGNUM].k;
1707 }
1708 else
1709 {
1710 /* Try the stack pointer... this is a bit desperate. */
1711 framereg = ARM_SP_REGNUM;
1712 framesize = -regs[ARM_SP_REGNUM].k;
1713 }
1714
0d39a070
DJ
1715 cache->framereg = framereg;
1716 cache->framesize = framesize;
1717
1718 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
f7b7ed97 1719 if (stack.find_reg (gdbarch, regno, &offset))
0d39a070
DJ
1720 cache->saved_regs[regno].addr = offset;
1721 }
1722
1723 if (arm_debug)
1724 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1725 paddress (gdbarch, unrecognized_pc));
4be43953 1726
0d39a070
DJ
1727 return unrecognized_pc;
1728}
1729
1730static void
1731arm_scan_prologue (struct frame_info *this_frame,
1732 struct arm_prologue_cache *cache)
1733{
1734 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1735 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1736 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1737 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1738 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1739
1740 /* Assume there is no frame until proven otherwise. */
1741 cache->framereg = ARM_SP_REGNUM;
1742 cache->framesize = 0;
1743
1744 /* Check for Thumb prologue. */
1745 if (arm_frame_is_thumb (this_frame))
1746 {
1747 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1748 return;
1749 }
1750
1751 /* Find the function prologue. If we can't find the function in
1752 the symbol table, peek in the stack frame to find the PC. */
1753 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1754 &prologue_end))
1755 {
1756 /* One way to find the end of the prologue (which works well
1757 for unoptimized code) is to do the following:
1758
1759 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1760
1761 if (sal.line == 0)
1762 prologue_end = prev_pc;
1763 else if (sal.end < prologue_end)
1764 prologue_end = sal.end;
1765
1766 This mechanism is very accurate so long as the optimizer
1767 doesn't move any instructions from the function body into the
1768 prologue. If this happens, sal.end will be the last
1769 instruction in the first hunk of prologue code just before
1770 the first instruction that the scheduler has moved from
1771 the body to the prologue.
1772
1773 In order to make sure that we scan all of the prologue
1774 instructions, we use a slightly less accurate mechanism which
1775 may scan more than necessary. To help compensate for this
1776 lack of accuracy, the prologue scanning loop below contains
1777 several clauses which'll cause the loop to terminate early if
1778 an implausible prologue instruction is encountered.
1779
1780 The expression
1781
1782 prologue_start + 64
1783
1784 is a suitable endpoint since it accounts for the largest
1785 possible prologue plus up to five instructions inserted by
1786 the scheduler. */
1787
1788 if (prologue_end > prologue_start + 64)
1789 {
1790 prologue_end = prologue_start + 64; /* See above. */
1791 }
1792 }
1793 else
1794 {
1795 /* We have no symbol information. Our only option is to assume this
1796 function has a standard stack frame and the normal frame register.
1797 Then, we can find the value of our frame pointer on entrance to
1798 the callee (or at the present moment if this is the innermost frame).
1799 The value stored there should be the address of the stmfd + 8. */
1800 CORE_ADDR frame_loc;
7913a64c 1801 ULONGEST return_value;
0d39a070
DJ
1802
1803 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
7913a64c
YQ
1804 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1805 &return_value))
0d39a070
DJ
1806 return;
1807 else
1808 {
1809 prologue_start = gdbarch_addr_bits_remove
1810 (gdbarch, return_value) - 8;
1811 prologue_end = prologue_start + 64; /* See above. */
1812 }
1813 }
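 /* For example (illustrative values, ignoring any Thumb bit that
    gdbarch_addr_bits_remove would strip): if the word read at FRAME_LOC
    is 0x00010008, the prologue is assumed to start at 0x00010000 and the
    scan below is capped at 0x00010040.  */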
1814
1815 if (prev_pc < prologue_end)
1816 prologue_end = prev_pc;
1817
1818 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1819}
1820
eb5492fa 1821static struct arm_prologue_cache *
a262aec2 1822arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1823{
eb5492fa
DJ
1824 int reg;
1825 struct arm_prologue_cache *cache;
1826 CORE_ADDR unwound_fp;
c5aa993b 1827
35d5d4ee 1828 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1829 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1830
a262aec2 1831 arm_scan_prologue (this_frame, cache);
848cfffb 1832
a262aec2 1833 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1834 if (unwound_fp == 0)
1835 return cache;
c906108c 1836
4be43953 1837 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1838
eb5492fa
DJ
1839 /* Calculate actual addresses of saved registers using offsets
1840 determined by arm_scan_prologue. */
a262aec2 1841 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1842 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1843 cache->saved_regs[reg].addr += cache->prev_sp;
1844
1845 return cache;
c906108c
SS
1846}
1847
c1ee9414
LM
1848/* Implementation of the stop_reason hook for arm_prologue frames. */
1849
1850static enum unwind_stop_reason
1851arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1852 void **this_cache)
1853{
1854 struct arm_prologue_cache *cache;
1855 CORE_ADDR pc;
1856
1857 if (*this_cache == NULL)
1858 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1859 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1860
1861 /* This is meant to halt the backtrace at "_start". */
1862 pc = get_frame_pc (this_frame);
1863 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1864 return UNWIND_OUTERMOST;
1865
1866 /* If we've hit a wall, stop. */
1867 if (cache->prev_sp == 0)
1868 return UNWIND_OUTERMOST;
1869
1870 return UNWIND_NO_REASON;
1871}
1872
eb5492fa
DJ
1873/* Our frame ID for a normal frame is the current function's starting PC
1874 and the caller's SP when we were called. */
c906108c 1875
148754e5 1876static void
a262aec2 1877arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1878 void **this_cache,
1879 struct frame_id *this_id)
c906108c 1880{
eb5492fa
DJ
1881 struct arm_prologue_cache *cache;
1882 struct frame_id id;
2c404490 1883 CORE_ADDR pc, func;
f079148d 1884
eb5492fa 1885 if (*this_cache == NULL)
a262aec2 1886 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1887 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1888
0e9e9abd
UW
1889 /* Use function start address as part of the frame ID. If we cannot
1890 identify the start address (due to missing symbol information),
1891 fall back to just using the current PC. */
c1ee9414 1892 pc = get_frame_pc (this_frame);
2c404490 1893 func = get_frame_func (this_frame);
0e9e9abd
UW
1894 if (!func)
1895 func = pc;
1896
eb5492fa 1897 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1898 *this_id = id;
c906108c
SS
1899}
1900
a262aec2
DJ
1901static struct value *
1902arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1903 void **this_cache,
a262aec2 1904 int prev_regnum)
24de872b 1905{
24568a2c 1906 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1907 struct arm_prologue_cache *cache;
1908
eb5492fa 1909 if (*this_cache == NULL)
a262aec2 1910 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1911 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1912
eb5492fa 1913 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1914 instead. The prologue may save PC, but it will point into this
1915 frame's prologue, not the next frame's resume location. Also
1916 strip the saved T bit. A valid LR may have the low bit set, but
1917 a valid PC never does. */
eb5492fa 1918 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1919 {
1920 CORE_ADDR lr;
1921
1922 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1923 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1924 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1925 }
24de872b 1926
eb5492fa 1927 /* SP is generally not saved to the stack, but this frame is
a262aec2 1928 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1929 The value was already reconstructed into PREV_SP. */
1930 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1931 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1932
b39cc962
DJ
1933 /* The CPSR may have been changed by the call instruction and by the
1934 called function. The only bit we can reconstruct is the T bit,
1935 by checking the low bit of LR as of the call. This is a reliable
1936 indicator of Thumb-ness except for some ARM v4T pre-interworking
1937 Thumb code, which could get away with a clear low bit as long as
1938 the called function did not use bx. Guess that all other
1939 bits are unchanged; the condition flags are presumably lost,
1940 but the processor status is likely valid. */
1941 if (prev_regnum == ARM_PS_REGNUM)
1942 {
1943 CORE_ADDR lr, cpsr;
9779414d 1944 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1945
1946 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1947 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1948 if (IS_THUMB_ADDR (lr))
9779414d 1949 cpsr |= t_bit;
b39cc962 1950 else
9779414d 1951 cpsr &= ~t_bit;
b39cc962
DJ
1952 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1953 }
1954
a262aec2
DJ
1955 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1956 prev_regnum);
eb5492fa
DJ
1957}
1958
1959struct frame_unwind arm_prologue_unwind = {
1960 NORMAL_FRAME,
c1ee9414 1961 arm_prologue_unwind_stop_reason,
eb5492fa 1962 arm_prologue_this_id,
a262aec2
DJ
1963 arm_prologue_prev_register,
1964 NULL,
1965 default_frame_sniffer
eb5492fa
DJ
1966};
1967
0e9e9abd
UW
1968/* Maintain a list of ARM exception table entries per objfile, similar to the
1969 list of mapping symbols. We only cache entries for standard ARM-defined
1970 personality routines; the cache will contain only the frame unwinding
1971 instructions associated with the entry (not the descriptors). */
1972
1973static const struct objfile_data *arm_exidx_data_key;
1974
1975struct arm_exidx_entry
1976{
1977 bfd_vma addr;
1978 gdb_byte *entry;
1979};
1980typedef struct arm_exidx_entry arm_exidx_entry_s;
1981DEF_VEC_O(arm_exidx_entry_s);
1982
1983struct arm_exidx_data
1984{
1985 VEC(arm_exidx_entry_s) **section_maps;
1986};
1987
1988static void
1989arm_exidx_data_free (struct objfile *objfile, void *arg)
1990{
9a3c8263 1991 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
1992 unsigned int i;
1993
1994 for (i = 0; i < objfile->obfd->section_count; i++)
1995 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1996}
1997
1998static inline int
1999arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2000 const struct arm_exidx_entry *rhs)
2001{
2002 return lhs->addr < rhs->addr;
2003}
2004
2005static struct obj_section *
2006arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2007{
2008 struct obj_section *osect;
2009
2010 ALL_OBJFILE_OSECTIONS (objfile, osect)
2011 if (bfd_get_section_flags (objfile->obfd,
2012 osect->the_bfd_section) & SEC_ALLOC)
2013 {
2014 bfd_vma start, size;
2015 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2016 size = bfd_get_section_size (osect->the_bfd_section);
2017
2018 if (start <= vma && vma < start + size)
2019 return osect;
2020 }
2021
2022 return NULL;
2023}
2024
2025/* Parse contents of exception table and exception index sections
2026 of OBJFILE, and fill in the exception table entry cache.
2027
2028 For each entry that refers to a standard ARM-defined personality
2029 routine, extract the frame unwinding instructions (from either
2030 the index or the table section). The unwinding instructions
2031 are normalized by:
2032 - extracting them from the rest of the table data
2033 - converting to host endianness
2034 - appending the implicit 0xb0 ("Finish") code
2035
2036 The extracted and normalized instructions are stored for later
2037 retrieval by the arm_find_exidx_entry routine. */
2038
2039static void
2040arm_exidx_new_objfile (struct objfile *objfile)
2041{
3bb47e8b 2042 struct cleanup *cleanups;
0e9e9abd
UW
2043 struct arm_exidx_data *data;
2044 asection *exidx, *extab;
2045 bfd_vma exidx_vma = 0, extab_vma = 0;
2046 bfd_size_type exidx_size = 0, extab_size = 0;
2047 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2048 LONGEST i;
2049
2050 /* If we've already touched this file, do nothing. */
2051 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2052 return;
3bb47e8b 2053 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2054
2055 /* Read contents of exception table and index. */
a5eda10c 2056 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2057 if (exidx)
2058 {
2059 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2060 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2061 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2062 make_cleanup (xfree, exidx_data);
2063
2064 if (!bfd_get_section_contents (objfile->obfd, exidx,
2065 exidx_data, 0, exidx_size))
2066 {
2067 do_cleanups (cleanups);
2068 return;
2069 }
2070 }
2071
2072 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2073 if (extab)
2074 {
2075 extab_vma = bfd_section_vma (objfile->obfd, extab);
2076 extab_size = bfd_get_section_size (extab);
224c3ddb 2077 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2078 make_cleanup (xfree, extab_data);
2079
2080 if (!bfd_get_section_contents (objfile->obfd, extab,
2081 extab_data, 0, extab_size))
2082 {
2083 do_cleanups (cleanups);
2084 return;
2085 }
2086 }
2087
2088 /* Allocate exception table data structure. */
2089 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2090 set_objfile_data (objfile, arm_exidx_data_key, data);
2091 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2092 objfile->obfd->section_count,
2093 VEC(arm_exidx_entry_s) *);
2094
2095 /* Fill in exception table. */
2096 for (i = 0; i < exidx_size / 8; i++)
2097 {
2098 struct arm_exidx_entry new_exidx_entry;
2099 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2100 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2101 bfd_vma addr = 0, word = 0;
2102 int n_bytes = 0, n_words = 0;
2103 struct obj_section *sec;
2104 gdb_byte *entry = NULL;
2105
2106 /* Extract address of start of function. */
2107 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2108 idx += exidx_vma + i * 8;
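 /* The first word of each 8-byte index entry is a 31-bit place-relative
    offset (prel31); the two lines above sign-extend it and add the
    entry's own address.  For example (illustrative), a word of
    0x7ffffff0 in an entry located at 0x10020 encodes an offset of -16,
    so the function starts at 0x10010.  */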
2109
2110 /* Find section containing function and compute section offset. */
2111 sec = arm_obj_section_from_vma (objfile, idx);
2112 if (sec == NULL)
2113 continue;
2114 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2115
2116 /* Determine address of exception table entry. */
2117 if (val == 1)
2118 {
2119 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2120 }
2121 else if ((val & 0xff000000) == 0x80000000)
2122 {
2123 /* Exception table entry embedded in .ARM.exidx
2124 -- must be short form. */
2125 word = val;
2126 n_bytes = 3;
2127 }
2128 else if (!(val & 0x80000000))
2129 {
2130 /* Exception table entry in .ARM.extab. */
2131 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2132 addr += exidx_vma + i * 8 + 4;
2133
2134 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2135 {
2136 word = bfd_h_get_32 (objfile->obfd,
2137 extab_data + addr - extab_vma);
2138 addr += 4;
2139
2140 if ((word & 0xff000000) == 0x80000000)
2141 {
2142 /* Short form. */
2143 n_bytes = 3;
2144 }
2145 else if ((word & 0xff000000) == 0x81000000
2146 || (word & 0xff000000) == 0x82000000)
2147 {
2148 /* Long form. */
2149 n_bytes = 2;
2150 n_words = ((word >> 16) & 0xff);
2151 }
2152 else if (!(word & 0x80000000))
2153 {
2154 bfd_vma pers;
2155 struct obj_section *pers_sec;
2156 int gnu_personality = 0;
2157
2158 /* Custom personality routine. */
2159 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2160 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2161
2162 /* Check whether we've got one of the variants of the
2163 GNU personality routines. */
2164 pers_sec = arm_obj_section_from_vma (objfile, pers);
2165 if (pers_sec)
2166 {
2167 static const char *personality[] =
2168 {
2169 "__gcc_personality_v0",
2170 "__gxx_personality_v0",
2171 "__gcj_personality_v0",
2172 "__gnu_objc_personality_v0",
2173 NULL
2174 };
2175
2176 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2177 int k;
2178
2179 for (k = 0; personality[k]; k++)
2180 if (lookup_minimal_symbol_by_pc_name
2181 (pc, personality[k], objfile))
2182 {
2183 gnu_personality = 1;
2184 break;
2185 }
2186 }
2187
2188 /* If so, the next word contains a word count in the high
2189 byte, followed by the same unwind instructions as the
2190 pre-defined forms. */
2191 if (gnu_personality
2192 && addr + 4 <= extab_vma + extab_size)
2193 {
2194 word = bfd_h_get_32 (objfile->obfd,
2195 extab_data + addr - extab_vma);
2196 addr += 4;
2197 n_bytes = 3;
2198 n_words = ((word >> 24) & 0xff);
2199 }
2200 }
2201 }
2202 }
2203
2204 /* Sanity check address. */
2205 if (n_words)
2206 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2207 n_words = n_bytes = 0;
2208
2209 /* The unwind instructions reside in WORD (only the N_BYTES least
2210 significant bytes are valid), followed by N_WORDS words in the
2211 extab section starting at ADDR. */
2212 if (n_bytes || n_words)
2213 {
224c3ddb
SM
2214 gdb_byte *p = entry
2215 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2216 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2217
2218 while (n_bytes--)
2219 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2220
2221 while (n_words--)
2222 {
2223 word = bfd_h_get_32 (objfile->obfd,
2224 extab_data + addr - extab_vma);
2225 addr += 4;
2226
2227 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2228 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2229 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2230 *p++ = (gdb_byte) (word & 0xff);
2231 }
2232
2233 /* Implied "Finish" to terminate the list. */
2234 *p++ = 0xb0;
2235 }
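 /* For example (illustrative), an index word of 0x80b0b0b0 is a
    short-form entry whose three unwind bytes are all 0xb0 ("Finish");
    with the implied terminator appended, the cached entry becomes the
    four bytes b0 b0 b0 b0.  */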
2236
2237 /* Push entry onto vector. They are guaranteed to always
2238 appear in order of increasing addresses. */
2239 new_exidx_entry.addr = idx;
2240 new_exidx_entry.entry = entry;
2241 VEC_safe_push (arm_exidx_entry_s,
2242 data->section_maps[sec->the_bfd_section->index],
2243 &new_exidx_entry);
2244 }
2245
2246 do_cleanups (cleanups);
2247}
2248
2249/* Search for the exception table entry covering MEMADDR. If one is found,
2250 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2251 set *START to the start of the region covered by this entry. */
2252
2253static gdb_byte *
2254arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2255{
2256 struct obj_section *sec;
2257
2258 sec = find_pc_section (memaddr);
2259 if (sec != NULL)
2260 {
2261 struct arm_exidx_data *data;
2262 VEC(arm_exidx_entry_s) *map;
2263 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2264 unsigned int idx;
2265
9a3c8263
SM
2266 data = ((struct arm_exidx_data *)
2267 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2268 if (data != NULL)
2269 {
2270 map = data->section_maps[sec->the_bfd_section->index];
2271 if (!VEC_empty (arm_exidx_entry_s, map))
2272 {
2273 struct arm_exidx_entry *map_sym;
2274
2275 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2276 arm_compare_exidx_entries);
2277
2278 /* VEC_lower_bound finds the earliest ordered insertion
2279 point. If the following symbol starts at this exact
2280 address, we use that; otherwise, the preceding
2281 exception table entry covers this address. */
2282 if (idx < VEC_length (arm_exidx_entry_s, map))
2283 {
2284 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2285 if (map_sym->addr == map_key.addr)
2286 {
2287 if (start)
2288 *start = map_sym->addr + obj_section_addr (sec);
2289 return map_sym->entry;
2290 }
2291 }
2292
2293 if (idx > 0)
2294 {
2295 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2296 if (start)
2297 *start = map_sym->addr + obj_section_addr (sec);
2298 return map_sym->entry;
2299 }
2300 }
2301 }
2302 }
2303
2304 return NULL;
2305}
2306
2307/* Given the current frame THIS_FRAME, and its associated frame unwinding
2308 instruction list from the ARM exception table entry ENTRY, allocate and
2309 return a prologue cache structure describing how to unwind this frame.
2310
2311 Return NULL if the unwinding instruction list contains a "spare",
2312 "reserved" or "refuse to unwind" instruction as defined in section
2313 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2314 for the ARM Architecture" document. */
2315
2316static struct arm_prologue_cache *
2317arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2318{
2319 CORE_ADDR vsp = 0;
2320 int vsp_valid = 0;
2321
2322 struct arm_prologue_cache *cache;
2323 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2324 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2325
2326 for (;;)
2327 {
2328 gdb_byte insn;
2329
 2330 /* Whenever we reload SP, we have to retrieve its actual
 2331 value in the current frame. */
2332 if (!vsp_valid)
2333 {
2334 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2335 {
2336 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2337 vsp = get_frame_register_unsigned (this_frame, reg);
2338 }
2339 else
2340 {
2341 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2342 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2343 }
2344
2345 vsp_valid = 1;
2346 }
2347
2348 /* Decode next unwind instruction. */
2349 insn = *entry++;
2350
2351 if ((insn & 0xc0) == 0)
2352 {
2353 int offset = insn & 0x3f;
2354 vsp += (offset << 2) + 4;
2355 }
2356 else if ((insn & 0xc0) == 0x40)
2357 {
2358 int offset = insn & 0x3f;
2359 vsp -= (offset << 2) + 4;
2360 }
2361 else if ((insn & 0xf0) == 0x80)
2362 {
2363 int mask = ((insn & 0xf) << 8) | *entry++;
2364 int i;
2365
2366 /* The special case of an all-zero mask identifies
2367 "Refuse to unwind". We return NULL to fall back
2368 to the prologue analyzer. */
2369 if (mask == 0)
2370 return NULL;
2371
2372 /* Pop registers r4..r15 under mask. */
2373 for (i = 0; i < 12; i++)
2374 if (mask & (1 << i))
2375 {
2376 cache->saved_regs[4 + i].addr = vsp;
2377 vsp += 4;
2378 }
2379
2380 /* Special-case popping SP -- we need to reload vsp. */
2381 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2382 vsp_valid = 0;
2383 }
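 /* For example (illustrative), the unwind bytes 0x84 0x08 give mask
    0x408 (bits 3 and 10), i.e. "pop {r7, lr}": r7 is recorded at vsp and
    LR at vsp + 4, and vsp advances by 8.  Since bit 9 (which would
    denote SP) is clear, vsp does not need reloading.  */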
2384 else if ((insn & 0xf0) == 0x90)
2385 {
2386 int reg = insn & 0xf;
2387
2388 /* Reserved cases. */
2389 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2390 return NULL;
2391
2392 /* Set SP from another register and mark VSP for reload. */
2393 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2394 vsp_valid = 0;
2395 }
2396 else if ((insn & 0xf0) == 0xa0)
2397 {
2398 int count = insn & 0x7;
2399 int pop_lr = (insn & 0x8) != 0;
2400 int i;
2401
2402 /* Pop r4..r[4+count]. */
2403 for (i = 0; i <= count; i++)
2404 {
2405 cache->saved_regs[4 + i].addr = vsp;
2406 vsp += 4;
2407 }
2408
2409 /* If indicated by flag, pop LR as well. */
2410 if (pop_lr)
2411 {
2412 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2413 vsp += 4;
2414 }
2415 }
2416 else if (insn == 0xb0)
2417 {
 2418 /* We could only have updated PC by popping into it; if so, it
 2419 will show up as an address. Otherwise, copy LR into PC. */
2420 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2421 cache->saved_regs[ARM_PC_REGNUM]
2422 = cache->saved_regs[ARM_LR_REGNUM];
2423
2424 /* We're done. */
2425 break;
2426 }
2427 else if (insn == 0xb1)
2428 {
2429 int mask = *entry++;
2430 int i;
2431
2432 /* All-zero mask and mask >= 16 is "spare". */
2433 if (mask == 0 || mask >= 16)
2434 return NULL;
2435
2436 /* Pop r0..r3 under mask. */
2437 for (i = 0; i < 4; i++)
2438 if (mask & (1 << i))
2439 {
2440 cache->saved_regs[i].addr = vsp;
2441 vsp += 4;
2442 }
2443 }
2444 else if (insn == 0xb2)
2445 {
2446 ULONGEST offset = 0;
2447 unsigned shift = 0;
2448
2449 do
2450 {
2451 offset |= (*entry & 0x7f) << shift;
2452 shift += 7;
2453 }
2454 while (*entry++ & 0x80);
2455
2456 vsp += 0x204 + (offset << 2);
2457 }
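 /* The operand is a ULEB128 value.  For example (illustrative), the
    bytes 0x88 0x01 decode to offset 0x88 (136), so vsp is advanced by
    0x204 + (136 << 2) = 1060 bytes.  */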
2458 else if (insn == 0xb3)
2459 {
2460 int start = *entry >> 4;
2461 int count = (*entry++) & 0xf;
2462 int i;
2463
2464 /* Only registers D0..D15 are valid here. */
2465 if (start + count >= 16)
2466 return NULL;
2467
2468 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2469 for (i = 0; i <= count; i++)
2470 {
2471 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2472 vsp += 8;
2473 }
2474
2475 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2476 vsp += 4;
2477 }
2478 else if ((insn & 0xf8) == 0xb8)
2479 {
2480 int count = insn & 0x7;
2481 int i;
2482
2483 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2484 for (i = 0; i <= count; i++)
2485 {
2486 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2487 vsp += 8;
2488 }
2489
2490 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2491 vsp += 4;
2492 }
2493 else if (insn == 0xc6)
2494 {
2495 int start = *entry >> 4;
2496 int count = (*entry++) & 0xf;
2497 int i;
2498
2499 /* Only registers WR0..WR15 are valid. */
2500 if (start + count >= 16)
2501 return NULL;
2502
2503 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2504 for (i = 0; i <= count; i++)
2505 {
2506 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2507 vsp += 8;
2508 }
2509 }
2510 else if (insn == 0xc7)
2511 {
2512 int mask = *entry++;
2513 int i;
2514
2515 /* All-zero mask and mask >= 16 is "spare". */
2516 if (mask == 0 || mask >= 16)
2517 return NULL;
2518
2519 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2520 for (i = 0; i < 4; i++)
2521 if (mask & (1 << i))
2522 {
2523 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2524 vsp += 4;
2525 }
2526 }
2527 else if ((insn & 0xf8) == 0xc0)
2528 {
2529 int count = insn & 0x7;
2530 int i;
2531
2532 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2533 for (i = 0; i <= count; i++)
2534 {
2535 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2536 vsp += 8;
2537 }
2538 }
2539 else if (insn == 0xc8)
2540 {
2541 int start = *entry >> 4;
2542 int count = (*entry++) & 0xf;
2543 int i;
2544
2545 /* Only registers D0..D31 are valid. */
2546 if (start + count >= 16)
2547 return NULL;
2548
2549 /* Pop VFP double-precision registers
2550 D[16+start]..D[16+start+count]. */
2551 for (i = 0; i <= count; i++)
2552 {
2553 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2554 vsp += 8;
2555 }
2556 }
2557 else if (insn == 0xc9)
2558 {
2559 int start = *entry >> 4;
2560 int count = (*entry++) & 0xf;
2561 int i;
2562
2563 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2564 for (i = 0; i <= count; i++)
2565 {
2566 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2567 vsp += 8;
2568 }
2569 }
2570 else if ((insn & 0xf8) == 0xd0)
2571 {
2572 int count = insn & 0x7;
2573 int i;
2574
2575 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2576 for (i = 0; i <= count; i++)
2577 {
2578 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2579 vsp += 8;
2580 }
2581 }
2582 else
2583 {
2584 /* Everything else is "spare". */
2585 return NULL;
2586 }
2587 }
2588
2589 /* If we restore SP from a register, assume this was the frame register.
2590 Otherwise just fall back to SP as frame register. */
2591 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2592 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2593 else
2594 cache->framereg = ARM_SP_REGNUM;
2595
2596 /* Determine offset to previous frame. */
2597 cache->framesize
2598 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2599
2600 /* We already got the previous SP. */
2601 cache->prev_sp = vsp;
2602
2603 return cache;
2604}
2605
2606/* Unwinding via ARM exception table entries. Note that the sniffer
2607 already computes a filled-in prologue cache, which is then used
2608 with the same arm_prologue_this_id and arm_prologue_prev_register
2609 routines also used for prologue-parsing based unwinding. */
2610
2611static int
2612arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2613 struct frame_info *this_frame,
2614 void **this_prologue_cache)
2615{
2616 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2617 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2618 CORE_ADDR addr_in_block, exidx_region, func_start;
2619 struct arm_prologue_cache *cache;
2620 gdb_byte *entry;
2621
2622 /* See if we have an ARM exception table entry covering this address. */
2623 addr_in_block = get_frame_address_in_block (this_frame);
2624 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2625 if (!entry)
2626 return 0;
2627
2628 /* The ARM exception table does not describe unwind information
2629 for arbitrary PC values, but is guaranteed to be correct only
2630 at call sites. We have to decide here whether we want to use
2631 ARM exception table information for this frame, or fall back
2632 to using prologue parsing. (Note that if we have DWARF CFI,
2633 this sniffer isn't even called -- CFI is always preferred.)
2634
2635 Before we make this decision, however, we check whether we
2636 actually have *symbol* information for the current frame.
2637 If not, prologue parsing would not work anyway, so we might
2638 as well use the exception table and hope for the best. */
2639 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2640 {
2641 int exc_valid = 0;
2642
2643 /* If the next frame is "normal", we are at a call site in this
2644 frame, so exception information is guaranteed to be valid. */
2645 if (get_next_frame (this_frame)
2646 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2647 exc_valid = 1;
2648
2649 /* We also assume exception information is valid if we're currently
2650 blocked in a system call. The system library is supposed to
d9311bfa
AT
2651 ensure this, so that e.g. pthread cancellation works. */
2652 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2653 {
7913a64c 2654 ULONGEST insn;
416dc9c6 2655
7913a64c
YQ
2656 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2657 2, byte_order_for_code, &insn)
d9311bfa
AT
2658 && (insn & 0xff00) == 0xdf00 /* svc */)
2659 exc_valid = 1;
0e9e9abd 2660 }
d9311bfa
AT
2661 else
2662 {
7913a64c 2663 ULONGEST insn;
416dc9c6 2664
7913a64c
YQ
2665 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2666 4, byte_order_for_code, &insn)
d9311bfa
AT
2667 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2668 exc_valid = 1;
2669 }
2670
0e9e9abd
UW
2671 /* Bail out if we don't know that exception information is valid. */
2672 if (!exc_valid)
2673 return 0;
2674
2675 /* The ARM exception index does not mark the *end* of the region
2676 covered by the entry, and some functions will not have any entry.
2677 To correctly recognize the end of the covered region, the linker
2678 should have inserted dummy records with a CANTUNWIND marker.
2679
2680 Unfortunately, current versions of GNU ld do not reliably do
2681 this, and thus we may have found an incorrect entry above.
2682 As a (temporary) sanity check, we only use the entry if it
2683 lies *within* the bounds of the function. Note that this check
2684 might reject perfectly valid entries that just happen to cover
2685 multiple functions; therefore this check ought to be removed
2686 once the linker is fixed. */
2687 if (func_start > exidx_region)
2688 return 0;
2689 }
2690
2691 /* Decode the list of unwinding instructions into a prologue cache.
2692 Note that this may fail due to e.g. a "refuse to unwind" code. */
2693 cache = arm_exidx_fill_cache (this_frame, entry);
2694 if (!cache)
2695 return 0;
2696
2697 *this_prologue_cache = cache;
2698 return 1;
2699}
2700
2701struct frame_unwind arm_exidx_unwind = {
2702 NORMAL_FRAME,
8fbca658 2703 default_frame_unwind_stop_reason,
0e9e9abd
UW
2704 arm_prologue_this_id,
2705 arm_prologue_prev_register,
2706 NULL,
2707 arm_exidx_unwind_sniffer
2708};
2709
779aa56f
YQ
2710static struct arm_prologue_cache *
2711arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2712{
2713 struct arm_prologue_cache *cache;
779aa56f
YQ
2714 int reg;
2715
2716 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2717 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2718
 2719 /* Still rely on the offsets calculated from the prologue scan. */
2720 arm_scan_prologue (this_frame, cache);
2721
 2722 /* Since we are in the epilogue, the SP has been restored. */
2723 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2724
2725 /* Calculate actual addresses of saved registers using offsets
2726 determined by arm_scan_prologue. */
2727 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2728 if (trad_frame_addr_p (cache->saved_regs, reg))
2729 cache->saved_regs[reg].addr += cache->prev_sp;
2730
2731 return cache;
2732}
2733
2734/* Implementation of function hook 'this_id' in
 2735 'struct frame_unwind' for epilogue unwinder. */
2736
2737static void
2738arm_epilogue_frame_this_id (struct frame_info *this_frame,
2739 void **this_cache,
2740 struct frame_id *this_id)
2741{
2742 struct arm_prologue_cache *cache;
2743 CORE_ADDR pc, func;
2744
2745 if (*this_cache == NULL)
2746 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2747 cache = (struct arm_prologue_cache *) *this_cache;
2748
2749 /* Use function start address as part of the frame ID. If we cannot
2750 identify the start address (due to missing symbol information),
2751 fall back to just using the current PC. */
2752 pc = get_frame_pc (this_frame);
2753 func = get_frame_func (this_frame);
fb3f3d25 2754 if (func == 0)
779aa56f
YQ
2755 func = pc;
2756
2757 (*this_id) = frame_id_build (cache->prev_sp, pc);
2758}
2759
2760/* Implementation of function hook 'prev_register' in
 2761 'struct frame_unwind' for epilogue unwinder. */
2762
2763static struct value *
2764arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2765 void **this_cache, int regnum)
2766{
779aa56f
YQ
2767 if (*this_cache == NULL)
2768 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2769
2770 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2771}
2772
2773static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2774 CORE_ADDR pc);
2775static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2776 CORE_ADDR pc);
2777
2778/* Implementation of function hook 'sniffer' in
 2779 'struct frame_unwind' for epilogue unwinder. */
2780
2781static int
2782arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2783 struct frame_info *this_frame,
2784 void **this_prologue_cache)
2785{
2786 if (frame_relative_level (this_frame) == 0)
2787 {
2788 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2789 CORE_ADDR pc = get_frame_pc (this_frame);
2790
2791 if (arm_frame_is_thumb (this_frame))
2792 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2793 else
2794 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2795 }
2796 else
2797 return 0;
2798}
2799
2800/* Frame unwinder from epilogue. */
2801
2802static const struct frame_unwind arm_epilogue_frame_unwind =
2803{
2804 NORMAL_FRAME,
2805 default_frame_unwind_stop_reason,
2806 arm_epilogue_frame_this_id,
2807 arm_epilogue_frame_prev_register,
2808 NULL,
2809 arm_epilogue_frame_sniffer,
2810};
2811
80d8d390
YQ
2812/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2813 trampoline, return the target PC. Otherwise return 0.
2814
2815 void call0a (char c, short s, int i, long l) {}
2816
2817 int main (void)
2818 {
2819 (*pointer_to_call0a) (c, s, i, l);
2820 }
2821
2822 Instead of calling a stub library function _call_via_xx (xx is
2823 the register name), GCC may inline the trampoline in the object
2824 file as below (register r2 has the address of call0a).
2825
2826 .global main
2827 .type main, %function
2828 ...
2829 bl .L1
2830 ...
2831 .size main, .-main
2832
2833 .L1:
2834 bx r2
2835
2836 The trampoline 'bx r2' doesn't belong to main. */
2837
2838static CORE_ADDR
2839arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2840{
 2841 /* The heuristic for recognizing such a trampoline is that FRAME is
 2842 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2843 if (arm_frame_is_thumb (frame))
2844 {
2845 gdb_byte buf[2];
2846
2847 if (target_read_memory (pc, buf, 2) == 0)
2848 {
2849 struct gdbarch *gdbarch = get_frame_arch (frame);
2850 enum bfd_endian byte_order_for_code
2851 = gdbarch_byte_order_for_code (gdbarch);
2852 uint16_t insn
2853 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2854
2855 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2856 {
2857 CORE_ADDR dest
2858 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2859
2860 /* Clear the LSB so that gdb core sets step-resume
2861 breakpoint at the right address. */
2862 return UNMAKE_THUMB_ADDR (dest);
2863 }
2864 }
2865 }
2866
2867 return 0;
2868}
2869
909cf6ea 2870static struct arm_prologue_cache *
a262aec2 2871arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2872{
909cf6ea 2873 struct arm_prologue_cache *cache;
909cf6ea 2874
35d5d4ee 2875 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2876 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2877
a262aec2 2878 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2879
2880 return cache;
2881}
2882
2883/* Our frame ID for a stub frame is the current SP and LR. */
2884
2885static void
a262aec2 2886arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2887 void **this_cache,
2888 struct frame_id *this_id)
2889{
2890 struct arm_prologue_cache *cache;
2891
2892 if (*this_cache == NULL)
a262aec2 2893 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2894 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2895
a262aec2 2896 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2897}
2898
a262aec2
DJ
2899static int
2900arm_stub_unwind_sniffer (const struct frame_unwind *self,
2901 struct frame_info *this_frame,
2902 void **this_prologue_cache)
909cf6ea 2903{
93d42b30 2904 CORE_ADDR addr_in_block;
948f8e3d 2905 gdb_byte dummy[4];
18d18ac8
YQ
2906 CORE_ADDR pc, start_addr;
2907 const char *name;
909cf6ea 2908
a262aec2 2909 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2910 pc = get_frame_pc (this_frame);
3e5d3a5a 2911 if (in_plt_section (addr_in_block)
fc36e839
DE
 2912 /* We also use the stub unwinder if the target memory is unreadable,
 2913 to avoid having the prologue unwinder try to read it. */
18d18ac8
YQ
2914 || target_read_memory (pc, dummy, 4) != 0)
2915 return 1;
2916
2917 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2918 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2919 return 1;
909cf6ea 2920
a262aec2 2921 return 0;
909cf6ea
DJ
2922}
2923
a262aec2
DJ
2924struct frame_unwind arm_stub_unwind = {
2925 NORMAL_FRAME,
8fbca658 2926 default_frame_unwind_stop_reason,
a262aec2
DJ
2927 arm_stub_this_id,
2928 arm_prologue_prev_register,
2929 NULL,
2930 arm_stub_unwind_sniffer
2931};
2932
2ae28aa9
YQ
2933/* Put here the code to store, into CACHE->saved_regs, the addresses
2934 of the saved registers of frame described by THIS_FRAME. CACHE is
2935 returned. */
2936
2937static struct arm_prologue_cache *
2938arm_m_exception_cache (struct frame_info *this_frame)
2939{
2940 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2941 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2942 struct arm_prologue_cache *cache;
2943 CORE_ADDR unwound_sp;
2944 LONGEST xpsr;
2945
2946 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2947 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2948
2949 unwound_sp = get_frame_register_unsigned (this_frame,
2950 ARM_SP_REGNUM);
2951
2952 /* The hardware saves eight 32-bit words, comprising xPSR,
2953 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2954 "B1.5.6 Exception entry behavior" in
2955 "ARMv7-M Architecture Reference Manual". */
2956 cache->saved_regs[0].addr = unwound_sp;
2957 cache->saved_regs[1].addr = unwound_sp + 4;
2958 cache->saved_regs[2].addr = unwound_sp + 8;
2959 cache->saved_regs[3].addr = unwound_sp + 12;
2960 cache->saved_regs[12].addr = unwound_sp + 16;
2961 cache->saved_regs[14].addr = unwound_sp + 20;
2962 cache->saved_regs[15].addr = unwound_sp + 24;
2963 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2964
2965 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2966 aligner between the top of the 32-byte stack frame and the
2967 previous context's stack pointer. */
2968 cache->prev_sp = unwound_sp + 32;
2969 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2970 && (xpsr & (1 << 9)) != 0)
2971 cache->prev_sp += 4;
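 /* For example (illustrative addresses): with UNWOUND_SP at 0x2000ffc8,
    the saved xPSR sits at 0x2000ffe4 (UNWOUND_SP + 28); the previous SP
    is 0x2000ffe8, or 0x2000ffec when the aligner bit (bit 9) is set.  */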
2972
2973 return cache;
2974}
2975
2976/* Implementation of function hook 'this_id' in
 2977 'struct frame_unwind'. */
2978
2979static void
2980arm_m_exception_this_id (struct frame_info *this_frame,
2981 void **this_cache,
2982 struct frame_id *this_id)
2983{
2984 struct arm_prologue_cache *cache;
2985
2986 if (*this_cache == NULL)
2987 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2988 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2989
2990 /* Our frame ID for a stub frame is the current SP and LR. */
2991 *this_id = frame_id_build (cache->prev_sp,
2992 get_frame_pc (this_frame));
2993}
2994
2995/* Implementation of function hook 'prev_register' in
 2996 'struct frame_unwind'. */
2997
2998static struct value *
2999arm_m_exception_prev_register (struct frame_info *this_frame,
3000 void **this_cache,
3001 int prev_regnum)
3002{
2ae28aa9
YQ
3003 struct arm_prologue_cache *cache;
3004
3005 if (*this_cache == NULL)
3006 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3007 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3008
3009 /* The value was already reconstructed into PREV_SP. */
3010 if (prev_regnum == ARM_SP_REGNUM)
3011 return frame_unwind_got_constant (this_frame, prev_regnum,
3012 cache->prev_sp);
3013
3014 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3015 prev_regnum);
3016}
3017
3018/* Implementation of function hook 'sniffer' in
 3019 'struct frame_unwind'. */
3020
3021static int
3022arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3023 struct frame_info *this_frame,
3024 void **this_prologue_cache)
3025{
3026 CORE_ADDR this_pc = get_frame_pc (this_frame);
3027
3028 /* No need to check is_m; this sniffer is only registered for
3029 M-profile architectures. */
3030
ca90e760
FH
3031 /* Check if exception frame returns to a magic PC value. */
3032 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3033}
3034
3035/* Frame unwinder for M-profile exceptions. */
3036
3037struct frame_unwind arm_m_exception_unwind =
3038{
3039 SIGTRAMP_FRAME,
3040 default_frame_unwind_stop_reason,
3041 arm_m_exception_this_id,
3042 arm_m_exception_prev_register,
3043 NULL,
3044 arm_m_exception_unwind_sniffer
3045};
3046
24de872b 3047static CORE_ADDR
a262aec2 3048arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3049{
3050 struct arm_prologue_cache *cache;
3051
eb5492fa 3052 if (*this_cache == NULL)
a262aec2 3053 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3054 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3055
4be43953 3056 return cache->prev_sp - cache->framesize;
24de872b
DJ
3057}
3058
eb5492fa
DJ
3059struct frame_base arm_normal_base = {
3060 &arm_prologue_unwind,
3061 arm_normal_frame_base,
3062 arm_normal_frame_base,
3063 arm_normal_frame_base
3064};
3065
a262aec2 3066/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3067 dummy frame. The frame ID's base needs to match the TOS value
3068 saved by save_dummy_frame_tos() and returned from
3069 arm_push_dummy_call, and the PC needs to match the dummy frame's
3070 breakpoint. */
c906108c 3071
eb5492fa 3072static struct frame_id
a262aec2 3073arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3074{
0963b4bd
MS
3075 return frame_id_build (get_frame_register_unsigned (this_frame,
3076 ARM_SP_REGNUM),
a262aec2 3077 get_frame_pc (this_frame));
eb5492fa 3078}
c3b4394c 3079
eb5492fa
DJ
3080/* Given THIS_FRAME, find the previous frame's resume PC (which will
3081 be used to construct the previous frame's ID, after looking up the
3082 containing function). */
c3b4394c 3083
eb5492fa
DJ
3084static CORE_ADDR
3085arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3086{
3087 CORE_ADDR pc;
3088 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3089 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3090}
3091
3092static CORE_ADDR
3093arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3094{
3095 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3096}
3097
b39cc962
DJ
3098static struct value *
3099arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3100 int regnum)
3101{
24568a2c 3102 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3103 CORE_ADDR lr, cpsr;
9779414d 3104 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3105
3106 switch (regnum)
3107 {
3108 case ARM_PC_REGNUM:
3109 /* The PC is normally copied from the return column, which
3110 describes saves of LR. However, that version may have an
3111 extra bit set to indicate Thumb state. The bit is not
3112 part of the PC. */
3113 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3114 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3115 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3116
3117 case ARM_PS_REGNUM:
3118 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3119 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3120 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3121 if (IS_THUMB_ADDR (lr))
9779414d 3122 cpsr |= t_bit;
b39cc962 3123 else
9779414d 3124 cpsr &= ~t_bit;
ca38c58e 3125 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3126
3127 default:
3128 internal_error (__FILE__, __LINE__,
3129 _("Unexpected register %d"), regnum);
3130 }
3131}
3132
3133static void
3134arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3135 struct dwarf2_frame_state_reg *reg,
3136 struct frame_info *this_frame)
3137{
3138 switch (regnum)
3139 {
3140 case ARM_PC_REGNUM:
3141 case ARM_PS_REGNUM:
3142 reg->how = DWARF2_FRAME_REG_FN;
3143 reg->loc.fn = arm_dwarf2_prev_register;
3144 break;
3145 case ARM_SP_REGNUM:
3146 reg->how = DWARF2_FRAME_REG_CFA;
3147 break;
3148 }
3149}
3150
c9cf6e20 3151/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3152
3153static int
c9cf6e20 3154thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3155{
3156 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3157 unsigned int insn, insn2;
3158 int found_return = 0, found_stack_adjust = 0;
3159 CORE_ADDR func_start, func_end;
3160 CORE_ADDR scan_pc;
3161 gdb_byte buf[4];
3162
3163 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3164 return 0;
3165
3166 /* The epilogue is a sequence of instructions along the following lines:
3167
3168 - add stack frame size to SP or FP
3169 - [if frame pointer used] restore SP from FP
3170 - restore registers from SP [may include PC]
3171 - a return-type instruction [if PC wasn't already restored]
3172
3173 In a first pass, we scan forward from the current PC and verify the
3174 instructions we find as compatible with this sequence, ending in a
3175 return instruction.
3176
3177 However, this is not sufficient to distinguish indirect function calls
3178 within a function from indirect tail calls in the epilogue in some cases.
3179 Therefore, if we didn't already find any SP-changing instruction during
3180 forward scan, we add a backward scanning heuristic to ensure we actually
3181 are in the epilogue. */
3182
3183 scan_pc = pc;
3184 while (scan_pc < func_end && !found_return)
3185 {
3186 if (target_read_memory (scan_pc, buf, 2))
3187 break;
3188
3189 scan_pc += 2;
3190 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3191
3192 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3193 found_return = 1;
3194 else if (insn == 0x46f7) /* mov pc, lr */
3195 found_return = 1;
540314bd 3196 else if (thumb_instruction_restores_sp (insn))
4024ca99 3197 {
b7576e5c 3198 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3199 found_return = 1;
3200 }
db24da6d 3201 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3202 {
3203 if (target_read_memory (scan_pc, buf, 2))
3204 break;
3205
3206 scan_pc += 2;
3207 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3208
3209 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3210 {
4024ca99
UW
3211 if (insn2 & 0x8000) /* <registers> include PC. */
3212 found_return = 1;
3213 }
3214 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3215 && (insn2 & 0x0fff) == 0x0b04)
3216 {
4024ca99
UW
3217 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3218 found_return = 1;
3219 }
3220 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3221 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3222 ;
4024ca99
UW
3223 else
3224 break;
3225 }
3226 else
3227 break;
3228 }
3229
3230 if (!found_return)
3231 return 0;
3232
3233 /* Since any instruction in the epilogue sequence, with the possible
3234 exception of return itself, updates the stack pointer, we need to
3235 scan backwards for at most one instruction. Try either a 16-bit or
3236 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3237 too much about false positives. */
4024ca99 3238
6b65d1b6
YQ
3239 if (pc - 4 < func_start)
3240 return 0;
3241 if (target_read_memory (pc - 4, buf, 4))
3242 return 0;
4024ca99 3243
6b65d1b6
YQ
3244 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3245 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3246
3247 if (thumb_instruction_restores_sp (insn2))
3248 found_stack_adjust = 1;
3249 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3250 found_stack_adjust = 1;
3251 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3252 && (insn2 & 0x0fff) == 0x0b04)
3253 found_stack_adjust = 1;
3254 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3255 && (insn2 & 0x0e00) == 0x0a00)
3256 found_stack_adjust = 1;
4024ca99
UW
3257
3258 return found_stack_adjust;
3259}
3260
4024ca99 3261static int
c58b006a 3262arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3263{
3264 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3265 unsigned int insn;
f303bc3e 3266 int found_return;
4024ca99
UW
3267 CORE_ADDR func_start, func_end;
3268
4024ca99
UW
3269 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3270 return 0;
3271
3272 /* We are in the epilogue if the previous instruction was a stack
3273 adjustment and the next instruction is a possible return (bx, mov
3274 pc, or pop). We could have to scan backwards to find the stack
3275 adjustment, or forwards to find the return, but this is a decent
3276 approximation. First scan forwards. */
3277
3278 found_return = 0;
3279 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3280 if (bits (insn, 28, 31) != INST_NV)
3281 {
3282 if ((insn & 0x0ffffff0) == 0x012fff10)
3283 /* BX. */
3284 found_return = 1;
3285 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3286 /* MOV PC. */
3287 found_return = 1;
3288 else if ((insn & 0x0fff0000) == 0x08bd0000
3289 && (insn & 0x0000c000) != 0)
3290 /* POP (LDMIA), including PC or LR. */
3291 found_return = 1;
3292 }
3293
3294 if (!found_return)
3295 return 0;
3296
3297 /* Scan backwards. This is just a heuristic, so do not worry about
3298 false positives from mode changes. */
3299
3300 if (pc < func_start + 4)
3301 return 0;
3302
3303 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3304 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3305 return 1;
3306
3307 return 0;
3308}
3309
c58b006a
YQ
3310/* Implement the stack_frame_destroyed_p gdbarch method. */
3311
3312static int
3313arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3314{
3315 if (arm_pc_is_thumb (gdbarch, pc))
3316 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3317 else
3318 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3319}
4024ca99 3320
2dd604e7
RE
3321/* When arguments must be pushed onto the stack, they go on in reverse
3322 order. The code below implements a FILO (stack) to do this. */
3323
3324struct stack_item
3325{
3326 int len;
3327 struct stack_item *prev;
7c543f7b 3328 gdb_byte *data;
2dd604e7
RE
3329};
3330
3331static struct stack_item *
df3b6708 3332push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3333{
3334 struct stack_item *si;
8d749320 3335 si = XNEW (struct stack_item);
7c543f7b 3336 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3337 si->len = len;
3338 si->prev = prev;
3339 memcpy (si->data, contents, len);
3340 return si;
3341}
3342
3343static struct stack_item *
3344pop_stack_item (struct stack_item *si)
3345{
3346 struct stack_item *dead = si;
3347 si = si->prev;
3348 xfree (dead->data);
3349 xfree (dead);
3350 return si;
3351}
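As a quick illustration (an editor's sketch, not part of the original sources; a, b, c and sp are hypothetical locals), pushing three argument words and then draining the list the way arm_push_dummy_call does further down puts the first-pushed word at the lowest address, next to the final SP:

si = push_stack_item (si, a, 4);	/* first argument word */
si = push_stack_item (si, b, 4);
si = push_stack_item (si, c, 4);	/* last argument word */
while (si)
  {
    sp -= si->len;			/* c is written first, at the highest address...  */
    write_memory (sp, si->data, si->len);
    si = pop_stack_item (si);		/* ...and a last, at the lowest, beside the final SP.  */
  }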
3352
2af48f68
PB
3353
3354/* Return the alignment (in bytes) of the given type. */
3355
3356static int
3357arm_type_align (struct type *t)
3358{
3359 int n;
3360 int align;
3361 int falign;
3362
3363 t = check_typedef (t);
3364 switch (TYPE_CODE (t))
3365 {
3366 default:
3367 /* Should never happen. */
3368 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3369 return 4;
3370
3371 case TYPE_CODE_PTR:
3372 case TYPE_CODE_ENUM:
3373 case TYPE_CODE_INT:
3374 case TYPE_CODE_FLT:
3375 case TYPE_CODE_SET:
3376 case TYPE_CODE_RANGE:
2af48f68 3377 case TYPE_CODE_REF:
aa006118 3378 case TYPE_CODE_RVALUE_REF:
2af48f68
PB
3379 case TYPE_CODE_CHAR:
3380 case TYPE_CODE_BOOL:
3381 return TYPE_LENGTH (t);
3382
3383 case TYPE_CODE_ARRAY:
c4312b19
YQ
3384 if (TYPE_VECTOR (t))
3385 {
3386 /* Use the natural alignment for vector types (the same as for
3387 scalar types), but cap the alignment at 64 bits. */
3388 if (TYPE_LENGTH (t) > 8)
3389 return 8;
3390 else
3391 return TYPE_LENGTH (t);
3392 }
3393 else
3394 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3395 case TYPE_CODE_COMPLEX:
2af48f68
PB
3396 return arm_type_align (TYPE_TARGET_TYPE (t));
3397
3398 case TYPE_CODE_STRUCT:
3399 case TYPE_CODE_UNION:
3400 align = 1;
3401 for (n = 0; n < TYPE_NFIELDS (t); n++)
3402 {
3403 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3404 if (falign > align)
3405 align = falign;
3406 }
3407 return align;
3408 }
3409}
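For illustration (editor's examples, not text from the original file), the values arm_type_align computes for a few sample C types:

struct align_ex1 { char c; double d; };	/* -> 8: the largest field alignment wins */
float align_ex2[4];	/* -> 4: arrays use the element's alignment */
typedef float align_ex3 __attribute__ ((vector_size (16)));	/* -> 8: vector alignment is capped at 8 bytes */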
3410
90445bd3
DJ
3411/* Possible base types for a candidate for passing and returning in
3412 VFP registers. */
3413
3414enum arm_vfp_cprc_base_type
3415{
3416 VFP_CPRC_UNKNOWN,
3417 VFP_CPRC_SINGLE,
3418 VFP_CPRC_DOUBLE,
3419 VFP_CPRC_VEC64,
3420 VFP_CPRC_VEC128
3421};
3422
3423/* The length of one element of base type B. */
3424
3425static unsigned
3426arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3427{
3428 switch (b)
3429 {
3430 case VFP_CPRC_SINGLE:
3431 return 4;
3432 case VFP_CPRC_DOUBLE:
3433 return 8;
3434 case VFP_CPRC_VEC64:
3435 return 8;
3436 case VFP_CPRC_VEC128:
3437 return 16;
3438 default:
3439 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3440 (int) b);
3441 }
3442}
3443
3444/* The character ('s', 'd' or 'q') for the type of VFP register used
3445 for passing base type B. */
3446
3447static int
3448arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3449{
3450 switch (b)
3451 {
3452 case VFP_CPRC_SINGLE:
3453 return 's';
3454 case VFP_CPRC_DOUBLE:
3455 return 'd';
3456 case VFP_CPRC_VEC64:
3457 return 'd';
3458 case VFP_CPRC_VEC128:
3459 return 'q';
3460 default:
3461 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3462 (int) b);
3463 }
3464}
3465
3466/* Determine whether T may be part of a candidate for passing and
3467 returning in VFP registers, ignoring the limit on the total number
3468 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3469 classification of the first valid component found; if it is not
3470 VFP_CPRC_UNKNOWN, all components must have the same classification
3471 as *BASE_TYPE. If it is found that T contains a type not permitted
3472 for passing and returning in VFP registers, a type differently
3473 classified from *BASE_TYPE, or two types differently classified
3474 from each other, return -1, otherwise return the total number of
3475 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3476 array). Vector types are not currently supported, matching the
3477 generic AAPCS support. */
90445bd3
DJ
3478
3479static int
3480arm_vfp_cprc_sub_candidate (struct type *t,
3481 enum arm_vfp_cprc_base_type *base_type)
3482{
3483 t = check_typedef (t);
3484 switch (TYPE_CODE (t))
3485 {
3486 case TYPE_CODE_FLT:
3487 switch (TYPE_LENGTH (t))
3488 {
3489 case 4:
3490 if (*base_type == VFP_CPRC_UNKNOWN)
3491 *base_type = VFP_CPRC_SINGLE;
3492 else if (*base_type != VFP_CPRC_SINGLE)
3493 return -1;
3494 return 1;
3495
3496 case 8:
3497 if (*base_type == VFP_CPRC_UNKNOWN)
3498 *base_type = VFP_CPRC_DOUBLE;
3499 else if (*base_type != VFP_CPRC_DOUBLE)
3500 return -1;
3501 return 1;
3502
3503 default:
3504 return -1;
3505 }
3506 break;
3507
817e0957
YQ
3508 case TYPE_CODE_COMPLEX:
3509 /* Arguments of complex T where T is one of the types float or
3510 double get treated as if they are implemented as:
3511
3512 struct complexT
3513 {
3514 T real;
3515 T imag;
5f52445b
YQ
3516 };
3517
3518 */
817e0957
YQ
3519 switch (TYPE_LENGTH (t))
3520 {
3521 case 8:
3522 if (*base_type == VFP_CPRC_UNKNOWN)
3523 *base_type = VFP_CPRC_SINGLE;
3524 else if (*base_type != VFP_CPRC_SINGLE)
3525 return -1;
3526 return 2;
3527
3528 case 16:
3529 if (*base_type == VFP_CPRC_UNKNOWN)
3530 *base_type = VFP_CPRC_DOUBLE;
3531 else if (*base_type != VFP_CPRC_DOUBLE)
3532 return -1;
3533 return 2;
3534
3535 default:
3536 return -1;
3537 }
3538 break;
3539
90445bd3
DJ
3540 case TYPE_CODE_ARRAY:
3541 {
c4312b19 3542 if (TYPE_VECTOR (t))
90445bd3 3543 {
c4312b19
YQ
3544 /* 64-bit and 128-bit containerized vector types are VFP
3545 CPRCs. */
3546 switch (TYPE_LENGTH (t))
3547 {
3548 case 8:
3549 if (*base_type == VFP_CPRC_UNKNOWN)
3550 *base_type = VFP_CPRC_VEC64;
3551 return 1;
3552 case 16:
3553 if (*base_type == VFP_CPRC_UNKNOWN)
3554 *base_type = VFP_CPRC_VEC128;
3555 return 1;
3556 default:
3557 return -1;
3558 }
3559 }
3560 else
3561 {
3562 int count;
3563 unsigned unitlen;
3564
3565 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3566 base_type);
3567 if (count == -1)
3568 return -1;
3569 if (TYPE_LENGTH (t) == 0)
3570 {
3571 gdb_assert (count == 0);
3572 return 0;
3573 }
3574 else if (count == 0)
3575 return -1;
3576 unitlen = arm_vfp_cprc_unit_length (*base_type);
3577 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3578 return TYPE_LENGTH (t) / unitlen;
90445bd3 3579 }
90445bd3
DJ
3580 }
3581 break;
3582
3583 case TYPE_CODE_STRUCT:
3584 {
3585 int count = 0;
3586 unsigned unitlen;
3587 int i;
3588 for (i = 0; i < TYPE_NFIELDS (t); i++)
3589 {
1040b979
YQ
3590 int sub_count = 0;
3591
3592 if (!field_is_static (&TYPE_FIELD (t, i)))
3593 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3594 base_type);
90445bd3
DJ
3595 if (sub_count == -1)
3596 return -1;
3597 count += sub_count;
3598 }
3599 if (TYPE_LENGTH (t) == 0)
3600 {
3601 gdb_assert (count == 0);
3602 return 0;
3603 }
3604 else if (count == 0)
3605 return -1;
3606 unitlen = arm_vfp_cprc_unit_length (*base_type);
3607 if (TYPE_LENGTH (t) != unitlen * count)
3608 return -1;
3609 return count;
3610 }
3611
3612 case TYPE_CODE_UNION:
3613 {
3614 int count = 0;
3615 unsigned unitlen;
3616 int i;
3617 for (i = 0; i < TYPE_NFIELDS (t); i++)
3618 {
3619 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3620 base_type);
3621 if (sub_count == -1)
3622 return -1;
3623 count = (count > sub_count ? count : sub_count);
3624 }
3625 if (TYPE_LENGTH (t) == 0)
3626 {
3627 gdb_assert (count == 0);
3628 return 0;
3629 }
3630 else if (count == 0)
3631 return -1;
3632 unitlen = arm_vfp_cprc_unit_length (*base_type);
3633 if (TYPE_LENGTH (t) != unitlen * count)
3634 return -1;
3635 return count;
3636 }
3637
3638 default:
3639 break;
3640 }
3641
3642 return -1;
3643}
3644
3645/* Determine whether T is a VFP co-processor register candidate (CPRC)
3646 if passed to or returned from a non-variadic function with the VFP
3647 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3648 *BASE_TYPE to the base type for T and *COUNT to the number of
3649 elements of that base type before returning. */
3650
3651static int
3652arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3653 int *count)
3654{
3655 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3656 int c = arm_vfp_cprc_sub_candidate (t, &b);
3657 if (c <= 0 || c > 4)
3658 return 0;
3659 *base_type = b;
3660 *count = c;
3661 return 1;
3662}
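For illustration (editor's examples, not from the original sources), a few C types and how arm_vfp_call_candidate classifies them under the VFP variant of the AAPCS:

struct cprc_f3   { float x, y, z; };	/* candidate: base type SINGLE, count 3 */
struct cprc_cplx { double _Complex c; };	/* candidate: base type DOUBLE, count 2 */
struct cprc_mix  { float f; double d; };	/* rejected: members classify differently */
struct cprc_big  { double d[5]; };	/* rejected: more than 4 base-type elements */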
3663
3664/* Return 1 if the VFP ABI should be used for passing arguments to and
3665 returning values from a function of type FUNC_TYPE, 0
3666 otherwise. */
3667
3668static int
3669arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3670{
3671 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3672 /* Variadic functions always use the base ABI. Assume that functions
3673 without debug info are not variadic. */
3674 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3675 return 0;
3676 /* The VFP ABI is only supported as a variant of AAPCS. */
3677 if (tdep->arm_abi != ARM_ABI_AAPCS)
3678 return 0;
3679 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3680}
3681
3682/* We currently only support passing parameters in integer registers, which
3683 conforms with GCC's default model, and VFP argument passing following
3684 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3685 we should probably support some of them based on the selected ABI. */
3686
3687static CORE_ADDR
7d9b040b 3688arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3689 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3690 struct value **args, CORE_ADDR sp, int struct_return,
3691 CORE_ADDR struct_addr)
2dd604e7 3692{
e17a4113 3693 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3694 int argnum;
3695 int argreg;
3696 int nstack;
3697 struct stack_item *si = NULL;
90445bd3
DJ
3698 int use_vfp_abi;
3699 struct type *ftype;
3700 unsigned vfp_regs_free = (1 << 16) - 1;
3701
3702 /* Determine the type of this function and whether the VFP ABI
3703 applies. */
3704 ftype = check_typedef (value_type (function));
3705 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3706 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3707 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3708
6a65450a
AC
3709 /* Set the return address. For the ARM, the return breakpoint is
3710 always at BP_ADDR. */
9779414d 3711 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3712 bp_addr |= 1;
6a65450a 3713 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3714
3715 /* Walk through the list of args and determine how large a temporary
3716 stack is required. Need to take care here as structs may be
7a9dd1b2 3717 passed on the stack, and we have to push them. */
2dd604e7
RE
3718 nstack = 0;
3719
3720 argreg = ARM_A1_REGNUM;
3721 nstack = 0;
3722
2dd604e7
RE
3723 /* The struct_return pointer occupies the first parameter
3724 passing register. */
3725 if (struct_return)
3726 {
3727 if (arm_debug)
5af949e3 3728 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3729 gdbarch_register_name (gdbarch, argreg),
5af949e3 3730 paddress (gdbarch, struct_addr));
2dd604e7
RE
3731 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3732 argreg++;
3733 }
3734
3735 for (argnum = 0; argnum < nargs; argnum++)
3736 {
3737 int len;
3738 struct type *arg_type;
3739 struct type *target_type;
3740 enum type_code typecode;
8c6363cf 3741 const bfd_byte *val;
2af48f68 3742 int align;
90445bd3
DJ
3743 enum arm_vfp_cprc_base_type vfp_base_type;
3744 int vfp_base_count;
3745 int may_use_core_reg = 1;
2dd604e7 3746
df407dfe 3747 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3748 len = TYPE_LENGTH (arg_type);
3749 target_type = TYPE_TARGET_TYPE (arg_type);
3750 typecode = TYPE_CODE (arg_type);
8c6363cf 3751 val = value_contents (args[argnum]);
2dd604e7 3752
2af48f68
PB
3753 align = arm_type_align (arg_type);
3754 /* Round alignment up to a whole number of words. */
3755 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3756 /* Different ABIs have different maximum alignments. */
3757 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3758 {
3759 /* The APCS ABI only requires word alignment. */
3760 align = INT_REGISTER_SIZE;
3761 }
3762 else
3763 {
3764 /* The AAPCS requires at most doubleword alignment. */
3765 if (align > INT_REGISTER_SIZE * 2)
3766 align = INT_REGISTER_SIZE * 2;
3767 }
3768
90445bd3
DJ
3769 if (use_vfp_abi
3770 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3771 &vfp_base_count))
3772 {
3773 int regno;
3774 int unit_length;
3775 int shift;
3776 unsigned mask;
3777
3778 /* Because this is a CPRC it cannot go in a core register or
3779 cause a core register to be skipped for alignment.
3780 Either it goes in VFP registers and the rest of this loop
3781 iteration is skipped for this argument, or it goes on the
3782 stack (and the stack alignment code is correct for this
3783 case). */
3784 may_use_core_reg = 0;
3785
3786 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3787 shift = unit_length / 4;
3788 mask = (1 << (shift * vfp_base_count)) - 1;
3789 for (regno = 0; regno < 16; regno += shift)
3790 if (((vfp_regs_free >> regno) & mask) == mask)
3791 break;
3792
3793 if (regno < 16)
3794 {
3795 int reg_char;
3796 int reg_scaled;
3797 int i;
3798
3799 vfp_regs_free &= ~(mask << regno);
3800 reg_scaled = regno / shift;
3801 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3802 for (i = 0; i < vfp_base_count; i++)
3803 {
3804 char name_buf[4];
3805 int regnum;
58d6951d
DJ
3806 if (reg_char == 'q')
3807 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3808 val + i * unit_length);
58d6951d
DJ
3809 else
3810 {
8c042590
PM
3811 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3812 reg_char, reg_scaled + i);
58d6951d
DJ
3813 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3814 strlen (name_buf));
3815 regcache_cooked_write (regcache, regnum,
3816 val + i * unit_length);
3817 }
90445bd3
DJ
3818 }
3819 continue;
3820 }
3821 else
3822 {
3823 /* This CPRC could not go in VFP registers, so all VFP
3824 registers are now marked as used. */
3825 vfp_regs_free = 0;
3826 }
3827 }
3828
2af48f68
PB
3829 /* Push stack padding for doubleword alignment. */
3830 if (nstack & (align - 1))
3831 {
3832 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3833 nstack += INT_REGISTER_SIZE;
3834 }
3835
3836 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3837 if (may_use_core_reg
3838 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3839 && align > INT_REGISTER_SIZE
3840 && argreg & 1)
3841 argreg++;
3842
2dd604e7
RE
3843 /* If the argument is a pointer to a function, and it is a
3844 Thumb function, create a LOCAL copy of the value and set
3845 the THUMB bit in it. */
3846 if (TYPE_CODE_PTR == typecode
3847 && target_type != NULL
f96b8fa0 3848 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3849 {
e17a4113 3850 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3851 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3852 {
224c3ddb 3853 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3854 store_unsigned_integer (copy, len, byte_order,
e17a4113 3855 MAKE_THUMB_ADDR (regval));
8c6363cf 3856 val = copy;
2dd604e7
RE
3857 }
3858 }
3859
3860 /* Copy the argument to general registers or the stack in
3861 register-sized pieces. Large arguments are split between
3862 registers and stack. */
3863 while (len > 0)
3864 {
f0c9063c 3865 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3866 CORE_ADDR regval
3867 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3868
90445bd3 3869 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3870 {
3871 /* The argument is being passed in a general purpose
3872 register. */
e17a4113 3873 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3874 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3875 if (arm_debug)
3876 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3877 argnum,
3878 gdbarch_register_name
2af46ca0 3879 (gdbarch, argreg),
f0c9063c 3880 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3881 regcache_cooked_write_unsigned (regcache, argreg, regval);
3882 argreg++;
3883 }
3884 else
3885 {
ef9bd0b8
YQ
3886 gdb_byte buf[INT_REGISTER_SIZE];
3887
3888 memset (buf, 0, sizeof (buf));
3889 store_unsigned_integer (buf, partial_len, byte_order, regval);
3890
2dd604e7
RE
3891 /* Push the arguments onto the stack. */
3892 if (arm_debug)
3893 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3894 argnum, nstack);
ef9bd0b8 3895 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3896 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3897 }
3898
3899 len -= partial_len;
3900 val += partial_len;
3901 }
3902 }
3903 /* If we have an odd number of words to push, then decrement the stack
3904 by one word now, so the first stack argument will be dword aligned. */
3905 if (nstack & 4)
3906 sp -= 4;
3907
3908 while (si)
3909 {
3910 sp -= si->len;
3911 write_memory (sp, si->data, si->len);
3912 si = pop_stack_item (si);
3913 }
3914
3915 /* Finally, update the SP register. */
3916 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3917
3918 return sp;
3919}
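To make the register assignment concrete, here is an editor's illustration (an assumption based on the code above, not text from the original file) of where the arguments of a call such as f (int a, double b, int c) land:

/* Base (soft-float) AAPCS:
     a -> r0
     r1 is skipped: a doubleword-aligned value needs an even register pair
     b -> r2/r3
     c -> pushed on the stack (no argument registers are left)
   With the VFP variant in effect, b is a CPRC and goes to d0 instead,
   so c fits in r1.  */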
3920
f53f0d0b
PB
3921
3922/* Always align the frame to an 8-byte boundary. This is required on
3923 some platforms and harmless on the rest. */
3924
3925static CORE_ADDR
3926arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3927{
3928 /* Align the stack to eight bytes. */
3929 return sp & ~ (CORE_ADDR) 7;
3930}
3931
c906108c 3932static void
12b27276 3933print_fpu_flags (struct ui_file *file, int flags)
c906108c 3934{
c5aa993b 3935 if (flags & (1 << 0))
12b27276 3936 fputs_filtered ("IVO ", file);
c5aa993b 3937 if (flags & (1 << 1))
12b27276 3938 fputs_filtered ("DVZ ", file);
c5aa993b 3939 if (flags & (1 << 2))
12b27276 3940 fputs_filtered ("OFL ", file);
c5aa993b 3941 if (flags & (1 << 3))
12b27276 3942 fputs_filtered ("UFL ", file);
c5aa993b 3943 if (flags & (1 << 4))
12b27276
WN
3944 fputs_filtered ("INX ", file);
3945 fputc_filtered ('\n', file);
c906108c
SS
3946}
3947
5e74b15c
RE
3948/* Print interesting information about the floating point processor
3949 (if present) or emulator. */
34e8f22d 3950static void
d855c300 3951arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3952 struct frame_info *frame, const char *args)
c906108c 3953{
9c9acae0 3954 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3955 int type;
3956
3957 type = (status >> 24) & 127;
edefbb7c 3958 if (status & (1 << 31))
12b27276 3959 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3960 else
12b27276 3961 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3962 /* i18n: [floating point unit] mask */
12b27276
WN
3963 fputs_filtered (_("mask: "), file);
3964 print_fpu_flags (file, status >> 16);
edefbb7c 3965 /* i18n: [floating point unit] flags */
12b27276
WN
3966 fputs_filtered (_("flags: "), file);
3967 print_fpu_flags (file, status);
c906108c
SS
3968}
3969
27067745
UW
3970/* Construct the ARM extended floating point type. */
3971static struct type *
3972arm_ext_type (struct gdbarch *gdbarch)
3973{
3974 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3975
3976 if (!tdep->arm_ext_type)
3977 tdep->arm_ext_type
e9bb382b 3978 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3979 floatformats_arm_ext);
3980
3981 return tdep->arm_ext_type;
3982}
3983
58d6951d
DJ
3984static struct type *
3985arm_neon_double_type (struct gdbarch *gdbarch)
3986{
3987 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3988
3989 if (tdep->neon_double_type == NULL)
3990 {
3991 struct type *t, *elem;
3992
3993 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3994 TYPE_CODE_UNION);
3995 elem = builtin_type (gdbarch)->builtin_uint8;
3996 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3997 elem = builtin_type (gdbarch)->builtin_uint16;
3998 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3999 elem = builtin_type (gdbarch)->builtin_uint32;
4000 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4001 elem = builtin_type (gdbarch)->builtin_uint64;
4002 append_composite_type_field (t, "u64", elem);
4003 elem = builtin_type (gdbarch)->builtin_float;
4004 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4005 elem = builtin_type (gdbarch)->builtin_double;
4006 append_composite_type_field (t, "f64", elem);
4007
4008 TYPE_VECTOR (t) = 1;
4009 TYPE_NAME (t) = "neon_d";
4010 tdep->neon_double_type = t;
4011 }
4012
4013 return tdep->neon_double_type;
4014}
4015
4016/* FIXME: The vector types are not correctly ordered on big-endian
4017 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4018 bits of d0 - regardless of what unit size is being held in d0. So
4019 the offset of the first uint8 in d0 is 7, but the offset of the
4020 first float is 4. This code works as-is for little-endian
4021 targets. */
4022
4023static struct type *
4024arm_neon_quad_type (struct gdbarch *gdbarch)
4025{
4026 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4027
4028 if (tdep->neon_quad_type == NULL)
4029 {
4030 struct type *t, *elem;
4031
4032 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4033 TYPE_CODE_UNION);
4034 elem = builtin_type (gdbarch)->builtin_uint8;
4035 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4036 elem = builtin_type (gdbarch)->builtin_uint16;
4037 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4038 elem = builtin_type (gdbarch)->builtin_uint32;
4039 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4040 elem = builtin_type (gdbarch)->builtin_uint64;
4041 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4042 elem = builtin_type (gdbarch)->builtin_float;
4043 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4044 elem = builtin_type (gdbarch)->builtin_double;
4045 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4046
4047 TYPE_VECTOR (t) = 1;
4048 TYPE_NAME (t) = "neon_q";
4049 tdep->neon_quad_type = t;
4050 }
4051
4052 return tdep->neon_quad_type;
4053}
4054
34e8f22d
RE
4055/* Return the GDB type object for the "standard" data type of data in
4056 register N. */
4057
4058static struct type *
7a5ea0d4 4059arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4060{
58d6951d
DJ
4061 int num_regs = gdbarch_num_regs (gdbarch);
4062
4063 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4064 && regnum >= num_regs && regnum < num_regs + 32)
4065 return builtin_type (gdbarch)->builtin_float;
4066
4067 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4068 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4069 return arm_neon_quad_type (gdbarch);
4070
4071 /* If the target description has register information, we are only
4072 in this function so that we can override the types of
4073 double-precision registers for NEON. */
4074 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4075 {
4076 struct type *t = tdesc_register_type (gdbarch, regnum);
4077
4078 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4079 && TYPE_CODE (t) == TYPE_CODE_FLT
4080 && gdbarch_tdep (gdbarch)->have_neon)
4081 return arm_neon_double_type (gdbarch);
4082 else
4083 return t;
4084 }
4085
34e8f22d 4086 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4087 {
4088 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4089 return builtin_type (gdbarch)->builtin_void;
4090
4091 return arm_ext_type (gdbarch);
4092 }
e4c16157 4093 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4094 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4095 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4096 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4097 else if (regnum >= ARRAY_SIZE (arm_register_names))
4098 /* These registers are only supported on targets which supply
4099 an XML description. */
df4df182 4100 return builtin_type (gdbarch)->builtin_int0;
032758dc 4101 else
df4df182 4102 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4103}
4104
ff6f572f
DJ
4105/* Map a DWARF register REGNUM onto the appropriate GDB register
4106 number. */
4107
4108static int
d3f73121 4109arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4110{
4111 /* Core integer regs. */
4112 if (reg >= 0 && reg <= 15)
4113 return reg;
4114
4115 /* Legacy FPA encoding. These were once used in a way which
4116 overlapped with VFP register numbering, so their use is
4117 discouraged, but GDB doesn't support the ARM toolchain
4118 which used them for VFP. */
4119 if (reg >= 16 && reg <= 23)
4120 return ARM_F0_REGNUM + reg - 16;
4121
4122 /* New assignments for the FPA registers. */
4123 if (reg >= 96 && reg <= 103)
4124 return ARM_F0_REGNUM + reg - 96;
4125
4126 /* WMMX register assignments. */
4127 if (reg >= 104 && reg <= 111)
4128 return ARM_WCGR0_REGNUM + reg - 104;
4129
4130 if (reg >= 112 && reg <= 127)
4131 return ARM_WR0_REGNUM + reg - 112;
4132
4133 if (reg >= 192 && reg <= 199)
4134 return ARM_WC0_REGNUM + reg - 192;
4135
58d6951d
DJ
4136 /* VFP v2 registers. A double precision value is actually
4137 in d1 rather than s2, but the ABI only defines numbering
4138 for the single precision registers. This will "just work"
4139 in GDB for little endian targets (we'll read eight bytes,
4140 starting in s0 and then progressing to s1), but will be
4141 reversed on big endian targets with VFP. This won't
4142 be a problem for the new Neon quad registers; you're supposed
4143 to use DW_OP_piece for those. */
4144 if (reg >= 64 && reg <= 95)
4145 {
4146 char name_buf[4];
4147
8c042590 4148 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4149 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4150 strlen (name_buf));
4151 }
4152
4153 /* VFP v3 / Neon registers. This range is also used for VFP v2
4154 registers, except that it now describes d0 instead of s0. */
4155 if (reg >= 256 && reg <= 287)
4156 {
4157 char name_buf[4];
4158
8c042590 4159 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4160 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4161 strlen (name_buf));
4162 }
4163
ff6f572f
DJ
4164 return -1;
4165}
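A few sample mappings, as an editor's illustration of the rules above (not from the original file):

/* DWARF 13  -> r13 (ARM_SP_REGNUM): core registers map one-to-one.
   DWARF 97  -> ARM_F0_REGNUM + 1:   new FPA numbering.
   DWARF 70  -> "s6":                VFP v2 single-precision numbering.
   DWARF 258 -> "d2":                VFP v3 / NEON double-precision numbering.  */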
4166
26216b98
AC
4167/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4168static int
e7faf938 4169arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4170{
4171 int reg = regnum;
e7faf938 4172 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4173
ff6f572f
DJ
4174 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4175 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4176
4177 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4178 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4179
4180 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4181 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4182
26216b98
AC
4183 if (reg < NUM_GREGS)
4184 return SIM_ARM_R0_REGNUM + reg;
4185 reg -= NUM_GREGS;
4186
4187 if (reg < NUM_FREGS)
4188 return SIM_ARM_FP0_REGNUM + reg;
4189 reg -= NUM_FREGS;
4190
4191 if (reg < NUM_SREGS)
4192 return SIM_ARM_FPS_REGNUM + reg;
4193 reg -= NUM_SREGS;
4194
edefbb7c 4195 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4196}
34e8f22d 4197
d9311bfa
AT
4198/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4199 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4200 NULL if an error occurs. BUF is freed. */
c906108c 4201
d9311bfa
AT
4202static gdb_byte *
4203extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4204 int old_len, int new_len)
4205{
4206 gdb_byte *new_buf;
4207 int bytes_to_read = new_len - old_len;
c906108c 4208
d9311bfa
AT
4209 new_buf = (gdb_byte *) xmalloc (new_len);
4210 memcpy (new_buf + bytes_to_read, buf, old_len);
4211 xfree (buf);
198cd59d 4212 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
d9311bfa
AT
4213 {
4214 xfree (new_buf);
4215 return NULL;
c906108c 4216 }
d9311bfa 4217 return new_buf;
c906108c
SS
4218}
4219
d9311bfa
AT
4220/* An IT block is at most the 2-byte IT instruction followed by four
4221 4-byte instructions. The instruction at the breakpoint address may
4222 itself be the last of those four, so at most the IT opcode plus three
4223 full-width instructions precede it: 2 + 3 * 4 == 14 bytes. */
4224#define MAX_IT_BLOCK_PREFIX 14
177321bd 4225
d9311bfa
AT
4226/* Use a quick scan if there are more than this many bytes of
4227 code. */
4228#define IT_SCAN_THRESHOLD 32
177321bd 4229
d9311bfa
AT
4230/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4231 A breakpoint in an IT block may not be hit, depending on the
4232 condition flags. */
ad527d2e 4233static CORE_ADDR
d9311bfa 4234arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4235{
d9311bfa
AT
4236 gdb_byte *buf;
4237 char map_type;
4238 CORE_ADDR boundary, func_start;
4239 int buf_len;
4240 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4241 int i, any, last_it, last_it_count;
177321bd 4242
d9311bfa
AT
4243 /* If we are using BKPT breakpoints, none of this is necessary. */
4244 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4245 return bpaddr;
177321bd 4246
d9311bfa
AT
4247 /* ARM mode does not have this problem. */
4248 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4249 return bpaddr;
177321bd 4250
d9311bfa
AT
4251 /* We are setting a breakpoint in Thumb code that could potentially
4252 contain an IT block. The first step is to find how much Thumb
4253 code there is; we do not need to read outside of known Thumb
4254 sequences. */
4255 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4256 if (map_type == 0)
4257 /* Thumb-2 code must have mapping symbols to have a chance. */
4258 return bpaddr;
9dca5578 4259
d9311bfa 4260 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4261
d9311bfa
AT
4262 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4263 && func_start > boundary)
4264 boundary = func_start;
9dca5578 4265
d9311bfa
AT
4266 /* Search for a candidate IT instruction. We have to do some fancy
4267 footwork to distinguish a real IT instruction from the second
4268 half of a 32-bit instruction, but there is no need for that if
4269 there's no candidate. */
325fac50 4270 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4271 if (buf_len == 0)
4272 /* No room for an IT instruction. */
4273 return bpaddr;
c906108c 4274
d9311bfa 4275 buf = (gdb_byte *) xmalloc (buf_len);
198cd59d 4276 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
d9311bfa
AT
4277 return bpaddr;
4278 any = 0;
4279 for (i = 0; i < buf_len; i += 2)
c906108c 4280 {
d9311bfa
AT
4281 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4282 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4283 {
d9311bfa
AT
4284 any = 1;
4285 break;
25b41d01 4286 }
c906108c 4287 }
d9311bfa
AT
4288
4289 if (any == 0)
c906108c 4290 {
d9311bfa
AT
4291 xfree (buf);
4292 return bpaddr;
f9d67f43
DJ
4293 }
4294
4295 /* OK, the code bytes before this instruction contain at least one
4296 halfword which resembles an IT instruction. We know that it's
4297 Thumb code, but there are still two possibilities. Either the
4298 halfword really is an IT instruction, or it is the second half of
4299 a 32-bit Thumb instruction. The only way we can tell is to
4300 scan forwards from a known instruction boundary. */
4301 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4302 {
4303 int definite;
4304
4305 /* There's a lot of code before this instruction. Start with an
4306 optimistic search; it's easy to recognize halfwords that can
4307 not be the start of a 32-bit instruction, and use that to
4308 lock on to the instruction boundaries. */
4309 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4310 if (buf == NULL)
4311 return bpaddr;
4312 buf_len = IT_SCAN_THRESHOLD;
4313
4314 definite = 0;
4315 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4316 {
4317 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4318 if (thumb_insn_size (inst1) == 2)
4319 {
4320 definite = 1;
4321 break;
4322 }
4323 }
4324
4325 /* At this point, if DEFINITE, BUF[I] is the first place we
4326 are sure that we know the instruction boundaries, and it is far
4327 enough from BPADDR that we could not miss an IT instruction
4328 affecting BPADDR. If ! DEFINITE, give up - start from a
4329 known boundary. */
4330 if (! definite)
4331 {
0963b4bd
MS
4332 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4333 bpaddr - boundary);
f9d67f43
DJ
4334 if (buf == NULL)
4335 return bpaddr;
4336 buf_len = bpaddr - boundary;
4337 i = 0;
4338 }
4339 }
4340 else
4341 {
4342 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4343 if (buf == NULL)
4344 return bpaddr;
4345 buf_len = bpaddr - boundary;
4346 i = 0;
4347 }
4348
4349 /* Scan forwards. Find the last IT instruction before BPADDR. */
4350 last_it = -1;
4351 last_it_count = 0;
4352 while (i < buf_len)
4353 {
4354 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4355 last_it_count--;
4356 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4357 {
4358 last_it = i;
4359 if (inst1 & 0x0001)
4360 last_it_count = 4;
4361 else if (inst1 & 0x0002)
4362 last_it_count = 3;
4363 else if (inst1 & 0x0004)
4364 last_it_count = 2;
4365 else
4366 last_it_count = 1;
4367 }
4368 i += thumb_insn_size (inst1);
4369 }
4370
4371 xfree (buf);
4372
4373 if (last_it == -1)
4374 /* There wasn't really an IT instruction after all. */
4375 return bpaddr;
4376
4377 if (last_it_count < 1)
4378 /* It was too far away. */
4379 return bpaddr;
4380
4381 /* This really is a trouble spot. Move the breakpoint to the IT
4382 instruction. */
4383 return bpaddr - buf_len + last_it;
4384}
4385
cca44b1b 4386/* ARM displaced stepping support.
c906108c 4387
cca44b1b 4388 Generally ARM displaced stepping works as follows:
c906108c 4389
cca44b1b 4390 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4391 arm_process_displaced_insn. Depending on the type of instruction, it is
4392 then copied to a scratch location, possibly in a modified form. The
4393 copy_* set of functions performs such modification, as necessary. A
4394 breakpoint is placed after the modified instruction in the scratch space
4395 to return control to GDB. Note in particular that instructions which
4396 modify the PC will no longer do so after modification.
c5aa993b 4397
cca44b1b
JB
4398 2. The instruction is single-stepped, by setting the PC to the scratch
4399 location address, and resuming. Control returns to GDB when the
4400 breakpoint is hit.
c5aa993b 4401
cca44b1b
JB
4402 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4403 function used for the current instruction. This function's job is to
4404 put the CPU/memory state back to what it would have been if the
4405 instruction had been executed unmodified in its original location. */
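A worked example may help (an editor's sketch based on arm_copy_preload and install_preload below, not text from the original sources): displaced-stepping a PC-relative preload located at ADDR.

/* original, at ADDR:  pld [pc, #8]
   scratch copy:       pld [r0, #8]      ; the Rn field is rewritten from pc to r0
   before stepping:    old r0 saved in dsc->tmp[0], then r0 <- ADDR + 8
                       (the value the PC would have read in place)
   cleanup:            r0 restored from dsc->tmp[0]
   The copy therefore prefetches the same address the original would have,
   even though it executes in the scratch area.  */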
c5aa993b 4406
cca44b1b
JB
4407/* NOP instruction (mov r0, r0). */
4408#define ARM_NOP 0xe1a00000
34518530 4409#define THUMB_NOP 0x4600
cca44b1b
JB
4410
4411/* Helper for register reads for displaced stepping. In particular, this
4412 returns the PC as it would be seen by the instruction at its original
4413 location. */
4414
4415ULONGEST
36073a92
YQ
4416displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4417 int regno)
cca44b1b
JB
4418{
4419 ULONGEST ret;
36073a92 4420 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4421
bf9f652a 4422 if (regno == ARM_PC_REGNUM)
cca44b1b 4423 {
4db71c0b
YQ
4424 /* Compute pipeline offset:
4425 - When executing an ARM instruction, PC reads as the address of the
4426 current instruction plus 8.
4427 - When executing a Thumb instruction, PC reads as the address of the
4428 current instruction plus 4. */
4429
36073a92 4430 if (!dsc->is_thumb)
4db71c0b
YQ
4431 from += 8;
4432 else
4433 from += 4;
4434
cca44b1b
JB
4435 if (debug_displaced)
4436 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4437 (unsigned long) from);
4438 return (ULONGEST) from;
cca44b1b 4439 }
c906108c 4440 else
cca44b1b
JB
4441 {
4442 regcache_cooked_read_unsigned (regs, regno, &ret);
4443 if (debug_displaced)
4444 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4445 regno, (unsigned long) ret);
4446 return ret;
4447 }
c906108c
SS
4448}
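As a concrete illustration (editor's note, not from the original file):

/* An ARM instruction originally at 0x8000 that reads the PC sees 0x8008
   here; a Thumb instruction at the same address sees 0x8004.  The value
   is derived from dsc->insn_addr, the original location, never from the
   scratch-pad address where the copied instruction actually executes.  */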
4449
cca44b1b
JB
4450static int
4451displaced_in_arm_mode (struct regcache *regs)
4452{
4453 ULONGEST ps;
9779414d 4454 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4455
cca44b1b 4456 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4457
9779414d 4458 return (ps & t_bit) == 0;
cca44b1b 4459}
66e810cd 4460
cca44b1b 4461/* Write to the PC as from a branch instruction. */
c906108c 4462
cca44b1b 4463static void
36073a92
YQ
4464branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4465 ULONGEST val)
c906108c 4466{
36073a92 4467 if (!dsc->is_thumb)
cca44b1b
JB
4468 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4469 architecture versions < 6. */
0963b4bd
MS
4470 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4471 val & ~(ULONGEST) 0x3);
cca44b1b 4472 else
0963b4bd
MS
4473 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4474 val & ~(ULONGEST) 0x1);
cca44b1b 4475}
66e810cd 4476
cca44b1b
JB
4477/* Write to the PC as from a branch-exchange instruction. */
4478
4479static void
4480bx_write_pc (struct regcache *regs, ULONGEST val)
4481{
4482 ULONGEST ps;
9779414d 4483 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4484
4485 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4486
4487 if ((val & 1) == 1)
c906108c 4488 {
9779414d 4489 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4490 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4491 }
4492 else if ((val & 2) == 0)
4493 {
9779414d 4494 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4495 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4496 }
4497 else
4498 {
cca44b1b
JB
4499 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4500 mode, align dest to 4 bytes). */
4501 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4502 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4503 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4504 }
4505}
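Example values for the three cases above (an editor's illustration, not from the original sources):

/* bx_write_pc (regs, 0x8001): bit 0 set     -> Thumb bit set,     PC <- 0x8000
   bx_write_pc (regs, 0x8004): bits 1:0 = 00 -> Thumb bit cleared, PC <- 0x8004
   bx_write_pc (regs, 0x8002): bits 1:0 = 10 -> unpredictable; warn, stay in
                                                ARM mode, PC <- 0x8000  */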
ed9a39eb 4506
cca44b1b 4507/* Write to the PC as if from a load instruction. */
ed9a39eb 4508
34e8f22d 4509static void
36073a92
YQ
4510load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4511 ULONGEST val)
ed9a39eb 4512{
cca44b1b
JB
4513 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4514 bx_write_pc (regs, val);
4515 else
36073a92 4516 branch_write_pc (regs, dsc, val);
cca44b1b 4517}
be8626e0 4518
cca44b1b
JB
4519/* Write to the PC as if from an ALU instruction. */
4520
4521static void
36073a92
YQ
4522alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4523 ULONGEST val)
cca44b1b 4524{
36073a92 4525 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4526 bx_write_pc (regs, val);
4527 else
36073a92 4528 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4529}
4530
4531/* Helper for writing to registers for displaced stepping. Writing to the PC
4532 has varying effects depending on the instruction which does the write:
4533 this is controlled by the WRITE_PC argument. */
4534
4535void
4536displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4537 int regno, ULONGEST val, enum pc_write_style write_pc)
4538{
bf9f652a 4539 if (regno == ARM_PC_REGNUM)
08216dd7 4540 {
cca44b1b
JB
4541 if (debug_displaced)
4542 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4543 (unsigned long) val);
4544 switch (write_pc)
08216dd7 4545 {
cca44b1b 4546 case BRANCH_WRITE_PC:
36073a92 4547 branch_write_pc (regs, dsc, val);
08216dd7
RE
4548 break;
4549
cca44b1b
JB
4550 case BX_WRITE_PC:
4551 bx_write_pc (regs, val);
4552 break;
4553
4554 case LOAD_WRITE_PC:
36073a92 4555 load_write_pc (regs, dsc, val);
cca44b1b
JB
4556 break;
4557
4558 case ALU_WRITE_PC:
36073a92 4559 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4560 break;
4561
4562 case CANNOT_WRITE_PC:
4563 warning (_("Instruction wrote to PC in an unexpected way when "
4564 "single-stepping"));
08216dd7
RE
4565 break;
4566
4567 default:
97b9747c
JB
4568 internal_error (__FILE__, __LINE__,
4569 _("Invalid argument to displaced_write_reg"));
08216dd7 4570 }
b508a996 4571
cca44b1b 4572 dsc->wrote_to_pc = 1;
b508a996 4573 }
ed9a39eb 4574 else
b508a996 4575 {
cca44b1b
JB
4576 if (debug_displaced)
4577 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4578 regno, (unsigned long) val);
4579 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4580 }
34e8f22d
RE
4581}
4582
cca44b1b
JB
4583/* This function is used to concisely determine if an instruction INSN
4584 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4585 corresponding fields of BITMASK set to 0b1111. The function
4586 returns 1 if any of these fields in INSN reference the PC
4587 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4588
4589static int
cca44b1b 4590insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4591{
cca44b1b 4592 uint32_t lowbit = 1;
67255d04 4593
cca44b1b
JB
4594 while (bitmask != 0)
4595 {
4596 uint32_t mask;
44e1a9eb 4597
cca44b1b
JB
4598 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4599 ;
67255d04 4600
cca44b1b
JB
4601 if (!lowbit)
4602 break;
67255d04 4603
cca44b1b 4604 mask = lowbit * 0xf;
67255d04 4605
cca44b1b
JB
4606 if ((insn & mask) == mask)
4607 return 1;
4608
4609 bitmask &= ~mask;
67255d04
RE
4610 }
4611
cca44b1b
JB
4612 return 0;
4613}
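Typical use (an editor's illustration; the mask matches the one used by arm_copy_alu_imm below):

/* Does the Rd field (bits 12-15) or the Rn field (bits 16-19) of INSN
   name the PC (r15)?  */
if (insn_references_pc (insn, 0x000ff000ul))
  {
    /* The instruction reads or writes the PC and must be rewritten.  */
  }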
2af48f68 4614
cca44b1b
JB
4615/* The simplest copy function. Many instructions have the same effect no
4616 matter what address they are executed at: in those cases, use this. */
67255d04 4617
cca44b1b 4618static int
7ff120b4
YQ
4619arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4620 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4621{
4622 if (debug_displaced)
4623 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4624 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4625 iname);
67255d04 4626
cca44b1b 4627 dsc->modinsn[0] = insn;
67255d04 4628
cca44b1b
JB
4629 return 0;
4630}
4631
34518530
YQ
4632static int
4633thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4634 uint16_t insn2, const char *iname,
4635 struct displaced_step_closure *dsc)
4636{
4637 if (debug_displaced)
4638 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4639 "opcode/class '%s' unmodified\n", insn1, insn2,
4640 iname);
4641
4642 dsc->modinsn[0] = insn1;
4643 dsc->modinsn[1] = insn2;
4644 dsc->numinsns = 2;
4645
4646 return 0;
4647}
4648
4649/* Copy a 16-bit Thumb instruction (plain Thumb or 16-bit Thumb-2) without
4650 any modification. */
4651static int
615234c1 4652thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4653 const char *iname,
4654 struct displaced_step_closure *dsc)
4655{
4656 if (debug_displaced)
4657 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4658 "opcode/class '%s' unmodified\n", insn,
4659 iname);
4660
4661 dsc->modinsn[0] = insn;
4662
4663 return 0;
4664}
4665
cca44b1b
JB
4666/* Preload instructions with immediate offset. */
4667
4668static void
6e39997a 4669cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4670 struct regcache *regs, struct displaced_step_closure *dsc)
4671{
4672 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4673 if (!dsc->u.preload.immed)
4674 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4675}
4676
7ff120b4
YQ
4677static void
4678install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4679 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4680{
cca44b1b 4681 ULONGEST rn_val;
cca44b1b
JB
4682 /* Preload instructions:
4683
4684 {pli/pld} [rn, #+/-imm]
4685 ->
4686 {pli/pld} [r0, #+/-imm]. */
4687
36073a92
YQ
4688 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4689 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4690 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4691 dsc->u.preload.immed = 1;
4692
cca44b1b 4693 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4694}
4695
cca44b1b 4696static int
7ff120b4 4697arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4698 struct displaced_step_closure *dsc)
4699{
4700 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4701
7ff120b4
YQ
4702 if (!insn_references_pc (insn, 0x000f0000ul))
4703 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4704
4705 if (debug_displaced)
4706 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4707 (unsigned long) insn);
4708
7ff120b4
YQ
4709 dsc->modinsn[0] = insn & 0xfff0ffff;
4710
4711 install_preload (gdbarch, regs, dsc, rn);
4712
4713 return 0;
4714}
4715
34518530
YQ
4716static int
4717thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4718 struct regcache *regs, struct displaced_step_closure *dsc)
4719{
4720 unsigned int rn = bits (insn1, 0, 3);
4721 unsigned int u_bit = bit (insn1, 7);
4722 int imm12 = bits (insn2, 0, 11);
4723 ULONGEST pc_val;
4724
4725 if (rn != ARM_PC_REGNUM)
4726 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4727
4728 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4729 PLD (literal) Encoding T1. */
4730 if (debug_displaced)
4731 fprintf_unfiltered (gdb_stdlog,
4732 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4733 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4734 imm12);
4735
4736 if (!u_bit)
4737 imm12 = -1 * imm12;
4738
4739 /* Rewrite instruction {pli/pld} PC imm12 into:
4740 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4741
4742 {pli/pld} [r0, r1]
4743
4744 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4745
4746 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4747 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4748
4749 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4750
4751 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4752 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4753 dsc->u.preload.immed = 0;
4754
4755 /* {pli/pld} [r0, r1] */
4756 dsc->modinsn[0] = insn1 & 0xfff0;
4757 dsc->modinsn[1] = 0xf001;
4758 dsc->numinsns = 2;
4759
4760 dsc->cleanup = &cleanup_preload;
4761 return 0;
4762}
4763
7ff120b4
YQ
4764/* Preload instructions with register offset. */
4765
4766static void
4767install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4768 struct displaced_step_closure *dsc, unsigned int rn,
4769 unsigned int rm)
4770{
4771 ULONGEST rn_val, rm_val;
4772
cca44b1b
JB
4773 /* Preload register-offset instructions:
4774
4775 {pli/pld} [rn, rm {, shift}]
4776 ->
4777 {pli/pld} [r0, r1 {, shift}]. */
4778
36073a92
YQ
4779 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4780 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4781 rn_val = displaced_read_reg (regs, dsc, rn);
4782 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4783 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4784 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4785 dsc->u.preload.immed = 0;
4786
cca44b1b 4787 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4788}
4789
4790static int
4791arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4792 struct regcache *regs,
4793 struct displaced_step_closure *dsc)
4794{
4795 unsigned int rn = bits (insn, 16, 19);
4796 unsigned int rm = bits (insn, 0, 3);
4797
4798
4799 if (!insn_references_pc (insn, 0x000f000ful))
4800 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4801
4802 if (debug_displaced)
4803 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4804 (unsigned long) insn);
4805
4806 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4807
7ff120b4 4808 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4809 return 0;
4810}
4811
4812/* Copy/cleanup coprocessor load and store instructions. */
4813
4814static void
6e39997a 4815cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4816 struct regcache *regs,
4817 struct displaced_step_closure *dsc)
4818{
36073a92 4819 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4820
4821 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4822
4823 if (dsc->u.ldst.writeback)
4824 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4825}
4826
7ff120b4
YQ
4827static void
4828install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4829 struct displaced_step_closure *dsc,
4830 int writeback, unsigned int rn)
cca44b1b 4831{
cca44b1b 4832 ULONGEST rn_val;
cca44b1b 4833
cca44b1b
JB
4834 /* Coprocessor load/store instructions:
4835
4836 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4837 ->
4838 {stc/stc2} [r0, #+/-imm].
4839
4840 ldc/ldc2 are handled identically. */
4841
36073a92
YQ
4842 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4843 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4844 /* PC should be 4-byte aligned. */
4845 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4846 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4847
7ff120b4 4848 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4849 dsc->u.ldst.rn = rn;
4850
7ff120b4
YQ
4851 dsc->cleanup = &cleanup_copro_load_store;
4852}
4853
4854static int
4855arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4856 struct regcache *regs,
4857 struct displaced_step_closure *dsc)
4858{
4859 unsigned int rn = bits (insn, 16, 19);
4860
4861 if (!insn_references_pc (insn, 0x000f0000ul))
4862 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4863
4864 if (debug_displaced)
4865 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4866 "load/store insn %.8lx\n", (unsigned long) insn);
4867
cca44b1b
JB
4868 dsc->modinsn[0] = insn & 0xfff0ffff;
4869
7ff120b4 4870 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4871
4872 return 0;
4873}
4874
34518530
YQ
4875static int
4876thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4877 uint16_t insn2, struct regcache *regs,
4878 struct displaced_step_closure *dsc)
4879{
4880 unsigned int rn = bits (insn1, 0, 3);
4881
4882 if (rn != ARM_PC_REGNUM)
4883 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4884 "copro load/store", dsc);
4885
4886 if (debug_displaced)
4887 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4888 "load/store insn %.4x%.4x\n", insn1, insn2);
4889
4890 dsc->modinsn[0] = insn1 & 0xfff0;
4891 dsc->modinsn[1] = insn2;
4892 dsc->numinsns = 2;
4893
4894 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4895 doesn't support writeback, so pass 0. */
4896 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4897
4898 return 0;
4899}
4900
cca44b1b
JB
4901/* Clean up branch instructions (actually perform the branch, by setting
4902 PC). */
4903
4904static void
6e39997a 4905cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4906 struct displaced_step_closure *dsc)
4907{
36073a92 4908 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4909 int branch_taken = condition_true (dsc->u.branch.cond, status);
4910 enum pc_write_style write_pc = dsc->u.branch.exchange
4911 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4912
4913 if (!branch_taken)
4914 return;
4915
4916 if (dsc->u.branch.link)
4917 {
8c8dba6d
YQ
4918 /* LR should hold the address of the instruction following the current
4919 one. So that logic handling a later `bx lr' is not confused, set bit 0
4920 of the LR value when the current instruction is in Thumb mode. */
4921 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4922
4923 if (dsc->is_thumb)
4924 next_insn_addr |= 0x1;
4925
4926 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4927 CANNOT_WRITE_PC);
cca44b1b
JB
4928 }
4929
bf9f652a 4930 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4931}
4932
4933/* Copy B/BL/BLX instructions with immediate destinations. */
4934
7ff120b4
YQ
4935static void
4936install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4937 struct displaced_step_closure *dsc,
4938 unsigned int cond, int exchange, int link, long offset)
4939{
4940 /* Implement "BL<cond> <label>" as:
4941
4942 Preparation: cond <- instruction condition
4943 Insn: mov r0, r0 (nop)
4944 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4945
4946 B<cond> similar, but don't set r14 in cleanup. */
4947
4948 dsc->u.branch.cond = cond;
4949 dsc->u.branch.link = link;
4950 dsc->u.branch.exchange = exchange;
4951
2b16b2e3
YQ
4952 dsc->u.branch.dest = dsc->insn_addr;
4953 if (link && exchange)
4954 /* For BLX, offset is computed from the Align (PC, 4). */
4955 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4956
7ff120b4 4957 if (dsc->is_thumb)
2b16b2e3 4958 dsc->u.branch.dest += 4 + offset;
7ff120b4 4959 else
2b16b2e3 4960 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
4961
4962 dsc->cleanup = &cleanup_branch;
4963}
cca44b1b 4964static int
7ff120b4
YQ
4965arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4966 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
4967{
4968 unsigned int cond = bits (insn, 28, 31);
4969 int exchange = (cond == 0xf);
4970 int link = exchange || bit (insn, 24);
cca44b1b
JB
4971 long offset;
4972
4973 if (debug_displaced)
4974 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4975 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4976 (unsigned long) insn);
cca44b1b
JB
4977 if (exchange)
4978 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4979 then arrange the switch into Thumb mode. */
4980 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4981 else
4982 offset = bits (insn, 0, 23) << 2;
4983
4984 if (bit (offset, 25))
4985 offset = offset | ~0x3ffffff;
4986
cca44b1b
JB
4987 dsc->modinsn[0] = ARM_NOP;
4988
7ff120b4 4989 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
4990 return 0;
4991}
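For example (an editor's illustration, not from the original file), a BL at 0x8000 whose 24-bit immediate field is 1:

/* insn = 0xeb000001 (bl):  offset = 1 << 2 = 4
   scratch copy: ARM_NOP    dest   = 0x8000 + 8 + 4 = 0x800c
   cleanup_branch then writes LR <- 0x8004 and PC <- 0x800c, the condition
   (AL here) having passed.  */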
4992
34518530
YQ
4993static int
4994thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4995 uint16_t insn2, struct regcache *regs,
4996 struct displaced_step_closure *dsc)
4997{
4998 int link = bit (insn2, 14);
4999 int exchange = link && !bit (insn2, 12);
5000 int cond = INST_AL;
5001 long offset = 0;
5002 int j1 = bit (insn2, 13);
5003 int j2 = bit (insn2, 11);
5004 int s = sbits (insn1, 10, 10);
5005 int i1 = !(j1 ^ bit (insn1, 10));
5006 int i2 = !(j2 ^ bit (insn1, 10));
5007
5008 if (!link && !exchange) /* B */
5009 {
5010 offset = (bits (insn2, 0, 10) << 1);
5011 if (bit (insn2, 12)) /* Encoding T4 */
5012 {
5013 offset |= (bits (insn1, 0, 9) << 12)
5014 | (i2 << 22)
5015 | (i1 << 23)
5016 | (s << 24);
5017 cond = INST_AL;
5018 }
5019 else /* Encoding T3 */
5020 {
5021 offset |= (bits (insn1, 0, 5) << 12)
5022 | (j1 << 18)
5023 | (j2 << 19)
5024 | (s << 20);
5025 cond = bits (insn1, 6, 9);
5026 }
5027 }
5028 else
5029 {
5030 offset = (bits (insn1, 0, 9) << 12);
5031 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5032 offset |= exchange ?
5033 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5034 }
5035
5036 if (debug_displaced)
5037 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5038 "%.4x %.4x with offset %.8lx\n",
5039 link ? (exchange) ? "blx" : "bl" : "b",
5040 insn1, insn2, offset);
5041
5042 dsc->modinsn[0] = THUMB_NOP;
5043
5044 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5045 return 0;
5046}
5047
5048/* Copy B Thumb instructions. */
5049static int
615234c1 5050thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5051 struct displaced_step_closure *dsc)
5052{
5053 unsigned int cond = 0;
5054 int offset = 0;
5055 unsigned short bit_12_15 = bits (insn, 12, 15);
5056 CORE_ADDR from = dsc->insn_addr;
5057
5058 if (bit_12_15 == 0xd)
5059 {
5060 /* offset = SignExtend (imm8:0, 32) */
5061 offset = sbits ((insn << 1), 0, 8);
5062 cond = bits (insn, 8, 11);
5063 }
5064 else if (bit_12_15 == 0xe) /* Encoding T2 */
5065 {
5066 offset = sbits ((insn << 1), 0, 11);
5067 cond = INST_AL;
5068 }
5069
5070 if (debug_displaced)
5071 fprintf_unfiltered (gdb_stdlog,
5072 "displaced: copying b immediate insn %.4x "
5073 "with offset %d\n", insn, offset);
5074
5075 dsc->u.branch.cond = cond;
5076 dsc->u.branch.link = 0;
5077 dsc->u.branch.exchange = 0;
5078 dsc->u.branch.dest = from + 4 + offset;
5079
5080 dsc->modinsn[0] = THUMB_NOP;
5081
5082 dsc->cleanup = &cleanup_branch;
5083
5084 return 0;
5085}
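
/* A short example (address invented): the 16-bit conditional branch

       0x9000:  d003   beq  0x900a

   has bits 12-15 == 0xd, cond = 0x0 (EQ) and imm8 = 3, so
   offset = sbits (insn << 1, 0, 8) = 6 and the recorded destination is
   from + 4 + 6 = 0x900a.  The nop is stepped in the scratch area and
   cleanup_branch only writes the new pc if the Z flag is set.  */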
5086
cca44b1b
JB
5087/* Copy BX/BLX with register-specified destinations. */
5088
7ff120b4
YQ
5089static void
5090install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5091 struct displaced_step_closure *dsc, int link,
5092 unsigned int cond, unsigned int rm)
cca44b1b 5093{
cca44b1b
JB
 5094   /* Implement "{BX,BLX}<cond> <reg>" as:
5095
5096 Preparation: cond <- instruction condition
5097 Insn: mov r0, r0 (nop)
5098 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5099
5100 Don't set r14 in cleanup for BX. */
5101
36073a92 5102 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5103
5104 dsc->u.branch.cond = cond;
5105 dsc->u.branch.link = link;
cca44b1b 5106
7ff120b4 5107 dsc->u.branch.exchange = 1;
cca44b1b
JB
5108
5109 dsc->cleanup = &cleanup_branch;
7ff120b4 5110}
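
/* For instance (the register value is invented for the example):
   stepping "blx r3" when r3 = 0x8db1 records dest = 0x8db1, link = 1
   and exchange = 1.  After the nop has executed, cleanup_branch writes
   the return address into r14 and then writes the destination with BX
   semantics, so bit 0 of the value selects Thumb mode and the pc
   becomes 0x8db0.  */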
cca44b1b 5111
7ff120b4
YQ
5112static int
5113arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5114 struct regcache *regs, struct displaced_step_closure *dsc)
5115{
5116 unsigned int cond = bits (insn, 28, 31);
5117 /* BX: x12xxx1x
5118 BLX: x12xxx3x. */
5119 int link = bit (insn, 5);
5120 unsigned int rm = bits (insn, 0, 3);
5121
5122 if (debug_displaced)
5123 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5124 (unsigned long) insn);
5125
5126 dsc->modinsn[0] = ARM_NOP;
5127
5128 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5129 return 0;
5130}
5131
34518530
YQ
5132static int
5133thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5134 struct regcache *regs,
5135 struct displaced_step_closure *dsc)
5136{
5137 int link = bit (insn, 7);
5138 unsigned int rm = bits (insn, 3, 6);
5139
5140 if (debug_displaced)
5141 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5142 (unsigned short) insn);
5143
5144 dsc->modinsn[0] = THUMB_NOP;
5145
5146 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5147
5148 return 0;
5149}
5150
5151
0963b4bd 5152/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5153
5154static void
6e39997a 5155cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5156 struct regcache *regs, struct displaced_step_closure *dsc)
5157{
36073a92 5158 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5159 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5160 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5161 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5162}
5163
5164static int
7ff120b4
YQ
5165arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5166 struct displaced_step_closure *dsc)
cca44b1b
JB
5167{
5168 unsigned int rn = bits (insn, 16, 19);
5169 unsigned int rd = bits (insn, 12, 15);
5170 unsigned int op = bits (insn, 21, 24);
5171 int is_mov = (op == 0xd);
5172 ULONGEST rd_val, rn_val;
cca44b1b
JB
5173
5174 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5175 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5176
5177 if (debug_displaced)
5178 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5179 "%.8lx\n", is_mov ? "move" : "ALU",
5180 (unsigned long) insn);
5181
5182 /* Instruction is of form:
5183
5184 <op><cond> rd, [rn,] #imm
5185
5186 Rewrite as:
5187
5188 Preparation: tmp1, tmp2 <- r0, r1;
5189 r0, r1 <- rd, rn
5190 Insn: <op><cond> r0, r1, #imm
5191 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5192 */
5193
36073a92
YQ
5194 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5195 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5196 rn_val = displaced_read_reg (regs, dsc, rn);
5197 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5198 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5199 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5200 dsc->rd = rd;
5201
5202 if (is_mov)
5203 dsc->modinsn[0] = insn & 0xfff00fff;
5204 else
5205 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5206
5207 dsc->cleanup = &cleanup_alu_imm;
5208
5209 return 0;
5210}
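
/* Worked example (encoding chosen purely for illustration): the
   PC-relative instruction

       add r2, pc, #4      ; 0xe28f2004

   has rd = 2 and rn = 15.  Preparation saves r0/r1, loads r0 with the
   old value of r2 and r1 with the value the original instruction would
   have seen for the pc (from + 8).  The executed copy is

       (0xe28f2004 & 0xfff00fff) | 0x10000  ==  0xe2810004   ; add r0, r1, #4

   and the cleanup moves r0 back into r2 before restoring r0 and r1.  */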
5211
34518530
YQ
5212static int
5213thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5214 uint16_t insn2, struct regcache *regs,
5215 struct displaced_step_closure *dsc)
5216{
5217 unsigned int op = bits (insn1, 5, 8);
5218 unsigned int rn, rm, rd;
5219 ULONGEST rd_val, rn_val;
5220
5221 rn = bits (insn1, 0, 3); /* Rn */
5222 rm = bits (insn2, 0, 3); /* Rm */
5223 rd = bits (insn2, 8, 11); /* Rd */
5224
 5225   /* This routine is only called for the MOV instruction.  */
5226 gdb_assert (op == 0x2 && rn == 0xf);
5227
5228 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5229 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5230
5231 if (debug_displaced)
5232 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5233 "ALU", insn1, insn2);
5234
5235 /* Instruction is of form:
5236
5237 <op><cond> rd, [rn,] #imm
5238
5239 Rewrite as:
5240
5241 Preparation: tmp1, tmp2 <- r0, r1;
5242 r0, r1 <- rd, rn
5243 Insn: <op><cond> r0, r1, #imm
5244 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5245 */
5246
5247 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5248 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5249 rn_val = displaced_read_reg (regs, dsc, rn);
5250 rd_val = displaced_read_reg (regs, dsc, rd);
5251 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5252 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5253 dsc->rd = rd;
5254
5255 dsc->modinsn[0] = insn1;
5256 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5257 dsc->numinsns = 2;
5258
5259 dsc->cleanup = &cleanup_alu_imm;
5260
5261 return 0;
5262}
5263
cca44b1b
JB
5264/* Copy/cleanup arithmetic/logic insns with register RHS. */
5265
5266static void
6e39997a 5267cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5268 struct regcache *regs, struct displaced_step_closure *dsc)
5269{
5270 ULONGEST rd_val;
5271 int i;
5272
36073a92 5273 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5274
5275 for (i = 0; i < 3; i++)
5276 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5277
5278 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5279}
5280
7ff120b4
YQ
5281static void
5282install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5283 struct displaced_step_closure *dsc,
5284 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5285{
cca44b1b 5286 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5287
cca44b1b
JB
5288 /* Instruction is of form:
5289
5290 <op><cond> rd, [rn,] rm [, <shift>]
5291
5292 Rewrite as:
5293
5294 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5295 r0, r1, r2 <- rd, rn, rm
ef713951 5296 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5297 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5298 */
5299
36073a92
YQ
5300 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5301 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5302 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5303 rd_val = displaced_read_reg (regs, dsc, rd);
5304 rn_val = displaced_read_reg (regs, dsc, rn);
5305 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5306 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5307 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5308 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5309 dsc->rd = rd;
5310
7ff120b4
YQ
5311 dsc->cleanup = &cleanup_alu_reg;
5312}
5313
5314static int
5315arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5316 struct displaced_step_closure *dsc)
5317{
5318 unsigned int op = bits (insn, 21, 24);
5319 int is_mov = (op == 0xd);
5320
5321 if (!insn_references_pc (insn, 0x000ff00ful))
5322 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5323
5324 if (debug_displaced)
5325 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5326 is_mov ? "move" : "ALU", (unsigned long) insn);
5327
cca44b1b
JB
5328 if (is_mov)
5329 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5330 else
5331 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5332
7ff120b4
YQ
5333 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5334 bits (insn, 0, 3));
cca44b1b
JB
5335 return 0;
5336}
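
/* Similarly, for the register form (encoding invented for
   illustration):

       add r2, pc, r3      ; 0xe08f2003

   is rewritten to 0xe0810002 ("add r0, r1, r2") with r1 = from + 8 and
   r2 holding the original value of r3; cleanup_alu_reg then copies r0
   into r2 and restores r0-r2 from the saved temporaries.  */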
5337
34518530
YQ
5338static int
5339thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5340 struct regcache *regs,
5341 struct displaced_step_closure *dsc)
5342{
ef713951 5343 unsigned rm, rd;
34518530 5344
ef713951
YQ
5345 rm = bits (insn, 3, 6);
5346 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5347
ef713951 5348 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5349 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5350
5351 if (debug_displaced)
ef713951
YQ
5352 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5353 (unsigned short) insn);
34518530 5354
ef713951 5355 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5356
ef713951 5357 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5358
5359 return 0;
5360}
5361
cca44b1b
JB
5362/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5363
5364static void
6e39997a 5365cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5366 struct regcache *regs,
5367 struct displaced_step_closure *dsc)
5368{
36073a92 5369 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5370 int i;
5371
5372 for (i = 0; i < 4; i++)
5373 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5374
5375 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5376}
5377
7ff120b4
YQ
5378static void
5379install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5380 struct displaced_step_closure *dsc,
5381 unsigned int rd, unsigned int rn, unsigned int rm,
5382 unsigned rs)
cca44b1b 5383{
7ff120b4 5384 int i;
cca44b1b 5385 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5386
cca44b1b
JB
5387 /* Instruction is of form:
5388
5389 <op><cond> rd, [rn,] rm, <shift> rs
5390
5391 Rewrite as:
5392
5393 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5394 r0, r1, r2, r3 <- rd, rn, rm, rs
5395 Insn: <op><cond> r0, r1, r2, <shift> r3
5396 Cleanup: tmp5 <- r0
5397 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5398 rd <- tmp5
5399 */
5400
5401 for (i = 0; i < 4; i++)
36073a92 5402 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5403
36073a92
YQ
5404 rd_val = displaced_read_reg (regs, dsc, rd);
5405 rn_val = displaced_read_reg (regs, dsc, rn);
5406 rm_val = displaced_read_reg (regs, dsc, rm);
5407 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5408 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5409 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5410 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5411 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5412 dsc->rd = rd;
7ff120b4
YQ
5413 dsc->cleanup = &cleanup_alu_shifted_reg;
5414}
5415
5416static int
5417arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5418 struct regcache *regs,
5419 struct displaced_step_closure *dsc)
5420{
5421 unsigned int op = bits (insn, 21, 24);
5422 int is_mov = (op == 0xd);
5423 unsigned int rd, rn, rm, rs;
5424
5425 if (!insn_references_pc (insn, 0x000fff0ful))
5426 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5427
5428 if (debug_displaced)
5429 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5430 "%.8lx\n", is_mov ? "move" : "ALU",
5431 (unsigned long) insn);
5432
5433 rn = bits (insn, 16, 19);
5434 rm = bits (insn, 0, 3);
5435 rs = bits (insn, 8, 11);
5436 rd = bits (insn, 12, 15);
cca44b1b
JB
5437
5438 if (is_mov)
5439 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5440 else
5441 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5442
7ff120b4 5443 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5444
5445 return 0;
5446}
5447
5448/* Clean up load instructions. */
5449
5450static void
6e39997a 5451cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5452 struct displaced_step_closure *dsc)
5453{
5454 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5455
36073a92 5456 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5457 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5458 rt_val2 = displaced_read_reg (regs, dsc, 1);
5459 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5460
5461 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5462 if (dsc->u.ldst.xfersize > 4)
5463 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5464 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5465 if (!dsc->u.ldst.immed)
5466 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5467
5468 /* Handle register writeback. */
5469 if (dsc->u.ldst.writeback)
5470 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5471 /* Put result in right place. */
5472 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5473 if (dsc->u.ldst.xfersize == 8)
5474 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5475}
5476
5477/* Clean up store instructions. */
5478
5479static void
6e39997a 5480cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5481 struct displaced_step_closure *dsc)
5482{
36073a92 5483 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5484
5485 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5486 if (dsc->u.ldst.xfersize > 4)
5487 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5488 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5489 if (!dsc->u.ldst.immed)
5490 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5491 if (!dsc->u.ldst.restore_r4)
5492 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5493
5494 /* Writeback. */
5495 if (dsc->u.ldst.writeback)
5496 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5497}
5498
5499/* Copy "extra" load/store instructions. These are halfword/doubleword
5500 transfers, which have a different encoding to byte/word transfers. */
5501
5502static int
550dc4e2 5503arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5504 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5505{
5506 unsigned int op1 = bits (insn, 20, 24);
5507 unsigned int op2 = bits (insn, 5, 6);
5508 unsigned int rt = bits (insn, 12, 15);
5509 unsigned int rn = bits (insn, 16, 19);
5510 unsigned int rm = bits (insn, 0, 3);
5511 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5512 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5513 int immed = (op1 & 0x4) != 0;
5514 int opcode;
5515 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5516
5517 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5518 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5519
5520 if (debug_displaced)
5521 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5522 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5523 (unsigned long) insn);
5524
5525 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5526
5527 if (opcode < 0)
5528 internal_error (__FILE__, __LINE__,
5529 _("copy_extra_ld_st: instruction decode error"));
5530
36073a92
YQ
5531 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5532 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5533 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5534 if (!immed)
36073a92 5535 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5536
36073a92 5537 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5538 if (bytesize[opcode] == 8)
36073a92
YQ
5539 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5540 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5541 if (!immed)
36073a92 5542 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5543
5544 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5545 if (bytesize[opcode] == 8)
5546 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5547 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5548 if (!immed)
5549 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5550
5551 dsc->rd = rt;
5552 dsc->u.ldst.xfersize = bytesize[opcode];
5553 dsc->u.ldst.rn = rn;
5554 dsc->u.ldst.immed = immed;
5555 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5556 dsc->u.ldst.restore_r4 = 0;
5557
5558 if (immed)
5559 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5560 ->
5561 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5562 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5563 else
5564 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5565 ->
5566 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5567 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5568
5569 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5570
5571 return 0;
5572}
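
/* Decoding example (encoding invented for illustration): for

       ldrh r1, [pc, #6]      ; 0xe1df10b6

   op1 = 0x1d and op2 = 0x1, so opcode = 3, load[3] = 1 and
   bytesize[3] = 2 (a halfword load).  r2 is preloaded with from + 8
   and the executed copy is

       (0xe1df10b6 & 0xfff00fff) | 0x20000  ==  0xe1d200b6   ; ldrh r0, [r2, #6]

   cleanup_load then moves the loaded halfword from r0 into r1 and
   restores the scratch registers.  */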
5573
0f6f04ba 5574/* Copy byte/half word/word loads and stores. */
cca44b1b 5575
7ff120b4 5576static void
0f6f04ba
YQ
5577install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5578 struct displaced_step_closure *dsc, int load,
5579 int immed, int writeback, int size, int usermode,
5580 int rt, int rm, int rn)
cca44b1b 5581{
cca44b1b 5582 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5583
36073a92
YQ
5584 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5585 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5586 if (!immed)
36073a92 5587 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5588 if (!load)
36073a92 5589 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5590
36073a92
YQ
5591 rt_val = displaced_read_reg (regs, dsc, rt);
5592 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5593 if (!immed)
36073a92 5594 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5595
5596 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5597 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5598 if (!immed)
5599 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5600 dsc->rd = rt;
0f6f04ba 5601 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5602 dsc->u.ldst.rn = rn;
5603 dsc->u.ldst.immed = immed;
7ff120b4 5604 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5605
5606 /* To write PC we can do:
5607
494e194e
YQ
5608 Before this sequence of instructions:
 5609      r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
 5610      r2 is the Rn value obtained from displaced_read_reg.
5611
5612 Insn1: push {pc} Write address of STR instruction + offset on stack
5613 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5614 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5615 = addr(Insn1) + offset - addr(Insn3) - 8
5616 = offset - 16
5617 Insn4: add r4, r4, #8 r4 = offset - 8
5618 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5619 = from + offset
5620 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5621
5622 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5623 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5624 of this can be found in Section "Saving from r15" in
5625 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5626
7ff120b4
YQ
5627 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5628}
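
/* Plugging concrete numbers into the sequence above (all of them
   invented for the example): if an original "str pc, [r5]" sits at
   from = 0x8000 and its copy starts at scratch address 0x20000, then
   after Insn1 and Insn2 r4 holds 0x20000 + k, where k is this core's
   store offset for the pc (8 or 12).  Insn3 executes at 0x20008 and
   reads the pc as 0x20010, so r4 becomes k - 16; Insn4 makes it k - 8,
   and Insn5 leaves r0 = (from + 8) + (k - 8) = from + k, exactly the
   value the undisplaced store would have written.  */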
5629
34518530
YQ
5630
5631static int
5632thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5633 uint16_t insn2, struct regcache *regs,
5634 struct displaced_step_closure *dsc, int size)
5635{
5636 unsigned int u_bit = bit (insn1, 7);
5637 unsigned int rt = bits (insn2, 12, 15);
5638 int imm12 = bits (insn2, 0, 11);
5639 ULONGEST pc_val;
5640
5641 if (debug_displaced)
5642 fprintf_unfiltered (gdb_stdlog,
5643 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5644 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5645 imm12);
5646
5647 if (!u_bit)
5648 imm12 = -1 * imm12;
5649
5650 /* Rewrite instruction LDR Rt imm12 into:
5651
 5652     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
 5653
 5654     LDR R0, [R2, R3]
 5655
 5656     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */
5657
5658
5659 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5660 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5661 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5662
5663 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5664
5665 pc_val = pc_val & 0xfffffffc;
5666
5667 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5668 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5669
5670 dsc->rd = rt;
5671
5672 dsc->u.ldst.xfersize = size;
5673 dsc->u.ldst.immed = 0;
5674 dsc->u.ldst.writeback = 0;
5675 dsc->u.ldst.restore_r4 = 0;
5676
 5677  /* LDR R0, [R2, R3] */
5678 dsc->modinsn[0] = 0xf852;
5679 dsc->modinsn[1] = 0x3;
5680 dsc->numinsns = 2;
5681
5682 dsc->cleanup = &cleanup_load;
5683
5684 return 0;
5685}
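
/* For example (address invented): stepping "ldr.w r1, [pc, #8]" at
   0x9000 sets r2 = 0x9004 & ~3 = 0x9004, r3 = 8 and rt = 1, and the
   out-of-line copy is the register form 0xf852 0x0003, i.e.
   "ldr.w r0, [r2, r3]".  cleanup_load then moves the loaded word from
   r0 into r1 and restores r0, r2 and r3.  */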
5686
5687static int
5688thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5689 uint16_t insn2, struct regcache *regs,
5690 struct displaced_step_closure *dsc,
5691 int writeback, int immed)
5692{
5693 unsigned int rt = bits (insn2, 12, 15);
5694 unsigned int rn = bits (insn1, 0, 3);
5695 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5696 /* In LDR (register), there is also a register Rm, which is not allowed to
5697 be PC, so we don't have to check it. */
5698
5699 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5700 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5701 dsc);
5702
5703 if (debug_displaced)
5704 fprintf_unfiltered (gdb_stdlog,
5705 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5706 rt, rn, insn1, insn2);
5707
5708 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5709 0, rt, rm, rn);
5710
5711 dsc->u.ldst.restore_r4 = 0;
5712
5713 if (immed)
5714 /* ldr[b]<cond> rt, [rn, #imm], etc.
5715 ->
5716 ldr[b]<cond> r0, [r2, #imm]. */
5717 {
5718 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5719 dsc->modinsn[1] = insn2 & 0x0fff;
5720 }
5721 else
5722 /* ldr[b]<cond> rt, [rn, rm], etc.
5723 ->
5724 ldr[b]<cond> r0, [r2, r3]. */
5725 {
5726 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5727 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5728 }
5729
5730 dsc->numinsns = 2;
5731
5732 return 0;
5733}
5734
5735
7ff120b4
YQ
5736static int
5737arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5738 struct regcache *regs,
5739 struct displaced_step_closure *dsc,
0f6f04ba 5740 int load, int size, int usermode)
7ff120b4
YQ
5741{
5742 int immed = !bit (insn, 25);
5743 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5744 unsigned int rt = bits (insn, 12, 15);
5745 unsigned int rn = bits (insn, 16, 19);
5746 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5747
5748 if (!insn_references_pc (insn, 0x000ff00ful))
5749 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5750
5751 if (debug_displaced)
5752 fprintf_unfiltered (gdb_stdlog,
5753 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5754 load ? (size == 1 ? "ldrb" : "ldr")
5755 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5756 rt, rn,
5757 (unsigned long) insn);
5758
0f6f04ba
YQ
5759 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5760 usermode, rt, rm, rn);
7ff120b4 5761
bf9f652a 5762 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5763 {
5764 dsc->u.ldst.restore_r4 = 0;
5765
5766 if (immed)
5767 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5768 ->
5769 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5770 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5771 else
5772 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5773 ->
5774 {ldr,str}[b]<cond> r0, [r2, r3]. */
5775 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5776 }
5777 else
5778 {
5779 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5780 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5781 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5782 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5783 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5784 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5785 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5786
5787 /* As above. */
5788 if (immed)
5789 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5790 else
5791 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5792
cca44b1b
JB
5793 dsc->numinsns = 6;
5794 }
5795
5796 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5797
5798 return 0;
5799}
5800
5801/* Cleanup LDM instructions with fully-populated register list. This is an
5802 unfortunate corner case: it's impossible to implement correctly by modifying
5803 the instruction. The issue is as follows: we have an instruction,
5804
5805 ldm rN, {r0-r15}
5806
5807 which we must rewrite to avoid loading PC. A possible solution would be to
5808 do the load in two halves, something like (with suitable cleanup
5809 afterwards):
5810
5811 mov r8, rN
5812 ldm[id][ab] r8!, {r0-r7}
5813 str r7, <temp>
5814 ldm[id][ab] r8, {r7-r14}
5815 <bkpt>
5816
5817 but at present there's no suitable place for <temp>, since the scratch space
5818 is overwritten before the cleanup routine is called. For now, we simply
5819 emulate the instruction. */
5820
5821static void
5822cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5823 struct displaced_step_closure *dsc)
5824{
cca44b1b
JB
5825 int inc = dsc->u.block.increment;
5826 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5827 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5828 uint32_t regmask = dsc->u.block.regmask;
5829 int regno = inc ? 0 : 15;
5830 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5831 int exception_return = dsc->u.block.load && dsc->u.block.user
5832 && (regmask & 0x8000) != 0;
36073a92 5833 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5834 int do_transfer = condition_true (dsc->u.block.cond, status);
5835 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5836
5837 if (!do_transfer)
5838 return;
5839
5840 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5841 sensible we can do here. Complain loudly. */
5842 if (exception_return)
5843 error (_("Cannot single-step exception return"));
5844
5845 /* We don't handle any stores here for now. */
5846 gdb_assert (dsc->u.block.load != 0);
5847
5848 if (debug_displaced)
5849 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5850 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5851 dsc->u.block.increment ? "inc" : "dec",
5852 dsc->u.block.before ? "before" : "after");
5853
5854 while (regmask)
5855 {
5856 uint32_t memword;
5857
5858 if (inc)
bf9f652a 5859 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5860 regno++;
5861 else
5862 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5863 regno--;
5864
5865 xfer_addr += bump_before;
5866
5867 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5868 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5869
5870 xfer_addr += bump_after;
5871
5872 regmask &= ~(1 << regno);
5873 }
5874
5875 if (dsc->u.block.writeback)
5876 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5877 CANNOT_WRITE_PC);
5878}
5879
5880/* Clean up an STM which included the PC in the register list. */
5881
5882static void
5883cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5884 struct displaced_step_closure *dsc)
5885{
36073a92 5886 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5887 int store_executed = condition_true (dsc->u.block.cond, status);
5888 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5889 CORE_ADDR stm_insn_addr;
5890 uint32_t pc_val;
5891 long offset;
5892 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5893
5894 /* If condition code fails, there's nothing else to do. */
5895 if (!store_executed)
5896 return;
5897
5898 if (dsc->u.block.increment)
5899 {
5900 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5901
5902 if (dsc->u.block.before)
5903 pc_stored_at += 4;
5904 }
5905 else
5906 {
5907 pc_stored_at = dsc->u.block.xfer_addr;
5908
5909 if (dsc->u.block.before)
5910 pc_stored_at -= 4;
5911 }
5912
5913 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5914 stm_insn_addr = dsc->scratch_base;
5915 offset = pc_val - stm_insn_addr;
5916
5917 if (debug_displaced)
5918 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5919 "STM instruction\n", offset);
5920
5921 /* Rewrite the stored PC to the proper value for the non-displaced original
5922 instruction. */
5923 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5924 dsc->insn_addr + offset);
5925}
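
/* Concretely (addresses invented for the example): if the original STM
   lives at insn_addr = 0x8000 and its copy runs at scratch_base =
   0x20000, the out-of-line store writes 0x20000 + k into the pc slot,
   where k is this core's store offset (8 or 12).  Reading that word
   back gives offset = k, and the word is then patched to 0x8000 + k,
   which is what the instruction would have stored had it not been
   displaced.  */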
5926
5927/* Clean up an LDM which includes the PC in the register list. We clumped all
5928 the registers in the transferred list into a contiguous range r0...rX (to
5929 avoid loading PC directly and losing control of the debugged program), so we
5930 must undo that here. */
5931
5932static void
6e39997a 5933cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5934 struct regcache *regs,
5935 struct displaced_step_closure *dsc)
5936{
36073a92 5937 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5938 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5939 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5940 unsigned int regs_loaded = bitcount (mask);
5941 unsigned int num_to_shuffle = regs_loaded, clobbered;
5942
5943 /* The method employed here will fail if the register list is fully populated
5944 (we need to avoid loading PC directly). */
5945 gdb_assert (num_to_shuffle < 16);
5946
5947 if (!load_executed)
5948 return;
5949
5950 clobbered = (1 << num_to_shuffle) - 1;
5951
5952 while (num_to_shuffle > 0)
5953 {
5954 if ((mask & (1 << write_reg)) != 0)
5955 {
5956 unsigned int read_reg = num_to_shuffle - 1;
5957
5958 if (read_reg != write_reg)
5959 {
36073a92 5960 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
5961 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5962 if (debug_displaced)
5963 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5964 "loaded register r%d to r%d\n"), read_reg,
5965 write_reg);
5966 }
5967 else if (debug_displaced)
5968 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5969 "r%d already in the right place\n"),
5970 write_reg);
5971
5972 clobbered &= ~(1 << write_reg);
5973
5974 num_to_shuffle--;
5975 }
5976
5977 write_reg--;
5978 }
5979
5980 /* Restore any registers we scribbled over. */
5981 for (write_reg = 0; clobbered != 0; write_reg++)
5982 {
5983 if ((clobbered & (1 << write_reg)) != 0)
5984 {
5985 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5986 CANNOT_WRITE_PC);
5987 if (debug_displaced)
5988 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5989 "clobbered register r%d\n"), write_reg);
5990 clobbered &= ~(1 << write_reg);
5991 }
5992 }
5993
5994 /* Perform register writeback manually. */
5995 if (dsc->u.block.writeback)
5996 {
5997 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5998
5999 if (dsc->u.block.increment)
6000 new_rn_val += regs_loaded * 4;
6001 else
6002 new_rn_val -= regs_loaded * 4;
6003
6004 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6005 CANNOT_WRITE_PC);
6006 }
6007}
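
/* To make the shuffle concrete (register list invented for the
   example): for an original "ldm r6, {r1, r5, pc}" the executed copy
   is "ldm r6, {r0, r1, r2}".  Walking write_reg downwards, the cleanup
   moves r2 (the word meant for the pc) into the pc, r1 into r5 and r0
   into r1, then restores r0 and r2 from the saved temporaries; if
   writeback had been requested, r6 would finally be set to
   xfer_addr + 12.  */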
6008
6009/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6010 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6011
6012static int
7ff120b4
YQ
6013arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6014 struct regcache *regs,
6015 struct displaced_step_closure *dsc)
cca44b1b
JB
6016{
6017 int load = bit (insn, 20);
6018 int user = bit (insn, 22);
6019 int increment = bit (insn, 23);
6020 int before = bit (insn, 24);
6021 int writeback = bit (insn, 21);
6022 int rn = bits (insn, 16, 19);
cca44b1b 6023
0963b4bd
MS
6024 /* Block transfers which don't mention PC can be run directly
6025 out-of-line. */
bf9f652a 6026 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6027 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6028
bf9f652a 6029 if (rn == ARM_PC_REGNUM)
cca44b1b 6030 {
0963b4bd
MS
6031 warning (_("displaced: Unpredictable LDM or STM with "
6032 "base register r15"));
7ff120b4 6033 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6034 }
6035
6036 if (debug_displaced)
6037 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6038 "%.8lx\n", (unsigned long) insn);
6039
36073a92 6040 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6041 dsc->u.block.rn = rn;
6042
6043 dsc->u.block.load = load;
6044 dsc->u.block.user = user;
6045 dsc->u.block.increment = increment;
6046 dsc->u.block.before = before;
6047 dsc->u.block.writeback = writeback;
6048 dsc->u.block.cond = bits (insn, 28, 31);
6049
6050 dsc->u.block.regmask = insn & 0xffff;
6051
6052 if (load)
6053 {
6054 if ((insn & 0xffff) == 0xffff)
6055 {
6056 /* LDM with a fully-populated register list. This case is
6057 particularly tricky. Implement for now by fully emulating the
6058 instruction (which might not behave perfectly in all cases, but
6059 these instructions should be rare enough for that not to matter
6060 too much). */
6061 dsc->modinsn[0] = ARM_NOP;
6062
6063 dsc->cleanup = &cleanup_block_load_all;
6064 }
6065 else
6066 {
6067 /* LDM of a list of registers which includes PC. Implement by
6068 rewriting the list of registers to be transferred into a
6069 contiguous chunk r0...rX before doing the transfer, then shuffling
6070 registers into the correct places in the cleanup routine. */
6071 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6072 unsigned int num_in_list = bitcount (regmask), new_regmask;
6073 unsigned int i;
cca44b1b
JB
6074
6075 for (i = 0; i < num_in_list; i++)
36073a92 6076 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6077
6078 /* Writeback makes things complicated. We need to avoid clobbering
6079 the base register with one of the registers in our modified
6080 register list, but just using a different register can't work in
6081 all cases, e.g.:
6082
6083 ldm r14!, {r0-r13,pc}
6084
6085 which would need to be rewritten as:
6086
6087 ldm rN!, {r0-r14}
6088
6089 but that can't work, because there's no free register for N.
6090
6091 Solve this by turning off the writeback bit, and emulating
6092 writeback manually in the cleanup routine. */
6093
6094 if (writeback)
6095 insn &= ~(1 << 21);
6096
6097 new_regmask = (1 << num_in_list) - 1;
6098
6099 if (debug_displaced)
6100 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6101 "{..., pc}: original reg list %.4x, modified "
6102 "list %.4x\n"), rn, writeback ? "!" : "",
6103 (int) insn & 0xffff, new_regmask);
6104
6105 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6106
6107 dsc->cleanup = &cleanup_block_load_pc;
6108 }
6109 }
6110 else
6111 {
6112 /* STM of a list of registers which includes PC. Run the instruction
6113 as-is, but out of line: this will store the wrong value for the PC,
6114 so we must manually fix up the memory in the cleanup routine.
6115 Doing things this way has the advantage that we can auto-detect
6116 the offset of the PC write (which is architecture-dependent) in
6117 the cleanup routine. */
6118 dsc->modinsn[0] = insn;
6119
6120 dsc->cleanup = &cleanup_block_store_pc;
6121 }
6122
6123 return 0;
6124}
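
/* A typical epilogue illustrates the whole path (shown only as an
   example): "ldm sp!, {r4, r5, pc}" is 0xe8bd8030.  The writeback bit
   is cleared and the register list replaced, so the scratch copy is
   0xe89d0007 ("ldm sp, {r0, r1, r2}"); cleanup_block_load_pc then
   moves r2 into the pc, r1 into r5 and r0 into r4, restores r0-r2 from
   the saved temporaries, and performs the sp += 12 writeback by
   hand.  */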
6125
34518530
YQ
6126static int
6127thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6128 struct regcache *regs,
6129 struct displaced_step_closure *dsc)
cca44b1b 6130{
34518530
YQ
6131 int rn = bits (insn1, 0, 3);
6132 int load = bit (insn1, 4);
6133 int writeback = bit (insn1, 5);
cca44b1b 6134
34518530
YQ
6135 /* Block transfers which don't mention PC can be run directly
6136 out-of-line. */
6137 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6138 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6139
34518530
YQ
6140 if (rn == ARM_PC_REGNUM)
6141 {
6142 warning (_("displaced: Unpredictable LDM or STM with "
6143 "base register r15"));
6144 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6145 "unpredictable ldm/stm", dsc);
6146 }
cca44b1b
JB
6147
6148 if (debug_displaced)
34518530
YQ
6149 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6150 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6151
34518530
YQ
 6152  /* Clear bit 13, since it should always be zero.  */
6153 dsc->u.block.regmask = (insn2 & 0xdfff);
6154 dsc->u.block.rn = rn;
cca44b1b 6155
34518530
YQ
6156 dsc->u.block.load = load;
6157 dsc->u.block.user = 0;
6158 dsc->u.block.increment = bit (insn1, 7);
6159 dsc->u.block.before = bit (insn1, 8);
6160 dsc->u.block.writeback = writeback;
6161 dsc->u.block.cond = INST_AL;
6162 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6163
34518530
YQ
6164 if (load)
6165 {
6166 if (dsc->u.block.regmask == 0xffff)
6167 {
 6168	  /* This case is unreachable: bit 13 was cleared above, so the
	     register mask can never be 0xffff.  */
6169 gdb_assert (0);
6170 }
6171 else
6172 {
6173 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6174 unsigned int num_in_list = bitcount (regmask), new_regmask;
6175 unsigned int i;
34518530
YQ
6176
6177 for (i = 0; i < num_in_list; i++)
6178 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6179
6180 if (writeback)
6181 insn1 &= ~(1 << 5);
6182
6183 new_regmask = (1 << num_in_list) - 1;
6184
6185 if (debug_displaced)
6186 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6187 "{..., pc}: original reg list %.4x, modified "
6188 "list %.4x\n"), rn, writeback ? "!" : "",
6189 (int) dsc->u.block.regmask, new_regmask);
6190
6191 dsc->modinsn[0] = insn1;
6192 dsc->modinsn[1] = (new_regmask & 0xffff);
6193 dsc->numinsns = 2;
6194
6195 dsc->cleanup = &cleanup_block_load_pc;
6196 }
6197 }
6198 else
6199 {
6200 dsc->modinsn[0] = insn1;
6201 dsc->modinsn[1] = insn2;
6202 dsc->numinsns = 2;
6203 dsc->cleanup = &cleanup_block_store_pc;
6204 }
6205 return 0;
6206}
6207
d9311bfa
AT
6208/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6209 This is used to avoid a dependency on BFD's bfd_endian enum. */
6210
6211ULONGEST
6212arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6213 int byte_order)
6214{
5f2dfcfd
AT
6215 return read_memory_unsigned_integer (memaddr, len,
6216 (enum bfd_endian) byte_order);
d9311bfa
AT
6217}
6218
6219/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6220
6221CORE_ADDR
6222arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6223 CORE_ADDR val)
6224{
6225 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6226}
6227
6228/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6229
e7cf25a8 6230static CORE_ADDR
553cb527 6231arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6232{
d9311bfa
AT
6233 return 0;
6234}
6235
6236/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6237
6238int
6239arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6240{
6241 return arm_is_thumb (self->regcache);
6242}
6243
6244/* This function is called just before we want to resume the inferior, if we
6245   want to single-step it but there is no hardware or kernel single-step
6246   support.  We find the targets of the upcoming instructions and set
6247   breakpoints on them.  */
6248
a0ff9e1a 6249std::vector<CORE_ADDR>
f5ea389a 6250arm_software_single_step (struct regcache *regcache)
d9311bfa 6251{
d9311bfa 6252 struct gdbarch *gdbarch = get_regcache_arch (regcache);
d9311bfa 6253 struct arm_get_next_pcs next_pcs_ctx;
d9311bfa
AT
6254
6255 arm_get_next_pcs_ctor (&next_pcs_ctx,
6256 &arm_get_next_pcs_ops,
6257 gdbarch_byte_order (gdbarch),
6258 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6259 0,
d9311bfa
AT
6260 regcache);
6261
a0ff9e1a 6262 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa 6263
a0ff9e1a
SM
6264 for (CORE_ADDR &pc_ref : next_pcs)
6265 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
d9311bfa 6266
93f9a11f 6267 return next_pcs;
d9311bfa
AT
6268}
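
/* For instance, when the next instruction is an ordinary conditional
   branch, arm_get_next_pcs evaluates the condition against the current
   flags and the vector holds the single resulting address; for
   ldrex/strex atomic sequences it may hold more than one candidate.
   The caller then plants a software single-step breakpoint at each
   returned address.  */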
6269
34518530
YQ
6270/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6271 for Linux, where some SVC instructions must be treated specially. */
6272
6273static void
6274cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6275 struct displaced_step_closure *dsc)
6276{
6277 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6278
6279 if (debug_displaced)
6280 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6281 "%.8lx\n", (unsigned long) resume_addr);
6282
6283 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6284}
6285
6286
6287/* Common copy routine for the SVC instruction.  */
6288
6289static int
6290install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6291 struct displaced_step_closure *dsc)
6292{
6293 /* Preparation: none.
6294 Insn: unmodified svc.
6295 Cleanup: pc <- insn_addr + insn_size. */
6296
6297 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6298 instruction. */
6299 dsc->wrote_to_pc = 1;
6300
6301 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6302 if (dsc->u.svc.copy_svc_os)
6303 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6304 else
6305 {
6306 dsc->cleanup = &cleanup_svc;
6307 return 0;
6308 }
34518530
YQ
6309}
6310
6311static int
6312arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6313 struct regcache *regs, struct displaced_step_closure *dsc)
6314{
6315
6316 if (debug_displaced)
6317 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6318 (unsigned long) insn);
6319
6320 dsc->modinsn[0] = insn;
6321
6322 return install_svc (gdbarch, regs, dsc);
6323}
6324
6325static int
6326thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6327 struct regcache *regs, struct displaced_step_closure *dsc)
6328{
6329
6330 if (debug_displaced)
6331 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6332 insn);
bd18283a 6333
34518530
YQ
6334 dsc->modinsn[0] = insn;
6335
6336 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6337}
6338
6339/* Copy undefined instructions. */
6340
6341static int
7ff120b4
YQ
6342arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6343 struct displaced_step_closure *dsc)
cca44b1b
JB
6344{
6345 if (debug_displaced)
0963b4bd
MS
6346 fprintf_unfiltered (gdb_stdlog,
6347 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6348 (unsigned long) insn);
6349
6350 dsc->modinsn[0] = insn;
6351
6352 return 0;
6353}
6354
34518530
YQ
6355static int
6356thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6357 struct displaced_step_closure *dsc)
6358{
6359
6360 if (debug_displaced)
6361 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6362 "%.4x %.4x\n", (unsigned short) insn1,
6363 (unsigned short) insn2);
6364
6365 dsc->modinsn[0] = insn1;
6366 dsc->modinsn[1] = insn2;
6367 dsc->numinsns = 2;
6368
6369 return 0;
6370}
6371
cca44b1b
JB
6372/* Copy unpredictable instructions. */
6373
6374static int
7ff120b4
YQ
6375arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6376 struct displaced_step_closure *dsc)
cca44b1b
JB
6377{
6378 if (debug_displaced)
6379 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6380 "%.8lx\n", (unsigned long) insn);
6381
6382 dsc->modinsn[0] = insn;
6383
6384 return 0;
6385}
6386
6387/* The decode_* functions are instruction decoding helpers. They mostly follow
6388 the presentation in the ARM ARM. */
6389
6390static int
7ff120b4
YQ
6391arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6392 struct regcache *regs,
6393 struct displaced_step_closure *dsc)
cca44b1b
JB
6394{
6395 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6396 unsigned int rn = bits (insn, 16, 19);
6397
2f924de6 6398 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7ff120b4 6399 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
2f924de6 6400 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7ff120b4 6401 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6402 else if ((op1 & 0x60) == 0x20)
7ff120b4 6403 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6404 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6405 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6406 dsc);
cca44b1b 6407 else if ((op1 & 0x77) == 0x41)
7ff120b4 6408 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6409 else if ((op1 & 0x77) == 0x45)
7ff120b4 6410 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6411 else if ((op1 & 0x77) == 0x51)
6412 {
6413 if (rn != 0xf)
7ff120b4 6414 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6415 else
7ff120b4 6416 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6417 }
6418 else if ((op1 & 0x77) == 0x55)
7ff120b4 6419 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6420 else if (op1 == 0x57)
6421 switch (op2)
6422 {
7ff120b4
YQ
6423 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6424 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6425 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6426 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6427 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6428 }
6429 else if ((op1 & 0x63) == 0x43)
7ff120b4 6430 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6431 else if ((op2 & 0x1) == 0x0)
6432 switch (op1 & ~0x80)
6433 {
6434 case 0x61:
7ff120b4 6435 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6436 case 0x65:
7ff120b4 6437 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6438 case 0x71: case 0x75:
6439 /* pld/pldw reg. */
7ff120b4 6440 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6441 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6442 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6443 default:
7ff120b4 6444 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6445 }
6446 else
7ff120b4 6447 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6448}
6449
6450static int
7ff120b4
YQ
6451arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6452 struct regcache *regs,
6453 struct displaced_step_closure *dsc)
cca44b1b
JB
6454{
6455 if (bit (insn, 27) == 0)
7ff120b4 6456 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6457 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6458 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6459 {
6460 case 0x0: case 0x2:
7ff120b4 6461 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6462
6463 case 0x1: case 0x3:
7ff120b4 6464 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6465
6466 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6467 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6468
6469 case 0x8:
6470 switch ((insn & 0xe00000) >> 21)
6471 {
6472 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6473 /* stc/stc2. */
7ff120b4 6474 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6475
6476 case 0x2:
7ff120b4 6477 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6478
6479 default:
7ff120b4 6480 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6481 }
6482
6483 case 0x9:
6484 {
6485 int rn_f = (bits (insn, 16, 19) == 0xf);
6486 switch ((insn & 0xe00000) >> 21)
6487 {
6488 case 0x1: case 0x3:
6489 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6490 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6491 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6492
6493 case 0x2:
7ff120b4 6494 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6495
6496 case 0x4: case 0x5: case 0x6: case 0x7:
6497 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6498 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6499 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6500
6501 default:
7ff120b4 6502 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6503 }
6504 }
6505
6506 case 0xa:
7ff120b4 6507 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6508
6509 case 0xb:
6510 if (bits (insn, 16, 19) == 0xf)
6511 /* ldc/ldc2 lit. */
7ff120b4 6512 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6513 else
7ff120b4 6514 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6515
6516 case 0xc:
6517 if (bit (insn, 4))
7ff120b4 6518 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6519 else
7ff120b4 6520 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6521
6522 case 0xd:
6523 if (bit (insn, 4))
7ff120b4 6524 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6525 else
7ff120b4 6526 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6527
6528 default:
7ff120b4 6529 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6530 }
6531}
6532
6533/* Decode miscellaneous instructions in dp/misc encoding space. */
6534
6535static int
7ff120b4
YQ
6536arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6537 struct regcache *regs,
6538 struct displaced_step_closure *dsc)
cca44b1b
JB
6539{
6540 unsigned int op2 = bits (insn, 4, 6);
6541 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6542
6543 switch (op2)
6544 {
6545 case 0x0:
7ff120b4 6546 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6547
6548 case 0x1:
6549 if (op == 0x1) /* bx. */
7ff120b4 6550 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6551 else if (op == 0x3)
7ff120b4 6552 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6553 else
7ff120b4 6554 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6555
6556 case 0x2:
6557 if (op == 0x1)
6558 /* Not really supported. */
7ff120b4 6559 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6560 else
7ff120b4 6561 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6562
6563 case 0x3:
6564 if (op == 0x1)
7ff120b4 6565 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6566 regs, dsc); /* blx register. */
cca44b1b 6567 else
7ff120b4 6568 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6569
6570 case 0x5:
7ff120b4 6571 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6572
6573 case 0x7:
6574 if (op == 0x1)
7ff120b4 6575 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6576 else if (op == 0x3)
6577 /* Not really supported. */
7ff120b4 6578 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6579
6580 default:
7ff120b4 6581 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6582 }
6583}
6584
6585static int
7ff120b4
YQ
6586arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6587 struct regcache *regs,
6588 struct displaced_step_closure *dsc)
cca44b1b
JB
6589{
6590 if (bit (insn, 25))
6591 switch (bits (insn, 20, 24))
6592 {
6593 case 0x10:
7ff120b4 6594 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6595
6596 case 0x14:
7ff120b4 6597 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6598
6599 case 0x12: case 0x16:
7ff120b4 6600 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6601
6602 default:
7ff120b4 6603 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6604 }
6605 else
6606 {
6607 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6608
6609 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6610 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6611 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6612 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6613 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6614 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6615 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6616 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6617 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6618 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6619 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6620 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6621 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6622 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6623 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6624 dsc);
cca44b1b
JB
6625 }
6626
6627 /* Should be unreachable. */
6628 return 1;
6629}
6630
6631static int
7ff120b4
YQ
6632arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6633 struct regcache *regs,
6634 struct displaced_step_closure *dsc)
cca44b1b
JB
6635{
6636 int a = bit (insn, 25), b = bit (insn, 4);
6637 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6638
6639 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6640 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6641 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6642 else if ((!a && (op1 & 0x17) == 0x02)
6643 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6644 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6645 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6646 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6647 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6648 else if ((!a && (op1 & 0x17) == 0x03)
6649 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6650 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6651 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6652 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6653 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6654 else if ((!a && (op1 & 0x17) == 0x06)
6655 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6656 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6657 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6658 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6659 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6660 else if ((!a && (op1 & 0x17) == 0x07)
6661 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6662 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6663
6664 /* Should be unreachable. */
6665 return 1;
6666}
6667
6668static int
7ff120b4
YQ
6669arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6670 struct displaced_step_closure *dsc)
cca44b1b
JB
6671{
6672 switch (bits (insn, 20, 24))
6673 {
6674 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6675 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6676
6677 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6678 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6679
6680 case 0x08: case 0x09: case 0x0a: case 0x0b:
6681 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6682 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6683 "decode/pack/unpack/saturate/reverse", dsc);
6684
6685 case 0x18:
6686 if (bits (insn, 5, 7) == 0) /* op2. */
6687 {
6688 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6689 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6690 else
7ff120b4 6691 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6692 }
6693 else
7ff120b4 6694 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6695
6696 case 0x1a: case 0x1b:
6697 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6698 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6699 else
7ff120b4 6700 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6701
6702 case 0x1c: case 0x1d:
6703 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6704 {
6705 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6706 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6707 else
7ff120b4 6708 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6709 }
6710 else
7ff120b4 6711 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6712
6713 case 0x1e: case 0x1f:
6714 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6715 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6716 else
7ff120b4 6717 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6718 }
6719
6720 /* Should be unreachable. */
6721 return 1;
6722}
6723
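/* Decode branch, branch-with-link and block data transfer (LDM/STM)
   instructions for displaced stepping. */
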
6724static int
615234c1 6725arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4
YQ
6726 struct regcache *regs,
6727 struct displaced_step_closure *dsc)
cca44b1b
JB
6728{
6729 if (bit (insn, 25))
7ff120b4 6730 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6731 else
7ff120b4 6732 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6733}
6734
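/* Decode extension register (VFP/Neon) load/store instructions for
   displaced stepping. */
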
6735static int
7ff120b4
YQ
6736arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6737 struct regcache *regs,
6738 struct displaced_step_closure *dsc)
cca44b1b
JB
6739{
6740 unsigned int opcode = bits (insn, 20, 24);
6741
6742 switch (opcode)
6743 {
6744 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6745 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6746
6747 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6748 case 0x12: case 0x16:
7ff120b4 6749 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6750
6751 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6752 case 0x13: case 0x17:
7ff120b4 6753 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6754
6755 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6756 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6757 /* Note: no writeback for these instructions. Bit 25 will always be
6758 zero though (via caller), so the following works OK. */
7ff120b4 6759 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6760 }
6761
6762 /* Should be unreachable. */
6763 return 1;
6764}
6765
34518530
YQ
6766/* Decode shifted register instructions. */
6767
6768static int
6769thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6770 uint16_t insn2, struct regcache *regs,
6771 struct displaced_step_closure *dsc)
6772{
 6773 /* PC is only allowed to be used in the MOV instruction. */
6774
6775 unsigned int op = bits (insn1, 5, 8);
6776 unsigned int rn = bits (insn1, 0, 3);
6777
6778 if (op == 0x2 && rn == 0xf) /* MOV */
6779 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6780 else
6781 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6782 "dp (shift reg)", dsc);
6783}
6784
6785
6786/* Decode extension register load/store. Exactly the same as
6787 arm_decode_ext_reg_ld_st. */
6788
6789static int
6790thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6791 uint16_t insn2, struct regcache *regs,
6792 struct displaced_step_closure *dsc)
6793{
6794 unsigned int opcode = bits (insn1, 4, 8);
6795
6796 switch (opcode)
6797 {
6798 case 0x04: case 0x05:
6799 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6800 "vfp/neon vmov", dsc);
6801
6802 case 0x08: case 0x0c: /* 01x00 */
6803 case 0x0a: case 0x0e: /* 01x10 */
6804 case 0x12: case 0x16: /* 10x10 */
6805 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6806 "vfp/neon vstm/vpush", dsc);
6807
6808 case 0x09: case 0x0d: /* 01x01 */
6809 case 0x0b: case 0x0f: /* 01x11 */
6810 case 0x13: case 0x17: /* 10x11 */
6811 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6812 "vfp/neon vldm/vpop", dsc);
6813
6814 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6815 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6816 "vstr", dsc);
6817 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6818 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6819 }
6820
6821 /* Should be unreachable. */
6822 return 1;
6823}
6824
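/* Decode supervisor call and coprocessor instructions, including VFP and
   Neon loads, stores and register transfers. */
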
cca44b1b 6825static int
12545665 6826arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6827 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6828{
6829 unsigned int op1 = bits (insn, 20, 25);
6830 int op = bit (insn, 4);
6831 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6832
6833 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6834 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6835 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6836 && (coproc & 0xe) != 0xa)
6837 /* stc/stc2. */
7ff120b4 6838 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6839 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6840 && (coproc & 0xe) != 0xa)
6841 /* ldc/ldc2 imm/lit. */
7ff120b4 6842 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6843 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6844 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6845 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6846 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6847 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6848 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6849 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6850 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6851 else if ((op1 & 0x30) == 0x20 && !op)
6852 {
6853 if ((coproc & 0xe) == 0xa)
7ff120b4 6854 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6855 else
7ff120b4 6856 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6857 }
6858 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6859 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6860 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6861 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6862 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6863 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6864 else if ((op1 & 0x30) == 0x30)
7ff120b4 6865 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6866 else
7ff120b4 6867 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6868}
6869
34518530
YQ
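/* Decode 32-bit Thumb coprocessor and SIMD/VFP extension register
   instructions for displaced stepping. */
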
6870static int
6871thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6872 uint16_t insn2, struct regcache *regs,
6873 struct displaced_step_closure *dsc)
6874{
6875 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6876 unsigned int bit_5_8 = bits (insn1, 5, 8);
6877 unsigned int bit_9 = bit (insn1, 9);
6878 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6879
6880 if (bit_9 == 0)
6881 {
6882 if (bit_5_8 == 2)
6883 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6884 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6885 dsc);
6886 else if (bit_5_8 == 0) /* UNDEFINED. */
6887 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6888 else
6889 {
 6890 /* coproc is 101x: SIMD/VFP ext register load/store. */
6891 if ((coproc & 0xe) == 0xa)
6892 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6893 dsc);
6894 else /* coproc is not 101x. */
6895 {
6896 if (bit_4 == 0) /* STC/STC2. */
6897 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6898 "stc/stc2", dsc);
 6899 else /* LDC/LDC2 {literal, immediate}. */
6900 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6901 regs, dsc);
6902 }
6903 }
6904 }
6905 else
6906 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6907
6908 return 0;
6909}
6910
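/* Common preparation step for copying a PC-relative address calculation
   (ADR): load the PC value seen by the original instruction into
   register RD, so that the copied ADD/SUB can operate on it. */
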
6911static void
6912install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6913 struct displaced_step_closure *dsc, int rd)
6914{
6915 /* ADR Rd, #imm
6916
6917 Rewrite as:
6918
6919 Preparation: Rd <- PC
6920 Insn: ADD Rd, #imm
6921 Cleanup: Null.
6922 */
6923
6924 /* Rd <- PC */
6925 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6926 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6927}
6928
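/* Copy a 16-bit Thumb ADR (generate PC-relative address) instruction,
   rewriting it as ADDS Rd, #imm with the PC value preloaded into Rd. */
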
6929static int
6930thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6931 struct displaced_step_closure *dsc,
6932 int rd, unsigned int imm)
6933{
6934
6935 /* Encoding T2: ADDS Rd, #imm */
6936 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6937
6938 install_pc_relative (gdbarch, regs, dsc, rd);
6939
6940 return 0;
6941}
6942
6943static int
6944thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6945 struct regcache *regs,
6946 struct displaced_step_closure *dsc)
6947{
6948 unsigned int rd = bits (insn, 8, 10);
6949 unsigned int imm8 = bits (insn, 0, 7);
6950
6951 if (debug_displaced)
6952 fprintf_unfiltered (gdb_stdlog,
6953 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6954 rd, imm8, insn);
6955
6956 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6957}
6958
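/* Copy a 32-bit Thumb ADR (ADD/SUB PC-relative address) instruction. */
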
6959static int
6960thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6961 uint16_t insn2, struct regcache *regs,
6962 struct displaced_step_closure *dsc)
6963{
6964 unsigned int rd = bits (insn2, 8, 11);
 6965 /* The immediate field has the same encoding in ADR, ADD and SUB, so we
 6966 simply extract the raw immediate encoding rather than computing the
 6967 immediate value. When generating the ADD or SUB instruction, we can
 6968 then OR that field directly into the encoding. */
6969 unsigned int imm_3_8 = insn2 & 0x70ff;
6970 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6971
6972 if (debug_displaced)
6973 fprintf_unfiltered (gdb_stdlog,
6974 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6975 rd, imm_i, imm_3_8, insn1, insn2);
6976
6977 if (bit (insn1, 7)) /* Encoding T2 */
6978 {
6979 /* Encoding T3: SUB Rd, Rd, #imm */
6980 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6981 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6982 }
6983 else /* Encoding T3 */
6984 {
6985 /* Encoding T3: ADD Rd, Rd, #imm */
6986 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6987 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6988 }
6989 dsc->numinsns = 2;
6990
6991 install_pc_relative (gdbarch, regs, dsc, rd);
6992
6993 return 0;
6994}
6995
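/* Copy a 16-bit Thumb LDR (literal) instruction. */
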
6996static int
615234c1 6997thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
6998 struct regcache *regs,
6999 struct displaced_step_closure *dsc)
7000{
7001 unsigned int rt = bits (insn1, 8, 10);
7002 unsigned int pc;
7003 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
7004
7005 /* LDR Rd, #imm8
7006
 7007 Rewrite as:
7008
7009 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7010
7011 Insn: LDR R0, [R2, R3];
7012 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7013
7014 if (debug_displaced)
7015 fprintf_unfiltered (gdb_stdlog,
7016 "displaced: copying thumb ldr r%d [pc #%d]\n"
7017 , rt, imm8);
7018
7019 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7020 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7021 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7022 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7023 /* The assembler calculates the required value of the offset from the
7024 Align(PC,4) value of this instruction to the label. */
7025 pc = pc & 0xfffffffc;
7026
7027 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7028 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7029
7030 dsc->rd = rt;
7031 dsc->u.ldst.xfersize = 4;
7032 dsc->u.ldst.rn = 0;
7033 dsc->u.ldst.immed = 0;
7034 dsc->u.ldst.writeback = 0;
7035 dsc->u.ldst.restore_r4 = 0;
7036
7037 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7038
7039 dsc->cleanup = &cleanup_load;
7040
7041 return 0;
7042}
7043
 7044 /* Copy Thumb cbnz/cbz instruction. */
7045
7046static int
7047thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7048 struct regcache *regs,
7049 struct displaced_step_closure *dsc)
7050{
7051 int non_zero = bit (insn1, 11);
7052 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7053 CORE_ADDR from = dsc->insn_addr;
7054 int rn = bits (insn1, 0, 2);
7055 int rn_val = displaced_read_reg (regs, dsc, rn);
7056
7057 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
 7058 /* CBNZ and CBZ do not affect the condition flags. If the condition is
 7059 true, set it to INST_AL so that cleanup_branch knows the branch is
 7060 taken; otherwise leave it alone and cleanup_branch will do nothing. */
7061 if (dsc->u.branch.cond)
7062 {
7063 dsc->u.branch.cond = INST_AL;
7064 dsc->u.branch.dest = from + 4 + imm5;
7065 }
7066 else
7067 dsc->u.branch.dest = from + 2;
7068
7069 dsc->u.branch.link = 0;
7070 dsc->u.branch.exchange = 0;
7071
7072 if (debug_displaced)
7073 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7074 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7075 rn, rn_val, insn1, dsc->u.branch.dest);
7076
7077 dsc->modinsn[0] = THUMB_NOP;
7078
7079 dsc->cleanup = &cleanup_branch;
7080 return 0;
7081}
7082
 7083 /* Copy Table Branch Byte/Halfword. */
7084static int
7085thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7086 uint16_t insn2, struct regcache *regs,
7087 struct displaced_step_closure *dsc)
7088{
7089 ULONGEST rn_val, rm_val;
7090 int is_tbh = bit (insn2, 4);
7091 CORE_ADDR halfwords = 0;
7092 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7093
7094 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7095 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7096
7097 if (is_tbh)
7098 {
7099 gdb_byte buf[2];
7100
7101 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7102 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7103 }
7104 else
7105 {
7106 gdb_byte buf[1];
7107
7108 target_read_memory (rn_val + rm_val, buf, 1);
7109 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7110 }
7111
7112 if (debug_displaced)
 7113 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
 7114 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7115 (unsigned int) rn_val, (unsigned int) rm_val,
7116 (unsigned int) halfwords);
7117
7118 dsc->u.branch.cond = INST_AL;
7119 dsc->u.branch.link = 0;
7120 dsc->u.branch.exchange = 0;
7121 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7122
7123 dsc->cleanup = &cleanup_branch;
7124
7125 return 0;
7126}
7127
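/* Cleanup for a copied POP with a full register list: the new PC value
   was popped into r7, so write it to the PC, then restore r7 from r8 and
   r8 from the saved temporary. */
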
7128static void
7129cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7130 struct displaced_step_closure *dsc)
7131{
7132 /* PC <- r7 */
7133 int val = displaced_read_reg (regs, dsc, 7);
7134 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7135
7136 /* r7 <- r8 */
7137 val = displaced_read_reg (regs, dsc, 8);
7138 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7139
7140 /* r8 <- tmp[0] */
7141 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7142
7143}
7144
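/* Copy a 16-bit Thumb POP instruction whose register list includes the
   PC. */
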
7145static int
615234c1 7146thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7147 struct regcache *regs,
7148 struct displaced_step_closure *dsc)
7149{
7150 dsc->u.block.regmask = insn1 & 0x00ff;
7151
7152 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7153 to :
7154
7155 (1) register list is full, that is, r0-r7 are used.
7156 Prepare: tmp[0] <- r8
7157
7158 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7159 MOV r8, r7; Move value of r7 to r8;
7160 POP {r7}; Store PC value into r7.
7161
7162 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7163
7164 (2) register list is not full, supposing there are N registers in
7165 register list (except PC, 0 <= N <= 7).
7166 Prepare: for each i, 0 - N, tmp[i] <- ri.
7167
7168 POP {r0, r1, ...., rN};
7169
7170 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7171 from tmp[] properly.
7172 */
7173 if (debug_displaced)
7174 fprintf_unfiltered (gdb_stdlog,
7175 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7176 dsc->u.block.regmask, insn1);
7177
7178 if (dsc->u.block.regmask == 0xff)
7179 {
7180 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7181
7182 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7183 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7184 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7185
7186 dsc->numinsns = 3;
7187 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7188 }
7189 else
7190 {
7191 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7192 unsigned int i;
7193 unsigned int new_regmask;
34518530
YQ
7194
7195 for (i = 0; i < num_in_list + 1; i++)
7196 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7197
7198 new_regmask = (1 << (num_in_list + 1)) - 1;
7199
7200 if (debug_displaced)
7201 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7202 "{..., pc}: original reg list %.4x,"
7203 " modified list %.4x\n"),
7204 (int) dsc->u.block.regmask, new_regmask);
7205
7206 dsc->u.block.regmask |= 0x8000;
7207 dsc->u.block.writeback = 0;
7208 dsc->u.block.cond = INST_AL;
7209
7210 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7211
7212 dsc->cleanup = &cleanup_block_load_pc;
7213 }
7214
7215 return 0;
7216}
7217
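/* Decode a 16-bit Thumb instruction and prepare it for displaced
   stepping. */
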
7218static void
7219thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7220 struct regcache *regs,
7221 struct displaced_step_closure *dsc)
7222{
7223 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7224 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7225 int err = 0;
7226
7227 /* 16-bit thumb instructions. */
7228 switch (op_bit_12_15)
7229 {
 7230 /* Shift (immediate), add, subtract, move and compare. */
7231 case 0: case 1: case 2: case 3:
7232 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7233 "shift/add/sub/mov/cmp",
7234 dsc);
7235 break;
7236 case 4:
7237 switch (op_bit_10_11)
7238 {
7239 case 0: /* Data-processing */
7240 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7241 "data-processing",
7242 dsc);
7243 break;
7244 case 1: /* Special data instructions and branch and exchange. */
7245 {
7246 unsigned short op = bits (insn1, 7, 9);
7247 if (op == 6 || op == 7) /* BX or BLX */
7248 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7249 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7250 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7251 else
7252 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7253 dsc);
7254 }
7255 break;
7256 default: /* LDR (literal) */
7257 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7258 }
7259 break;
7260 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7261 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7262 break;
7263 case 10:
7264 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7265 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7266 else /* Generate SP-relative address */
7267 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7268 break;
7269 case 11: /* Misc 16-bit instructions */
7270 {
7271 switch (bits (insn1, 8, 11))
7272 {
7273 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7274 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7275 break;
7276 case 12: case 13: /* POP */
7277 if (bit (insn1, 8)) /* PC is in register list. */
7278 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7279 else
7280 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7281 break;
7282 case 15: /* If-Then, and hints */
7283 if (bits (insn1, 0, 3))
 7284 /* If-Then makes up to four following instructions conditional.
 7285 The IT instruction itself is not conditional, so handle it as an
 7286 ordinary unmodified instruction. */
7287 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7288 dsc);
7289 else
7290 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7291 break;
7292 default:
7293 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7294 }
7295 }
7296 break;
7297 case 12:
7298 if (op_bit_10_11 < 2) /* Store multiple registers */
7299 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7300 else /* Load multiple registers */
7301 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7302 break;
7303 case 13: /* Conditional branch and supervisor call */
7304 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7305 err = thumb_copy_b (gdbarch, insn1, dsc);
7306 else
7307 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7308 break;
7309 case 14: /* Unconditional branch */
7310 err = thumb_copy_b (gdbarch, insn1, dsc);
7311 break;
7312 default:
7313 err = 1;
7314 }
7315
7316 if (err)
7317 internal_error (__FILE__, __LINE__,
7318 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7319}
7320
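/* Decode 32-bit Thumb load instructions and memory hints (PLD/PLI) for
   displaced stepping. */
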
7321static int
7322decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7323 uint16_t insn1, uint16_t insn2,
7324 struct regcache *regs,
7325 struct displaced_step_closure *dsc)
7326{
7327 int rt = bits (insn2, 12, 15);
7328 int rn = bits (insn1, 0, 3);
7329 int op1 = bits (insn1, 7, 8);
34518530
YQ
7330
7331 switch (bits (insn1, 5, 6))
7332 {
7333 case 0: /* Load byte and memory hints */
7334 if (rt == 0xf) /* PLD/PLI */
7335 {
7336 if (rn == 0xf)
7337 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7338 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7339 else
7340 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7341 "pli/pld", dsc);
7342 }
7343 else
7344 {
7345 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7346 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7347 1);
7348 else
7349 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7350 "ldrb{reg, immediate}/ldrbt",
7351 dsc);
7352 }
7353
7354 break;
7355 case 1: /* Load halfword and memory hints. */
7356 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7357 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7358 "pld/unalloc memhint", dsc);
7359 else
7360 {
7361 if (rn == 0xf)
7362 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7363 2);
7364 else
7365 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7366 "ldrh/ldrht", dsc);
7367 }
7368 break;
7369 case 2: /* Load word */
7370 {
7371 int insn2_bit_8_11 = bits (insn2, 8, 11);
7372
7373 if (rn == 0xf)
7374 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7375 else if (op1 == 0x1) /* Encoding T3 */
7376 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7377 0, 1);
7378 else /* op1 == 0x0 */
7379 {
7380 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7381 /* LDR (immediate) */
7382 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7383 dsc, bit (insn2, 8), 1);
7384 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7385 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7386 "ldrt", dsc);
7387 else
7388 /* LDR (register) */
7389 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7390 dsc, 0, 0);
7391 }
7392 break;
7393 }
7394 default:
7395 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7396 break;
7397 }
7398 return 0;
7399}
7400
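/* Decode a 32-bit Thumb instruction and prepare it for displaced
   stepping. */
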
7401static void
7402thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7403 uint16_t insn2, struct regcache *regs,
7404 struct displaced_step_closure *dsc)
7405{
7406 int err = 0;
7407 unsigned short op = bit (insn2, 15);
7408 unsigned int op1 = bits (insn1, 11, 12);
7409
7410 switch (op1)
7411 {
7412 case 1:
7413 {
7414 switch (bits (insn1, 9, 10))
7415 {
7416 case 0:
7417 if (bit (insn1, 6))
7418 {
 7419 /* Load/store {dual, exclusive}, table branch. */
7420 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7421 && bits (insn2, 5, 7) == 0)
7422 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7423 dsc);
7424 else
 7425 /* PC is not allowed to be used in load/store {dual, exclusive}
 7426 instructions. */
7427 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7428 "load/store dual/ex", dsc);
7429 }
7430 else /* load/store multiple */
7431 {
7432 switch (bits (insn1, 7, 8))
7433 {
7434 case 0: case 3: /* SRS, RFE */
7435 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7436 "srs/rfe", dsc);
7437 break;
7438 case 1: case 2: /* LDM/STM/PUSH/POP */
7439 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7440 break;
7441 }
7442 }
7443 break;
7444
7445 case 1:
7446 /* Data-processing (shift register). */
7447 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7448 dsc);
7449 break;
7450 default: /* Coprocessor instructions. */
7451 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7452 break;
7453 }
7454 break;
7455 }
7456 case 2: /* op1 = 2 */
7457 if (op) /* Branch and misc control. */
7458 {
7459 if (bit (insn2, 14) /* BLX/BL */
7460 || bit (insn2, 12) /* Unconditional branch */
7461 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7462 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7463 else
7464 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7465 "misc ctrl", dsc);
7466 }
7467 else
7468 {
7469 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7470 {
7471 int op = bits (insn1, 4, 8);
7472 int rn = bits (insn1, 0, 3);
7473 if ((op == 0 || op == 0xa) && rn == 0xf)
7474 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7475 regs, dsc);
7476 else
7477 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7478 "dp/pb", dsc);
7479 }
 7480 else /* Data processing (modified immediate). */
7481 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7482 "dp/mi", dsc);
7483 }
7484 break;
7485 case 3: /* op1 = 3 */
7486 switch (bits (insn1, 9, 10))
7487 {
7488 case 0:
7489 if (bit (insn1, 4))
7490 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7491 regs, dsc);
7492 else /* NEON Load/Store and Store single data item */
7493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7494 "neon elt/struct load/store",
7495 dsc);
7496 break;
7497 case 1: /* op1 = 3, bits (9, 10) == 1 */
7498 switch (bits (insn1, 7, 8))
7499 {
7500 case 0: case 1: /* Data processing (register) */
7501 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7502 "dp(reg)", dsc);
7503 break;
7504 case 2: /* Multiply and absolute difference */
7505 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7506 "mul/mua/diff", dsc);
7507 break;
7508 case 3: /* Long multiply and divide */
7509 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7510 "lmul/lmua", dsc);
7511 break;
7512 }
7513 break;
7514 default: /* Coprocessor instructions */
7515 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7516 break;
7517 }
7518 break;
7519 default:
7520 err = 1;
7521 }
7522
7523 if (err)
7524 internal_error (__FILE__, __LINE__,
7525 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7526
7527}
7528
b434a28f
YQ
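/* Read the Thumb instruction at FROM and hand it to the 16-bit or 32-bit
   handler above, depending on its size. */
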
7529static void
7530thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7531 struct regcache *regs,
b434a28f
YQ
7532 struct displaced_step_closure *dsc)
7533{
34518530
YQ
7534 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7535 uint16_t insn1
7536 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7537
7538 if (debug_displaced)
7539 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7540 "at %.8lx\n", insn1, (unsigned long) from);
7541
7542 dsc->is_thumb = 1;
7543 dsc->insn_size = thumb_insn_size (insn1);
7544 if (thumb_insn_size (insn1) == 4)
7545 {
7546 uint16_t insn2
7547 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7548 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7549 }
7550 else
7551 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7552}
7553
cca44b1b 7554void
b434a28f
YQ
7555arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7556 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7557 struct displaced_step_closure *dsc)
7558{
7559 int err = 0;
b434a28f
YQ
7560 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7561 uint32_t insn;
cca44b1b
JB
7562
7563 /* Most displaced instructions use a 1-instruction scratch space, so set this
7564 here and override below if/when necessary. */
7565 dsc->numinsns = 1;
7566 dsc->insn_addr = from;
7567 dsc->scratch_base = to;
7568 dsc->cleanup = NULL;
7569 dsc->wrote_to_pc = 0;
7570
b434a28f 7571 if (!displaced_in_arm_mode (regs))
12545665 7572 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7573
4db71c0b
YQ
7574 dsc->is_thumb = 0;
7575 dsc->insn_size = 4;
b434a28f
YQ
7576 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7577 if (debug_displaced)
7578 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7579 "at %.8lx\n", (unsigned long) insn,
7580 (unsigned long) from);
7581
cca44b1b 7582 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7583 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7584 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7585 {
7586 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7587 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7588 break;
7589
7590 case 0x4: case 0x5: case 0x6:
7ff120b4 7591 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7592 break;
7593
7594 case 0x7:
7ff120b4 7595 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7596 break;
7597
7598 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7599 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7600 break;
7601
7602 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7603 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7604 break;
7605 }
7606
7607 if (err)
7608 internal_error (__FILE__, __LINE__,
7609 _("arm_process_displaced_insn: Instruction decode error"));
7610}
7611
7612/* Actually set up the scratch space for a displaced instruction. */
7613
7614void
7615arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7616 CORE_ADDR to, struct displaced_step_closure *dsc)
7617{
7618 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7619 unsigned int i, len, offset;
cca44b1b 7620 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7621 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7622 const gdb_byte *bkp_insn;
cca44b1b 7623
4db71c0b 7624 offset = 0;
cca44b1b
JB
7625 /* Poke modified instruction(s). */
7626 for (i = 0; i < dsc->numinsns; i++)
7627 {
7628 if (debug_displaced)
4db71c0b
YQ
7629 {
7630 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7631 if (size == 4)
7632 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7633 dsc->modinsn[i]);
7634 else if (size == 2)
7635 fprintf_unfiltered (gdb_stdlog, "%.4x",
7636 (unsigned short)dsc->modinsn[i]);
7637
7638 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7639 (unsigned long) to + offset);
7640
7641 }
7642 write_memory_unsigned_integer (to + offset, size,
7643 byte_order_for_code,
cca44b1b 7644 dsc->modinsn[i]);
4db71c0b
YQ
7645 offset += size;
7646 }
7647
7648 /* Choose the correct breakpoint instruction. */
7649 if (dsc->is_thumb)
7650 {
7651 bkp_insn = tdep->thumb_breakpoint;
7652 len = tdep->thumb_breakpoint_size;
7653 }
7654 else
7655 {
7656 bkp_insn = tdep->arm_breakpoint;
7657 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7658 }
7659
7660 /* Put breakpoint afterwards. */
4db71c0b 7661 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7662
7663 if (debug_displaced)
7664 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7665 paddress (gdbarch, from), paddress (gdbarch, to));
7666}
7667
cca44b1b
JB
7668/* Entry point for cleaning things up after a displaced instruction has been
7669 single-stepped. */
7670
7671void
7672arm_displaced_step_fixup (struct gdbarch *gdbarch,
7673 struct displaced_step_closure *dsc,
7674 CORE_ADDR from, CORE_ADDR to,
7675 struct regcache *regs)
7676{
7677 if (dsc->cleanup)
7678 dsc->cleanup (gdbarch, regs, dsc);
7679
7680 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7681 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7682 dsc->insn_addr + dsc->insn_size);
7683
cca44b1b
JB
7684}
7685
7686#include "bfd-in2.h"
7687#include "libcoff.h"
7688
7689static int
7690gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7691{
e47ad6c0
YQ
7692 gdb_disassembler *di
7693 = static_cast<gdb_disassembler *>(info->application_data);
7694 struct gdbarch *gdbarch = di->arch ();
9779414d
DJ
7695
7696 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7697 {
7698 static asymbol *asym;
7699 static combined_entry_type ce;
7700 static struct coff_symbol_struct csym;
7701 static struct bfd fake_bfd;
7702 static bfd_target fake_target;
7703
7704 if (csym.native == NULL)
7705 {
7706 /* Create a fake symbol vector containing a Thumb symbol.
7707 This is solely so that the code in print_insn_little_arm()
7708 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7709 the presence of a Thumb symbol and switch to decoding
7710 Thumb instructions. */
7711
7712 fake_target.flavour = bfd_target_coff_flavour;
7713 fake_bfd.xvec = &fake_target;
7714 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7715 csym.native = &ce;
7716 csym.symbol.the_bfd = &fake_bfd;
7717 csym.symbol.name = "fake";
7718 asym = (asymbol *) & csym;
7719 }
7720
7721 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7722 info->symbols = &asym;
7723 }
7724 else
7725 info->symbols = NULL;
7726
e60eb288
YQ
 7727 /* GDB is able to get bfd_mach from exec_bfd, so info->mach is
 7728 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
 7729 opcodes/arm-dis.c:print_insn would reset info->mach, and that would
 7730 trigger the assert on the mismatch of info->mach and
 7731 bfd_get_mach (exec_bfd) in default_print_insn. */
7732 if (exec_bfd != NULL)
7733 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7734
6394c606 7735 return default_print_insn (memaddr, info);
cca44b1b
JB
7736}
7737
7738/* The following define instruction sequences that will cause ARM
7739 cpu's to take an undefined instruction trap. These are used to
7740 signal a breakpoint to GDB.
7741
7742 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7743 modes. A different instruction is required for each mode. The ARM
7744 cpu's can also be big or little endian. Thus four different
7745 instructions are needed to support all cases.
7746
7747 Note: ARMv4 defines several new instructions that will take the
7748 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7749 not in fact add the new instructions. The new undefined
7750 instructions in ARMv4 are all instructions that had no defined
 7751 behaviour in earlier chips. There is no guarantee that they will
 7752 raise an exception, but they may be treated as NOPs. In practice, it
 7753 may only be safe to rely on instructions matching:
7754
7755 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7756 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7757 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7758
0963b4bd 7759 Even this may only be true if the condition predicate is true. The
cca44b1b
JB
7760 following use a condition predicate of ALWAYS so it is always TRUE.
7761
7762 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7763 and NetBSD all use a software interrupt rather than an undefined
 7764 instruction to force a trap. This can be handled by the
7765 abi-specific code during establishment of the gdbarch vector. */
7766
7767#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7768#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7769#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7770#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7771
948f8e3d
PA
7772static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7773static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7774static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7775static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7776
cd6c3b4f
YQ
7777/* Implement the breakpoint_kind_from_pc gdbarch method. */
7778
d19280ad
YQ
7779static int
7780arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
cca44b1b
JB
7781{
7782 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7783 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7784
9779414d 7785 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7786 {
7787 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7788
7789 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7790 check whether we are replacing a 32-bit instruction. */
7791 if (tdep->thumb2_breakpoint != NULL)
7792 {
7793 gdb_byte buf[2];
d19280ad 7794
177321bd
DJ
7795 if (target_read_memory (*pcptr, buf, 2) == 0)
7796 {
7797 unsigned short inst1;
d19280ad 7798
177321bd 7799 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7800 if (thumb_insn_size (inst1) == 4)
d19280ad 7801 return ARM_BP_KIND_THUMB2;
177321bd
DJ
7802 }
7803 }
7804
d19280ad 7805 return ARM_BP_KIND_THUMB;
cca44b1b
JB
7806 }
7807 else
d19280ad
YQ
7808 return ARM_BP_KIND_ARM;
7809
7810}
7811
cd6c3b4f
YQ
7812/* Implement the sw_breakpoint_from_kind gdbarch method. */
7813
d19280ad
YQ
7814static const gdb_byte *
7815arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7816{
7817 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7818
7819 switch (kind)
cca44b1b 7820 {
d19280ad
YQ
7821 case ARM_BP_KIND_ARM:
7822 *size = tdep->arm_breakpoint_size;
cca44b1b 7823 return tdep->arm_breakpoint;
d19280ad
YQ
7824 case ARM_BP_KIND_THUMB:
7825 *size = tdep->thumb_breakpoint_size;
7826 return tdep->thumb_breakpoint;
7827 case ARM_BP_KIND_THUMB2:
7828 *size = tdep->thumb2_breakpoint_size;
7829 return tdep->thumb2_breakpoint;
7830 default:
7831 gdb_assert_not_reached ("unexpected arm breakpoint kind");
cca44b1b
JB
7832 }
7833}
7834
833b7ab5
YQ
7835/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7836
7837static int
7838arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7839 struct regcache *regcache,
7840 CORE_ADDR *pcptr)
7841{
7842 gdb_byte buf[4];
7843
 7844 /* Check that the memory pointed to by PC is readable. */
7845 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7846 {
7847 struct arm_get_next_pcs next_pcs_ctx;
833b7ab5
YQ
7848
7849 arm_get_next_pcs_ctor (&next_pcs_ctx,
7850 &arm_get_next_pcs_ops,
7851 gdbarch_byte_order (gdbarch),
7852 gdbarch_byte_order_for_code (gdbarch),
7853 0,
7854 regcache);
7855
a0ff9e1a 7856 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
833b7ab5
YQ
7857
 7858 /* If *PCPTR matches one of the next PCs computed for the current
 7859 instruction, use the software single-step computation and determine
 7860 the Thumb state from the destination address. */
a0ff9e1a 7861 for (CORE_ADDR pc : next_pcs)
833b7ab5
YQ
7862 {
7863 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7864 {
833b7ab5
YQ
7865 if (IS_THUMB_ADDR (pc))
7866 {
7867 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7868 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7869 }
7870 else
7871 return ARM_BP_KIND_ARM;
7872 }
7873 }
833b7ab5
YQ
7874 }
7875
7876 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7877}
7878
cca44b1b
JB
7879/* Extract from an array REGBUF containing the (raw) register state a
7880 function return value of type TYPE, and copy that, in virtual
7881 format, into VALBUF. */
7882
7883static void
7884arm_extract_return_value (struct type *type, struct regcache *regs,
7885 gdb_byte *valbuf)
7886{
7887 struct gdbarch *gdbarch = get_regcache_arch (regs);
7888 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7889
7890 if (TYPE_CODE_FLT == TYPE_CODE (type))
7891 {
7892 switch (gdbarch_tdep (gdbarch)->fp_model)
7893 {
7894 case ARM_FLOAT_FPA:
7895 {
7896 /* The value is in register F0 in internal format. We need to
7897 extract the raw value and then convert it to the desired
7898 internal type. */
7899 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7900
7901 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
96a5a1d3
UW
7902 convert_typed_floating (tmpbuf, arm_ext_type (gdbarch),
7903 valbuf, type);
cca44b1b
JB
7904 }
7905 break;
7906
7907 case ARM_FLOAT_SOFT_FPA:
7908 case ARM_FLOAT_SOFT_VFP:
7909 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7910 not using the VFP ABI code. */
7911 case ARM_FLOAT_VFP:
7912 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7913 if (TYPE_LENGTH (type) > 4)
7914 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7915 valbuf + INT_REGISTER_SIZE);
7916 break;
7917
7918 default:
0963b4bd
MS
7919 internal_error (__FILE__, __LINE__,
7920 _("arm_extract_return_value: "
7921 "Floating point model not supported"));
cca44b1b
JB
7922 break;
7923 }
7924 }
7925 else if (TYPE_CODE (type) == TYPE_CODE_INT
7926 || TYPE_CODE (type) == TYPE_CODE_CHAR
7927 || TYPE_CODE (type) == TYPE_CODE_BOOL
7928 || TYPE_CODE (type) == TYPE_CODE_PTR
aa006118 7929 || TYPE_IS_REFERENCE (type)
cca44b1b
JB
7930 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7931 {
b021a221
MS
7932 /* If the type is a plain integer, then the access is
 7933 straightforward. Otherwise we have to play around a bit
7934 more. */
cca44b1b
JB
7935 int len = TYPE_LENGTH (type);
7936 int regno = ARM_A1_REGNUM;
7937 ULONGEST tmp;
7938
7939 while (len > 0)
7940 {
7941 /* By using store_unsigned_integer we avoid having to do
7942 anything special for small big-endian values. */
7943 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7944 store_unsigned_integer (valbuf,
7945 (len > INT_REGISTER_SIZE
7946 ? INT_REGISTER_SIZE : len),
7947 byte_order, tmp);
7948 len -= INT_REGISTER_SIZE;
7949 valbuf += INT_REGISTER_SIZE;
7950 }
7951 }
7952 else
7953 {
7954 /* For a structure or union the behaviour is as if the value had
7955 been stored to word-aligned memory and then loaded into
7956 registers with 32-bit load instruction(s). */
7957 int len = TYPE_LENGTH (type);
7958 int regno = ARM_A1_REGNUM;
7959 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7960
7961 while (len > 0)
7962 {
7963 regcache_cooked_read (regs, regno++, tmpbuf);
7964 memcpy (valbuf, tmpbuf,
7965 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7966 len -= INT_REGISTER_SIZE;
7967 valbuf += INT_REGISTER_SIZE;
7968 }
7969 }
7970}
7971
7972
7973/* Will a function return an aggregate type in memory or in a
7974 register? Return 0 if an aggregate type can be returned in a
7975 register, 1 if it must be returned in memory. */
7976
7977static int
7978arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7979{
cca44b1b
JB
7980 enum type_code code;
7981
f168693b 7982 type = check_typedef (type);
cca44b1b 7983
b13c8ab2
YQ
7984 /* Simple, non-aggregate types (ie not including vectors and
7985 complex) are always returned in a register (or registers). */
7986 code = TYPE_CODE (type);
7987 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7988 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7989 return 0;
cca44b1b 7990
c4312b19
YQ
7991 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7992 {
7993 /* Vector values should be returned using ARM registers if they
7994 are not over 16 bytes. */
7995 return (TYPE_LENGTH (type) > 16);
7996 }
7997
b13c8ab2 7998 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 7999 {
b13c8ab2
YQ
8000 /* The AAPCS says all aggregates not larger than a word are returned
8001 in a register. */
8002 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8003 return 0;
8004
cca44b1b
JB
8005 return 1;
8006 }
b13c8ab2
YQ
8007 else
8008 {
8009 int nRc;
cca44b1b 8010
b13c8ab2
YQ
8011 /* All aggregate types that won't fit in a register must be returned
8012 in memory. */
8013 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8014 return 1;
cca44b1b 8015
b13c8ab2
YQ
8016 /* In the ARM ABI, "integer" like aggregate types are returned in
8017 registers. For an aggregate type to be integer like, its size
8018 must be less than or equal to INT_REGISTER_SIZE and the
8019 offset of each addressable subfield must be zero. Note that bit
8020 fields are not addressable, and all addressable subfields of
8021 unions always start at offset zero.
cca44b1b 8022
b13c8ab2
YQ
8023 This function is based on the behaviour of GCC 2.95.1.
8024 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8025
b13c8ab2
YQ
8026 Note: All versions of GCC before GCC 2.95.2 do not set up the
8027 parameters correctly for a function returning the following
8028 structure: struct { float f;}; This should be returned in memory,
8029 not a register. Richard Earnshaw sent me a patch, but I do not
8030 know of any way to detect if a function like the above has been
8031 compiled with the correct calling convention. */
8032
8033 /* Assume all other aggregate types can be returned in a register.
8034 Run a check for structures, unions and arrays. */
8035 nRc = 0;
67255d04 8036
b13c8ab2
YQ
8037 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8038 {
8039 int i;
8040 /* Need to check if this struct/union is "integer" like. For
8041 this to be true, its size must be less than or equal to
8042 INT_REGISTER_SIZE and the offset of each addressable
8043 subfield must be zero. Note that bit fields are not
8044 addressable, and unions always start at offset zero. If any
8045 of the subfields is a floating point type, the struct/union
8046 cannot be an integer type. */
8047
8048 /* For each field in the object, check:
8049 1) Is it FP? --> yes, nRc = 1;
8050 2) Is it addressable (bitpos != 0) and
8051 not packed (bitsize == 0)?
8052 --> yes, nRc = 1
8053 */
8054
8055 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8056 {
b13c8ab2
YQ
8057 enum type_code field_type_code;
8058
8059 field_type_code
8060 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8061 i)));
8062
8063 /* Is it a floating point type field? */
8064 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8065 {
8066 nRc = 1;
8067 break;
8068 }
b13c8ab2
YQ
8069
8070 /* If bitpos != 0, then we have to care about it. */
8071 if (TYPE_FIELD_BITPOS (type, i) != 0)
8072 {
8073 /* Bitfields are not addressable. If the field bitsize is
8074 zero, then the field is not packed. Hence it cannot be
8075 a bitfield or any other packed type. */
8076 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8077 {
8078 nRc = 1;
8079 break;
8080 }
8081 }
67255d04
RE
8082 }
8083 }
67255d04 8084
b13c8ab2
YQ
8085 return nRc;
8086 }
67255d04
RE
8087}
8088
34e8f22d
RE
8089/* Write into appropriate registers a function return value of type
8090 TYPE, given in virtual format. */
8091
8092static void
b508a996 8093arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8094 const gdb_byte *valbuf)
34e8f22d 8095{
be8626e0 8096 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8097 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8098
34e8f22d
RE
8099 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8100 {
64403bd1 8101 gdb_byte buf[FP_REGISTER_SIZE];
34e8f22d 8102
be8626e0 8103 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8104 {
8105 case ARM_FLOAT_FPA:
8106
96a5a1d3 8107 convert_typed_floating (valbuf, type, buf, arm_ext_type (gdbarch));
b508a996 8108 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8109 break;
8110
fd50bc42 8111 case ARM_FLOAT_SOFT_FPA:
08216dd7 8112 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8113 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8114 not using the VFP ABI code. */
8115 case ARM_FLOAT_VFP:
b508a996
RE
8116 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8117 if (TYPE_LENGTH (type) > 4)
8118 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8119 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8120 break;
8121
8122 default:
9b20d036
MS
8123 internal_error (__FILE__, __LINE__,
8124 _("arm_store_return_value: Floating "
8125 "point model not supported"));
08216dd7
RE
8126 break;
8127 }
34e8f22d 8128 }
b508a996
RE
8129 else if (TYPE_CODE (type) == TYPE_CODE_INT
8130 || TYPE_CODE (type) == TYPE_CODE_CHAR
8131 || TYPE_CODE (type) == TYPE_CODE_BOOL
8132 || TYPE_CODE (type) == TYPE_CODE_PTR
aa006118 8133 || TYPE_IS_REFERENCE (type)
b508a996
RE
8134 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8135 {
8136 if (TYPE_LENGTH (type) <= 4)
8137 {
8138 /* Values of one word or less are zero/sign-extended and
8139 returned in r0. */
7a5ea0d4 8140 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8141 LONGEST val = unpack_long (type, valbuf);
8142
e17a4113 8143 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8144 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8145 }
8146 else
8147 {
8148 /* Integral values greater than one word are stored in consecutive
8149 registers starting with r0. This will always be a multiple of
 8150 the register size. */
8151 int len = TYPE_LENGTH (type);
8152 int regno = ARM_A1_REGNUM;
8153
8154 while (len > 0)
8155 {
8156 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8157 len -= INT_REGISTER_SIZE;
8158 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8159 }
8160 }
8161 }
34e8f22d 8162 else
b508a996
RE
8163 {
8164 /* For a structure or union the behaviour is as if the value had
8165 been stored to word-aligned memory and then loaded into
8166 registers with 32-bit load instruction(s). */
8167 int len = TYPE_LENGTH (type);
8168 int regno = ARM_A1_REGNUM;
7a5ea0d4 8169 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8170
8171 while (len > 0)
8172 {
8173 memcpy (tmpbuf, valbuf,
7a5ea0d4 8174 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8175 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8176 len -= INT_REGISTER_SIZE;
8177 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8178 }
8179 }
34e8f22d
RE
8180}
8181
2af48f68
PB
8182
8183/* Handle function return values. */
8184
8185static enum return_value_convention
6a3a010b 8186arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8187 struct type *valtype, struct regcache *regcache,
8188 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8189{
7c00367c 8190 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8191 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8192 enum arm_vfp_cprc_base_type vfp_base_type;
8193 int vfp_base_count;
8194
8195 if (arm_vfp_abi_for_function (gdbarch, func_type)
8196 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8197 {
8198 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8199 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8200 int i;
8201 for (i = 0; i < vfp_base_count; i++)
8202 {
58d6951d
DJ
8203 if (reg_char == 'q')
8204 {
8205 if (writebuf)
8206 arm_neon_quad_write (gdbarch, regcache, i,
8207 writebuf + i * unit_length);
8208
8209 if (readbuf)
8210 arm_neon_quad_read (gdbarch, regcache, i,
8211 readbuf + i * unit_length);
8212 }
8213 else
8214 {
8215 char name_buf[4];
8216 int regnum;
8217
8c042590 8218 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8219 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8220 strlen (name_buf));
8221 if (writebuf)
8222 regcache_cooked_write (regcache, regnum,
8223 writebuf + i * unit_length);
8224 if (readbuf)
8225 regcache_cooked_read (regcache, regnum,
8226 readbuf + i * unit_length);
8227 }
90445bd3
DJ
8228 }
8229 return RETURN_VALUE_REGISTER_CONVENTION;
8230 }
7c00367c 8231
2af48f68
PB
8232 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8233 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8234 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8235 {
7c00367c
MK
8236 if (tdep->struct_return == pcc_struct_return
8237 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8238 return RETURN_VALUE_STRUCT_CONVENTION;
8239 }
b13c8ab2
YQ
8240 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8241 {
8242 if (arm_return_in_memory (gdbarch, valtype))
8243 return RETURN_VALUE_STRUCT_CONVENTION;
8244 }
7052e42c 8245
2af48f68
PB
8246 if (writebuf)
8247 arm_store_return_value (valtype, regcache, writebuf);
8248
8249 if (readbuf)
8250 arm_extract_return_value (valtype, regcache, readbuf);
8251
8252 return RETURN_VALUE_REGISTER_CONVENTION;
8253}
8254
8255
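/* Extract the longjmp resume PC from the jmp_buf pointed to by r0.
   Return 1 on success, 0 if the jmp_buf could not be read. */
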
9df628e0 8256static int
60ade65d 8257arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8258{
e17a4113
UW
8259 struct gdbarch *gdbarch = get_frame_arch (frame);
8260 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8261 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8262 CORE_ADDR jb_addr;
e362b510 8263 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8264
60ade65d 8265 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8266
8267 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8268 INT_REGISTER_SIZE))
9df628e0
RE
8269 return 0;
8270
e17a4113 8271 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8272 return 1;
8273}
8274
faa95490
DJ
8275/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8276 return the target PC. Otherwise return 0. */
c906108c
SS
8277
8278CORE_ADDR
52f729a7 8279arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8280{
2c02bd72 8281 const char *name;
faa95490 8282 int namelen;
c906108c
SS
8283 CORE_ADDR start_addr;
8284
8285 /* Find the starting address and name of the function containing the PC. */
8286 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8287 {
8288 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8289 check here. */
8290 start_addr = arm_skip_bx_reg (frame, pc);
8291 if (start_addr != 0)
8292 return start_addr;
8293
8294 return 0;
8295 }
c906108c 8296
faa95490
DJ
8297 /* If PC is in a Thumb call or return stub, return the address of the
8298 target PC, which is in a register. The thunk functions are called
8299 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
8300 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8301 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8302 if (startswith (name, "_call_via_")
8303 || startswith (name, "__ARM_call_via_"))
c906108c 8304 {
ed9a39eb
JM
8305 /* Use the name suffix to determine which register contains the
8306 target PC. */
a121b7c1 8307 static const char *table[15] =
c5aa993b
JM
8308 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8309 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8310 };
c906108c 8311 int regno;
faa95490 8312 int offset = strlen (name) - 2;
c906108c
SS
8313
8314 for (regno = 0; regno <= 14; regno++)
faa95490 8315 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8316 return get_frame_register_unsigned (frame, regno);
c906108c 8317 }
ed9a39eb 8318
faa95490
DJ
8319 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8320 non-interworking calls to foo. We could decode the stubs
8321 to find the target but it's easier to use the symbol table. */
8322 namelen = strlen (name);
8323 if (name[0] == '_' && name[1] == '_'
8324 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8325 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8326 || (namelen > 2 + strlen ("_from_arm")
61012eef 8327 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8328 {
8329 char *target_name;
8330 int target_len = namelen - 2;
3b7344d5 8331 struct bound_minimal_symbol minsym;
faa95490
DJ
8332 struct objfile *objfile;
8333 struct obj_section *sec;
8334
8335 if (name[namelen - 1] == 'b')
8336 target_len -= strlen ("_from_thumb");
8337 else
8338 target_len -= strlen ("_from_arm");
8339
224c3ddb 8340 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8341 memcpy (target_name, name + 2, target_len);
8342 target_name[target_len] = '\0';
8343
8344 sec = find_pc_section (pc);
8345 objfile = (sec == NULL) ? NULL : sec->objfile;
8346 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8347 if (minsym.minsym != NULL)
77e371c0 8348 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8349 else
8350 return 0;
8351 }
8352
c5aa993b 8353 return 0; /* not a stub */
c906108c
SS
8354}
8355
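/* Handlers for the "set arm" and "show arm" prefix commands: print a
   usage message or list the current settings. */
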
afd7eef0 8356static void
981a3fb3 8357set_arm_command (const char *args, int from_tty)
afd7eef0 8358{
edefbb7c
AC
8359 printf_unfiltered (_("\
8360\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8361 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8362}
8363
8364static void
981a3fb3 8365show_arm_command (const char *args, int from_tty)
afd7eef0 8366{
26304000 8367 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8368}
8369
28e97307
DJ
8370static void
8371arm_update_current_architecture (void)
fd50bc42 8372{
28e97307 8373 struct gdbarch_info info;
fd50bc42 8374
28e97307 8375 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8376 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8377 return;
fd50bc42 8378
28e97307
DJ
8379 /* Update the architecture. */
8380 gdbarch_info_init (&info);
fd50bc42 8381
28e97307 8382 if (!gdbarch_update_p (info))
9b20d036 8383 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8384}
8385
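/* Handler invoked when the ARM floating point model setting is changed:
   translate the selected string into an arm_float_model value and update
   the current architecture. */
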
8386static void
8387set_fp_model_sfunc (char *args, int from_tty,
8388 struct cmd_list_element *c)
8389{
570dc176 8390 int fp_model;
fd50bc42
RE
8391
8392 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8393 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8394 {
aead7601 8395 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8396 break;
8397 }
8398
8399 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8400 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8401 current_fp_model);
8402
28e97307 8403 arm_update_current_architecture ();
fd50bc42
RE
8404}
8405
8406static void
08546159
AC
8407show_fp_model (struct ui_file *file, int from_tty,
8408 struct cmd_list_element *c, const char *value)
fd50bc42 8409{
f5656ead 8410 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8411
28e97307 8412 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8413 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8414 fprintf_filtered (file, _("\
8415The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8416 fp_model_strings[tdep->fp_model]);
8417 else
8418 fprintf_filtered (file, _("\
8419The current ARM floating point model is \"%s\".\n"),
8420 fp_model_strings[arm_fp_model]);
8421}
8422
8423static void
8424arm_set_abi (char *args, int from_tty,
8425 struct cmd_list_element *c)
8426{
570dc176 8427 int arm_abi;
28e97307
DJ
8428
8429 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8430 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8431 {
aead7601 8432 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8433 break;
8434 }
8435
8436 if (arm_abi == ARM_ABI_LAST)
8437 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8438 arm_abi_string);
8439
8440 arm_update_current_architecture ();
8441}
8442
8443static void
8444arm_show_abi (struct ui_file *file, int from_tty,
8445 struct cmd_list_element *c, const char *value)
8446{
f5656ead 8447 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8448
8449 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8450 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8451 fprintf_filtered (file, _("\
8452The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8453 arm_abi_strings[tdep->arm_abi]);
8454 else
8455 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8456 arm_abi_string);
fd50bc42
RE
8457}
8458
0428b8f5
DJ
8459static void
8460arm_show_fallback_mode (struct ui_file *file, int from_tty,
8461 struct cmd_list_element *c, const char *value)
8462{
0963b4bd
MS
8463 fprintf_filtered (file,
8464 _("The current execution mode assumed "
8465 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8466 arm_fallback_mode_string);
8467}
8468
8469static void
8470arm_show_force_mode (struct ui_file *file, int from_tty,
8471 struct cmd_list_element *c, const char *value)
8472{
0963b4bd
MS
8473 fprintf_filtered (file,
8474 _("The current execution mode assumed "
8475 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8476 arm_force_mode_string);
8477}
8478
afd7eef0
RE
8479/* If the user changes the register disassembly style used for info
8480 register and other commands, we have to also switch the style used
8481 in opcodes for disassembly output. This function is run in the "set
8482 arm disassembly" command, and does that. */
bc90b915
FN
8483
8484static void
afd7eef0 8485set_disassembly_style_sfunc (char *args, int from_tty,
65b48a81 8486 struct cmd_list_element *c)
bc90b915 8487{
65b48a81
PB
8488 /* Convert the short style name into the long style name (e.g., reg-names-*)
8489 before calling the generic set_disassembler_options() function. */
8490 std::string long_name = std::string ("reg-names-") + disassembly_style;
8491 set_disassembler_options (&long_name[0]);
8492}
8493
8494static void
8495show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8496 struct cmd_list_element *c, const char *value)
8497{
8498 struct gdbarch *gdbarch = get_current_arch ();
8499 char *options = get_disassembler_options (gdbarch);
8500 const char *style = "";
8501 int len = 0;
f995bbe8 8502 const char *opt;
65b48a81
PB
8503
8504 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8505 if (CONST_STRNEQ (opt, "reg-names-"))
8506 {
8507 style = &opt[strlen ("reg-names-")];
8508 len = strcspn (style, ",");
8509 }
8510
8511 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
bc90b915
FN
8512}
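/* Example: "set arm disassembler std" stores "std" in
   disassembly_style; set_disassembly_style_sfunc then hands the long
   form "reg-names-std" to set_disassembler_options, and
   show_disassembly_style_sfunc strips the "reg-names-" prefix again
   when reporting the current value.  */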
8513\f
966fbf70 8514/* Return the ARM register name corresponding to register I. */
a208b0cb 8515static const char *
d93859e2 8516arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8517{
58d6951d
DJ
8518 const int num_regs = gdbarch_num_regs (gdbarch);
8519
8520 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8521 && i >= num_regs && i < num_regs + 32)
8522 {
8523 static const char *const vfp_pseudo_names[] = {
8524 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8525 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8526 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8527 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8528 };
8529
8530 return vfp_pseudo_names[i - num_regs];
8531 }
8532
8533 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8534 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8535 {
8536 static const char *const neon_pseudo_names[] = {
8537 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8538 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8539 };
8540
8541 return neon_pseudo_names[i - num_regs - 32];
8542 }
8543
ff6f572f
DJ
8544 if (i >= ARRAY_SIZE (arm_register_names))
8545 /* These registers are only supported on targets which supply
8546 an XML description. */
8547 return "";
8548
966fbf70
RE
8549 return arm_register_names[i];
8550}
8551
082fc60d
RE
8552/* Test whether the coff symbol specific value corresponds to a Thumb
8553 function. */
8554
8555static int
8556coff_sym_is_thumb (int val)
8557{
f8bf5763
PM
8558 return (val == C_THUMBEXT
8559 || val == C_THUMBSTAT
8560 || val == C_THUMBEXTFUNC
8561 || val == C_THUMBSTATFUNC
8562 || val == C_THUMBLABEL);
082fc60d
RE
8563}
8564
8565/* arm_coff_make_msymbol_special()
8566 arm_elf_make_msymbol_special()
8567
8568 These functions test whether the COFF or ELF symbol corresponds to
8569 an address in thumb code, and set a "special" bit in a minimal
8570 symbol to indicate that it does. */
8571
34e8f22d 8572static void
082fc60d
RE
8573arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8574{
39d911fc
TP
8575 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8576
8577 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8578 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8579 MSYMBOL_SET_SPECIAL (msym);
8580}
8581
34e8f22d 8582static void
082fc60d
RE
8583arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8584{
8585 if (coff_sym_is_thumb (val))
8586 MSYMBOL_SET_SPECIAL (msym);
8587}
8588
60c5725c 8589static void
c1bd65d0 8590arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8591{
9a3c8263 8592 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8593 unsigned int i;
8594
8595 for (i = 0; i < objfile->obfd->section_count; i++)
8596 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8597}
8598
8599static void
8600arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8601 asymbol *sym)
8602{
8603 const char *name = bfd_asymbol_name (sym);
8604 struct arm_per_objfile *data;
8605 VEC(arm_mapping_symbol_s) **map_p;
8606 struct arm_mapping_symbol new_map_sym;
8607
8608 gdb_assert (name[0] == '$');
8609 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8610 return;
8611
9a3c8263
SM
8612 data = (struct arm_per_objfile *) objfile_data (objfile,
8613 arm_objfile_data_key);
60c5725c
DJ
8614 if (data == NULL)
8615 {
8616 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8617 struct arm_per_objfile);
8618 set_objfile_data (objfile, arm_objfile_data_key, data);
8619 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8620 objfile->obfd->section_count,
8621 VEC(arm_mapping_symbol_s) *);
8622 }
8623 map_p = &data->section_maps[bfd_get_section (sym)->index];
8624
8625 new_map_sym.value = sym->value;
8626 new_map_sym.type = name[1];
8627
8628 /* Assume that most mapping symbols appear in order of increasing
8629 value. If they were randomly distributed, it would be faster to
8630 always push here and then sort at first use. */
8631 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8632 {
8633 struct arm_mapping_symbol *prev_map_sym;
8634
8635 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8636 if (prev_map_sym->value >= sym->value)
8637 {
8638 unsigned int idx;
8639 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8640 arm_compare_mapping_symbols);
8641 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8642 return;
8643 }
8644 }
8645
8646 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8647}
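/* For reference, the mapping symbols recorded above follow the ARM ELF
   convention: "$a" marks the start of a run of ARM code, "$t" a run of
   Thumb code, and "$d" a run of literal data.  A section whose symbols
   arrive as $a at 0x0, $d at 0x30 and $t at 0x40 therefore ends up with
   a per-section vector of (value, type) pairs, kept sorted so that the
   ARM/Thumb heuristics elsewhere in this file can search it with
   VEC_lower_bound.  */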
8648
756fe439 8649static void
61a1198a 8650arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8651{
9779414d 8652 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8653 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8654
8655 /* If necessary, set the T bit. */
8656 if (arm_apcs_32)
8657 {
9779414d 8658 ULONGEST val, t_bit;
61a1198a 8659 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8660 t_bit = arm_psr_thumb_bit (gdbarch);
8661 if (arm_pc_is_thumb (gdbarch, pc))
8662 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8663 val | t_bit);
756fe439 8664 else
61a1198a 8665 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8666 val & ~t_bit);
756fe439
DJ
8667 }
8668}
123dc839 8669
58d6951d
DJ
8670/* Read the contents of a NEON quad register, by reading from two
8671 double registers. This is used to implement the quad pseudo
8672 registers, and for argument passing in case the quad registers are
8673 missing; vectors are passed in quad registers when using the VFP
8674 ABI, even if a NEON unit is not present. REGNUM is the index of
8675 the quad register, in [0, 15]. */
8676
05d1431c 8677static enum register_status
58d6951d
DJ
8678arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8679 int regnum, gdb_byte *buf)
8680{
8681 char name_buf[4];
8682 gdb_byte reg_buf[8];
8683 int offset, double_regnum;
05d1431c 8684 enum register_status status;
58d6951d 8685
8c042590 8686 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8687 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8688 strlen (name_buf));
8689
8690 /* d0 is always the least significant half of q0. */
8691 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8692 offset = 8;
8693 else
8694 offset = 0;
8695
05d1431c
PA
8696 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8697 if (status != REG_VALID)
8698 return status;
58d6951d
DJ
8699 memcpy (buf + offset, reg_buf, 8);
8700
8701 offset = 8 - offset;
05d1431c
PA
8702 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8703 if (status != REG_VALID)
8704 return status;
58d6951d 8705 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8706
8707 return REG_VALID;
58d6951d
DJ
8708}
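/* Layout assumed above: quad register qN overlays double registers
   d(2N) and d(2N+1), with d(2N) always the least significant half.  So
   a read of q1 on a little-endian target places d2 in bytes 0-7 and d3
   in bytes 8-15 of the 16-byte buffer; a big-endian target uses the
   opposite offsets.  */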
8709
05d1431c 8710static enum register_status
58d6951d
DJ
8711arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8712 int regnum, gdb_byte *buf)
8713{
8714 const int num_regs = gdbarch_num_regs (gdbarch);
8715 char name_buf[4];
8716 gdb_byte reg_buf[8];
8717 int offset, double_regnum;
8718
8719 gdb_assert (regnum >= num_regs);
8720 regnum -= num_regs;
8721
8722 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8723 /* Quad-precision register. */
05d1431c 8724 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8725 else
8726 {
05d1431c
PA
8727 enum register_status status;
8728
58d6951d
DJ
8729 /* Single-precision register. */
8730 gdb_assert (regnum < 32);
8731
8732 /* s0 is always the least significant half of d0. */
8733 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8734 offset = (regnum & 1) ? 0 : 4;
8735 else
8736 offset = (regnum & 1) ? 4 : 0;
8737
8c042590 8738 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8739 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8740 strlen (name_buf));
8741
05d1431c
PA
8742 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8743 if (status == REG_VALID)
8744 memcpy (buf, reg_buf + offset, 4);
8745 return status;
58d6951d
DJ
8746 }
8747}
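/* Likewise each single-precision register sN is half of d(N >> 1): on a
   little-endian target s4 comes from bytes 0-3 of d2 and s5 from bytes
   4-7, with the two offsets swapped on big-endian targets.  */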
8748
8749/* Store the contents of BUF to a NEON quad register, by writing to
8750 two double registers. This is used to implement the quad pseudo
8751 registers, and for argument passing in case the quad registers are
8752 missing; vectors are passed in quad registers when using the VFP
8753 ABI, even if a NEON unit is not present. REGNUM is the index
8754 of the quad register, in [0, 15]. */
8755
8756static void
8757arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8758 int regnum, const gdb_byte *buf)
8759{
8760 char name_buf[4];
58d6951d
DJ
8761 int offset, double_regnum;
8762
8c042590 8763 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8764 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8765 strlen (name_buf));
8766
8767 /* d0 is always the least significant half of q0. */
8768 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8769 offset = 8;
8770 else
8771 offset = 0;
8772
8773 regcache_raw_write (regcache, double_regnum, buf + offset);
8774 offset = 8 - offset;
8775 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8776}
8777
8778static void
8779arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8780 int regnum, const gdb_byte *buf)
8781{
8782 const int num_regs = gdbarch_num_regs (gdbarch);
8783 char name_buf[4];
8784 gdb_byte reg_buf[8];
8785 int offset, double_regnum;
8786
8787 gdb_assert (regnum >= num_regs);
8788 regnum -= num_regs;
8789
8790 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8791 /* Quad-precision register. */
8792 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8793 else
8794 {
8795 /* Single-precision register. */
8796 gdb_assert (regnum < 32);
8797
8798 /* s0 is always the least significant half of d0. */
8799 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8800 offset = (regnum & 1) ? 0 : 4;
8801 else
8802 offset = (regnum & 1) ? 4 : 0;
8803
8c042590 8804 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8805 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8806 strlen (name_buf));
8807
8808 regcache_raw_read (regcache, double_regnum, reg_buf);
8809 memcpy (reg_buf + offset, buf, 4);
8810 regcache_raw_write (regcache, double_regnum, reg_buf);
8811 }
8812}
8813
123dc839
DJ
8814static struct value *
8815value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8816{
9a3c8263 8817 const int *reg_p = (const int *) baton;
123dc839
DJ
8818 return value_of_register (*reg_p, frame);
8819}
97e03143 8820\f
70f80edf
JT
8821static enum gdb_osabi
8822arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8823{
2af48f68 8824 unsigned int elfosabi;
70f80edf 8825 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8826
70f80edf 8827 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8828
28e97307
DJ
8829 if (elfosabi == ELFOSABI_ARM)
8830 /* GNU tools use this value. Check note sections in this case,
8831 as well. */
8832 bfd_map_over_sections (abfd,
8833 generic_elf_osabi_sniff_abi_tag_sections,
8834 &osabi);
97e03143 8835
28e97307 8836 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8837 return osabi;
97e03143
RE
8838}
8839
54483882
YQ
8840static int
8841arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8842 struct reggroup *group)
8843{
2c291032
YQ
8844 /* FPS register's type is INT, but it belongs to float_reggroup. Besides
8845 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8846 all_reggroup, of course. */
54483882 8847 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8848 return (group == float_reggroup
8849 || group == save_reggroup
8850 || group == restore_reggroup
8851 || group == all_reggroup);
54483882
YQ
8852 else
8853 return default_register_reggroup_p (gdbarch, regnum, group);
8854}
8855
25f8c692
JL
8856\f
8857/* For backward-compatibility we allow two 'g' packet lengths with
8858 the remote protocol depending on whether FPA registers are
8859 supplied. M-profile targets do not have FPA registers, but some
8860 stubs already exist in the wild which use a 'g' packet which
8861 supplies them albeit with dummy values. The packet format which
8862 includes FPA registers should be considered deprecated for
8863 M-profile targets. */
8864
8865static void
8866arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8867{
8868 if (gdbarch_tdep (gdbarch)->is_m)
8869 {
8870 /* If we know from the executable this is an M-profile target,
8871 cater for remote targets whose register set layout is the
8872 same as the FPA layout. */
8873 register_remote_g_packet_guess (gdbarch,
03145bf4 8874 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8875 (16 * INT_REGISTER_SIZE)
8876 + (8 * FP_REGISTER_SIZE)
8877 + (2 * INT_REGISTER_SIZE),
8878 tdesc_arm_with_m_fpa_layout);
8879
8880 /* The regular M-profile layout. */
8881 register_remote_g_packet_guess (gdbarch,
8882 /* r0-r12,sp,lr,pc; xpsr */
8883 (16 * INT_REGISTER_SIZE)
8884 + INT_REGISTER_SIZE,
8885 tdesc_arm_with_m);
3184d3f9
JL
8886
8887 /* M-profile plus M4F VFP. */
8888 register_remote_g_packet_guess (gdbarch,
8889 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8890 (16 * INT_REGISTER_SIZE)
8891 + (16 * VFP_REGISTER_SIZE)
8892 + (2 * INT_REGISTER_SIZE),
8893 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8894 }
8895
8896 /* Otherwise we don't have a useful guess. */
8897}
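/* For concreteness (assuming the usual sizes of 4 bytes per core
   register, 12 bytes per FPA register and 8 bytes per VFP double
   register), the three guesses registered above correspond to 'g'
   packet payloads of:

     FPA-style layout: 16 * 4 + 8 * 12 + 2 * 4 = 168 bytes
     plain M-profile: 16 * 4 + 4 = 68 bytes
     M-profile with VFP d0-d15: 16 * 4 + 16 * 8 + 2 * 4 = 200 bytes

   A remote stub whose 'g' reply matches one of these lengths is paired
   with the corresponding target description.  */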
8898
7eb89530
YQ
8899/* Implement the code_of_frame_writable gdbarch method. */
8900
8901static int
8902arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8903{
8904 if (gdbarch_tdep (gdbarch)->is_m
8905 && get_frame_type (frame) == SIGTRAMP_FRAME)
8906 {
8907 /* M-profile exception frames return to some magic PCs, which
8908 aren't writable at all. */
8909 return 0;
8910 }
8911 else
8912 return 1;
8913}
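/* The "magic PCs" in question are EXC_RETURN values such as 0xfffffff1,
   0xfffffff9 or 0xfffffffd, which M-profile cores interpret specially on
   exception return; there is no actual code at those addresses, so
   software breakpoints cannot be written there.  */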
8914
70f80edf 8915\f
da3c6d4a
MS
8916/* Initialize the current architecture based on INFO. If possible,
8917 re-use an architecture from ARCHES, which is a list of
8918 architectures already created during this debugging session.
97e03143 8919
da3c6d4a
MS
8920 Called e.g. at program startup, when reading a core file, and when
8921 reading a binary file. */
97e03143 8922
39bbf761
RE
8923static struct gdbarch *
8924arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8925{
97e03143 8926 struct gdbarch_tdep *tdep;
39bbf761 8927 struct gdbarch *gdbarch;
28e97307
DJ
8928 struct gdbarch_list *best_arch;
8929 enum arm_abi_kind arm_abi = arm_abi_global;
8930 enum arm_float_model fp_model = arm_fp_model;
123dc839 8931 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8932 int i, is_m = 0;
330c6ca9 8933 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8934 int have_wmmx_registers = 0;
58d6951d 8935 int have_neon = 0;
ff6f572f 8936 int have_fpa_registers = 1;
9779414d
DJ
8937 const struct target_desc *tdesc = info.target_desc;
8938
8939 /* If we have an object to base this architecture on, try to determine
8940 its ABI. */
8941
8942 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8943 {
8944 int ei_osabi, e_flags;
8945
8946 switch (bfd_get_flavour (info.abfd))
8947 {
9779414d
DJ
8948 case bfd_target_coff_flavour:
8949 /* Assume it's an old APCS-style ABI. */
8950 /* XXX WinCE? */
8951 arm_abi = ARM_ABI_APCS;
8952 break;
8953
8954 case bfd_target_elf_flavour:
8955 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8956 e_flags = elf_elfheader (info.abfd)->e_flags;
8957
8958 if (ei_osabi == ELFOSABI_ARM)
8959 {
8960 /* GNU tools used to use this value, but do not for EABI
8961 objects. There's nowhere to tag an EABI version
8962 anyway, so assume APCS. */
8963 arm_abi = ARM_ABI_APCS;
8964 }
d403db27 8965 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
8966 {
8967 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8968 int attr_arch, attr_profile;
8969
8970 switch (eabi_ver)
8971 {
8972 case EF_ARM_EABI_UNKNOWN:
8973 /* Assume GNU tools. */
8974 arm_abi = ARM_ABI_APCS;
8975 break;
8976
8977 case EF_ARM_EABI_VER4:
8978 case EF_ARM_EABI_VER5:
8979 arm_abi = ARM_ABI_AAPCS;
8980 /* EABI binaries default to VFP float ordering.
8981 They may also contain build attributes that can
8982 be used to identify if the VFP argument-passing
8983 ABI is in use. */
8984 if (fp_model == ARM_FLOAT_AUTO)
8985 {
8986#ifdef HAVE_ELF
8987 switch (bfd_elf_get_obj_attr_int (info.abfd,
8988 OBJ_ATTR_PROC,
8989 Tag_ABI_VFP_args))
8990 {
b35b0298 8991 case AEABI_VFP_args_base:
9779414d
DJ
8992 /* "The user intended FP parameter/result
8993 passing to conform to AAPCS, base
8994 variant". */
8995 fp_model = ARM_FLOAT_SOFT_VFP;
8996 break;
b35b0298 8997 case AEABI_VFP_args_vfp:
9779414d
DJ
8998 /* "The user intended FP parameter/result
8999 passing to conform to AAPCS, VFP
9000 variant". */
9001 fp_model = ARM_FLOAT_VFP;
9002 break;
b35b0298 9003 case AEABI_VFP_args_toolchain:
9779414d
DJ
9004 /* "The user intended FP parameter/result
9005 passing to conform to tool chain-specific
9006 conventions" - we don't know any such
9007 conventions, so leave it as "auto". */
9008 break;
b35b0298 9009 case AEABI_VFP_args_compatible:
5c294fee
TG
9010 /* "Code is compatible with both the base
9011 and VFP variants; the user did not permit
9012 non-variadic functions to pass FP
9013 parameters/results" - leave it as
9014 "auto". */
9015 break;
9779414d
DJ
9016 default:
9017 /* Attribute value not mentioned in the
5c294fee 9018 November 2012 ABI, so leave it as
9779414d
DJ
9019 "auto". */
9020 break;
9021 }
9022#else
9023 fp_model = ARM_FLOAT_SOFT_VFP;
9024#endif
9025 }
9026 break;
9027
9028 default:
9029 /* Leave it as "auto". */
9030 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9031 break;
9032 }
9033
9034#ifdef HAVE_ELF
9035 /* Detect M-profile programs. This only works if the
9036 executable file includes build attributes; GCC does
9037 copy them to the executable, but e.g. RealView does
9038 not. */
9039 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9040 Tag_CPU_arch);
0963b4bd
MS
9041 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9042 OBJ_ATTR_PROC,
9779414d
DJ
9043 Tag_CPU_arch_profile);
9044 /* GCC specifies the profile for v6-M; RealView only
9045 specifies the profile for architectures starting with
9046 V7 (as opposed to architectures with a tag
9047 numerically greater than TAG_CPU_ARCH_V7). */
9048 if (!tdesc_has_registers (tdesc)
9049 && (attr_arch == TAG_CPU_ARCH_V6_M
9050 || attr_arch == TAG_CPU_ARCH_V6S_M
9051 || attr_profile == 'M'))
25f8c692 9052 is_m = 1;
9779414d
DJ
9053#endif
9054 }
9055
9056 if (fp_model == ARM_FLOAT_AUTO)
9057 {
9058 int e_flags = elf_elfheader (info.abfd)->e_flags;
9059
9060 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9061 {
9062 case 0:
9063 /* Leave it as "auto". Strictly speaking this case
9064 means FPA, but almost nobody uses that now, and
9065 many toolchains fail to set the appropriate bits
9066 for the floating-point model they use. */
9067 break;
9068 case EF_ARM_SOFT_FLOAT:
9069 fp_model = ARM_FLOAT_SOFT_FPA;
9070 break;
9071 case EF_ARM_VFP_FLOAT:
9072 fp_model = ARM_FLOAT_VFP;
9073 break;
9074 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9075 fp_model = ARM_FLOAT_SOFT_VFP;
9076 break;
9077 }
9078 }
9079
9080 if (e_flags & EF_ARM_BE8)
9081 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9082
9083 break;
9084
9085 default:
9086 /* Leave it as "auto". */
9087 break;
9088 }
9089 }
123dc839
DJ
9090
9091 /* Check any target description for validity. */
9779414d 9092 if (tdesc_has_registers (tdesc))
123dc839
DJ
9093 {
9094 /* For most registers we require GDB's default names; but also allow
9095 the numeric names for sp / lr / pc, as a convenience. */
9096 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9097 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9098 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9099
9100 const struct tdesc_feature *feature;
58d6951d 9101 int valid_p;
123dc839 9102
9779414d 9103 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9104 "org.gnu.gdb.arm.core");
9105 if (feature == NULL)
9779414d
DJ
9106 {
9107 feature = tdesc_find_feature (tdesc,
9108 "org.gnu.gdb.arm.m-profile");
9109 if (feature == NULL)
9110 return NULL;
9111 else
9112 is_m = 1;
9113 }
123dc839
DJ
9114
9115 tdesc_data = tdesc_data_alloc ();
9116
9117 valid_p = 1;
9118 for (i = 0; i < ARM_SP_REGNUM; i++)
9119 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9120 arm_register_names[i]);
9121 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9122 ARM_SP_REGNUM,
9123 arm_sp_names);
9124 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9125 ARM_LR_REGNUM,
9126 arm_lr_names);
9127 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9128 ARM_PC_REGNUM,
9129 arm_pc_names);
9779414d
DJ
9130 if (is_m)
9131 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9132 ARM_PS_REGNUM, "xpsr");
9133 else
9134 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9135 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9136
9137 if (!valid_p)
9138 {
9139 tdesc_data_cleanup (tdesc_data);
9140 return NULL;
9141 }
9142
9779414d 9143 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9144 "org.gnu.gdb.arm.fpa");
9145 if (feature != NULL)
9146 {
9147 valid_p = 1;
9148 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9149 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9150 arm_register_names[i]);
9151 if (!valid_p)
9152 {
9153 tdesc_data_cleanup (tdesc_data);
9154 return NULL;
9155 }
9156 }
ff6f572f
DJ
9157 else
9158 have_fpa_registers = 0;
9159
9779414d 9160 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9161 "org.gnu.gdb.xscale.iwmmxt");
9162 if (feature != NULL)
9163 {
9164 static const char *const iwmmxt_names[] = {
9165 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9166 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9167 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9168 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9169 };
9170
9171 valid_p = 1;
9172 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9173 valid_p
9174 &= tdesc_numbered_register (feature, tdesc_data, i,
9175 iwmmxt_names[i - ARM_WR0_REGNUM]);
9176
9177 /* Check for the control registers, but do not fail if they
9178 are missing. */
9179 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9180 tdesc_numbered_register (feature, tdesc_data, i,
9181 iwmmxt_names[i - ARM_WR0_REGNUM]);
9182
9183 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9184 valid_p
9185 &= tdesc_numbered_register (feature, tdesc_data, i,
9186 iwmmxt_names[i - ARM_WR0_REGNUM]);
9187
9188 if (!valid_p)
9189 {
9190 tdesc_data_cleanup (tdesc_data);
9191 return NULL;
9192 }
a56cc1ce
YQ
9193
9194 have_wmmx_registers = 1;
ff6f572f 9195 }
58d6951d
DJ
9196
9197 /* If we have a VFP unit, check whether the single precision registers
9198 are present. If not, then we will synthesize them as pseudo
9199 registers. */
9779414d 9200 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9201 "org.gnu.gdb.arm.vfp");
9202 if (feature != NULL)
9203 {
9204 static const char *const vfp_double_names[] = {
9205 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9206 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9207 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9208 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9209 };
9210
9211 /* Require the double precision registers. There must be either
9212 16 or 32. */
9213 valid_p = 1;
9214 for (i = 0; i < 32; i++)
9215 {
9216 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9217 ARM_D0_REGNUM + i,
9218 vfp_double_names[i]);
9219 if (!valid_p)
9220 break;
9221 }
2b9e5ea6
UW
9222 if (!valid_p && i == 16)
9223 valid_p = 1;
58d6951d 9224
2b9e5ea6
UW
9225 /* Also require FPSCR. */
9226 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9227 ARM_FPSCR_REGNUM, "fpscr");
9228 if (!valid_p)
58d6951d
DJ
9229 {
9230 tdesc_data_cleanup (tdesc_data);
9231 return NULL;
9232 }
9233
9234 if (tdesc_unnumbered_register (feature, "s0") == 0)
9235 have_vfp_pseudos = 1;
9236
330c6ca9 9237 vfp_register_count = i;
58d6951d
DJ
9238
9239 /* If we have VFP, also check for NEON. The architecture allows
9240 NEON without VFP (integer vector operations only), but GDB
9241 does not support that. */
9779414d 9242 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9243 "org.gnu.gdb.arm.neon");
9244 if (feature != NULL)
9245 {
9246 /* NEON requires 32 double-precision registers. */
9247 if (i != 32)
9248 {
9249 tdesc_data_cleanup (tdesc_data);
9250 return NULL;
9251 }
9252
9253 /* If there are quad registers defined by the stub, use
9254 their type; otherwise (normally) provide them with
9255 the default type. */
9256 if (tdesc_unnumbered_register (feature, "q0") == 0)
9257 have_neon_pseudos = 1;
9258
9259 have_neon = 1;
9260 }
9261 }
123dc839 9262 }
39bbf761 9263
28e97307
DJ
9264 /* If there is already a candidate, use it. */
9265 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9266 best_arch != NULL;
9267 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9268 {
b8926edc
DJ
9269 if (arm_abi != ARM_ABI_AUTO
9270 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9271 continue;
9272
b8926edc
DJ
9273 if (fp_model != ARM_FLOAT_AUTO
9274 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9275 continue;
9276
58d6951d
DJ
9277 /* There are various other properties in tdep that we do not
9278 need to check here: those derived from a target description,
9279 since gdbarches with a different target description are
9280 automatically disqualified. */
9281
9779414d
DJ
9282 /* Do check is_m, though, since it might come from the binary. */
9283 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9284 continue;
9285
28e97307
DJ
9286 /* Found a match. */
9287 break;
9288 }
97e03143 9289
28e97307 9290 if (best_arch != NULL)
123dc839
DJ
9291 {
9292 if (tdesc_data != NULL)
9293 tdesc_data_cleanup (tdesc_data);
9294 return best_arch->gdbarch;
9295 }
28e97307 9296
8d749320 9297 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9298 gdbarch = gdbarch_alloc (&info, tdep);
9299
28e97307
DJ
9300 /* Record additional information about the architecture we are defining.
9301 These are gdbarch discriminators, like the OSABI. */
9302 tdep->arm_abi = arm_abi;
9303 tdep->fp_model = fp_model;
9779414d 9304 tdep->is_m = is_m;
ff6f572f 9305 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9306 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9307 gdb_assert (vfp_register_count == 0
9308 || vfp_register_count == 16
9309 || vfp_register_count == 32);
9310 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9311 tdep->have_vfp_pseudos = have_vfp_pseudos;
9312 tdep->have_neon_pseudos = have_neon_pseudos;
9313 tdep->have_neon = have_neon;
08216dd7 9314
25f8c692
JL
9315 arm_register_g_packet_guesses (gdbarch);
9316
08216dd7 9317 /* Breakpoints. */
9d4fde75 9318 switch (info.byte_order_for_code)
67255d04
RE
9319 {
9320 case BFD_ENDIAN_BIG:
66e810cd
RE
9321 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9322 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9323 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9324 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9325
67255d04
RE
9326 break;
9327
9328 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9329 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9330 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9331 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9332 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9333
67255d04
RE
9334 break;
9335
9336 default:
9337 internal_error (__FILE__, __LINE__,
edefbb7c 9338 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9339 }
9340
d7b486e7
RE
9341 /* On ARM targets char defaults to unsigned. */
9342 set_gdbarch_char_signed (gdbarch, 0);
9343
53375380
PA
9344 /* wchar_t is unsigned under the AAPCS. */
9345 if (tdep->arm_abi == ARM_ABI_AAPCS)
9346 set_gdbarch_wchar_signed (gdbarch, 0);
9347 else
9348 set_gdbarch_wchar_signed (gdbarch, 1);
53375380 9349
cca44b1b
JB
9350 /* Note: for displaced stepping, this includes the breakpoint, and one word
9351 of additional scratch space. This setting isn't used for anything besides
9352 displaced stepping at present. */
9353 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9354
9df628e0 9355 /* This should be low enough for everything. */
97e03143 9356 tdep->lowest_pc = 0x20;
94c30b78 9357 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9358
7c00367c
MK
9359 /* The default, for both APCS and AAPCS, is to return small
9360 structures in registers. */
9361 tdep->struct_return = reg_struct_return;
9362
2dd604e7 9363 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9364 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9365
7eb89530
YQ
9366 if (is_m)
9367 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9368
756fe439
DJ
9369 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9370
148754e5 9371 /* Frame handling. */
a262aec2 9372 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9373 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9374 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9375
eb5492fa 9376 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9377
34e8f22d 9378 /* Address manipulation. */
34e8f22d
RE
9379 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9380
34e8f22d
RE
9381 /* Advance PC across function entry code. */
9382 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9383
c9cf6e20
MG
9384 /* Detect whether PC is at a point where the stack has been destroyed. */
9385 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9386
190dce09
UW
9387 /* Skip trampolines. */
9388 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9389
34e8f22d
RE
9390 /* The stack grows downward. */
9391 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9392
9393 /* Breakpoint manipulation. */
04180708
YQ
9394 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9395 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
833b7ab5
YQ
9396 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9397 arm_breakpoint_kind_from_current_state);
34e8f22d
RE
9398
9399 /* Information about registers, etc. */
34e8f22d
RE
9400 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9401 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9402 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9403 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9404 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9405
ff6f572f
DJ
9406 /* This "info float" is FPA-specific. Use the generic version if we
9407 do not have FPA. */
9408 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9409 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9410
26216b98 9411 /* Internal <-> external register number maps. */
ff6f572f 9412 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9413 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9414
34e8f22d
RE
9415 set_gdbarch_register_name (gdbarch, arm_register_name);
9416
9417 /* Returning results. */
2af48f68 9418 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9419
03d48a7d
RE
9420 /* Disassembly. */
9421 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9422
34e8f22d
RE
9423 /* Minsymbol frobbing. */
9424 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9425 set_gdbarch_coff_make_msymbol_special (gdbarch,
9426 arm_coff_make_msymbol_special);
60c5725c 9427 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9428
f9d67f43
DJ
9429 /* Thumb-2 IT block support. */
9430 set_gdbarch_adjust_breakpoint_address (gdbarch,
9431 arm_adjust_breakpoint_address);
9432
0d5de010
DJ
9433 /* Virtual tables. */
9434 set_gdbarch_vbit_in_delta (gdbarch, 1);
9435
97e03143 9436 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9437 gdbarch_init_osabi (info, gdbarch);
97e03143 9438
b39cc962
DJ
9439 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9440
eb5492fa 9441 /* Add some default predicates. */
2ae28aa9
YQ
9442 if (is_m)
9443 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9444 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9445 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9446 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9447 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9448 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9449
97e03143
RE
9450 /* Now we have tuned the configuration, set a few final things,
9451 based on what the OS ABI has told us. */
9452
b8926edc
DJ
9453 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9454 binaries are always marked. */
9455 if (tdep->arm_abi == ARM_ABI_AUTO)
9456 tdep->arm_abi = ARM_ABI_APCS;
9457
e3039479
UW
9458 /* Watchpoints are not steppable. */
9459 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9460
b8926edc
DJ
9461 /* We used to default to FPA for generic ARM, but almost nobody
9462 uses that now, and we now provide a way for the user to force
9463 the model. So default to the most useful variant. */
9464 if (tdep->fp_model == ARM_FLOAT_AUTO)
9465 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9466
9df628e0
RE
9467 if (tdep->jb_pc >= 0)
9468 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9469
08216dd7 9470 /* Floating point sizes and format. */
8da61cc4 9471 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9472 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9473 {
8da61cc4
DJ
9474 set_gdbarch_double_format
9475 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9476 set_gdbarch_long_double_format
9477 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9478 }
9479 else
9480 {
9481 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9482 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9483 }
9484
58d6951d
DJ
9485 if (have_vfp_pseudos)
9486 {
9487 /* NOTE: These are the only pseudo registers used by
9488 the ARM target at the moment. If more are added, a
9489 little more care in numbering will be needed. */
9490
9491 int num_pseudos = 32;
9492 if (have_neon_pseudos)
9493 num_pseudos += 16;
9494 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9495 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9496 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9497 }
9498
123dc839 9499 if (tdesc_data)
58d6951d
DJ
9500 {
9501 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9502
9779414d 9503 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9504
9505 /* Override tdesc_register_type to adjust the types of VFP
9506 registers for NEON. */
9507 set_gdbarch_register_type (gdbarch, arm_register_type);
9508 }
123dc839
DJ
9509
9510 /* Add standard register aliases. We add aliases even for those
9511 nanes which are used by the current architecture - it's simpler,
9512 and does no harm, since nothing ever lists user registers. */
9513 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9514 user_reg_add (gdbarch, arm_register_aliases[i].name,
9515 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9516
65b48a81
PB
9517 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9518 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9519
39bbf761
RE
9520 return gdbarch;
9521}
9522
97e03143 9523static void
2af46ca0 9524arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9525{
2af46ca0 9526 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9527
9528 if (tdep == NULL)
9529 return;
9530
edefbb7c 9531 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9532 (unsigned long) tdep->lowest_pc);
9533}
9534
0d4c07af 9535#if GDB_SELF_TEST
b121eeb9
YQ
9536namespace selftests
9537{
9538static void arm_record_test (void);
9539}
0d4c07af 9540#endif
b121eeb9 9541
c906108c 9542void
ed9a39eb 9543_initialize_arm_tdep (void)
c906108c 9544{
bc90b915 9545 long length;
53904c9e
AC
9546 const char *setname;
9547 const char *setdesc;
65b48a81 9548 int i, j;
edefbb7c
AC
9549 char regdesc[1024], *rdptr = regdesc;
9550 size_t rest = sizeof (regdesc);
085dd6e6 9551
42cf1509 9552 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9553
60c5725c 9554 arm_objfile_data_key
c1bd65d0 9555 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9556
0e9e9abd
UW
9557 /* Add ourselves to objfile event chain. */
9558 observer_attach_new_objfile (arm_exidx_new_objfile);
9559 arm_exidx_data_key
9560 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9561
70f80edf
JT
9562 /* Register an ELF OS ABI sniffer for ARM binaries. */
9563 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9564 bfd_target_elf_flavour,
9565 arm_elf_osabi_sniffer);
9566
9779414d
DJ
9567 /* Initialize the standard target descriptions. */
9568 initialize_tdesc_arm_with_m ();
25f8c692 9569 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9570 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9571 initialize_tdesc_arm_with_iwmmxt ();
9572 initialize_tdesc_arm_with_vfpv2 ();
9573 initialize_tdesc_arm_with_vfpv3 ();
9574 initialize_tdesc_arm_with_neon ();
9779414d 9575
afd7eef0
RE
9576 /* Add root prefix command for all "set arm"/"show arm" commands. */
9577 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9578 _("Various ARM-specific commands."),
afd7eef0
RE
9579 &setarmcmdlist, "set arm ", 0, &setlist);
9580
9581 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9582 _("Various ARM-specific commands."),
afd7eef0 9583 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9584
c5aa993b 9585
65b48a81
PB
9586 arm_disassembler_options = xstrdup ("reg-names-std");
9587 const disasm_options_t *disasm_options = disassembler_options_arm ();
9588 int num_disassembly_styles = 0;
9589 for (i = 0; disasm_options->name[i] != NULL; i++)
9590 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9591 num_disassembly_styles++;
9592
9593 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
8d749320 9594 valid_disassembly_styles = XNEWVEC (const char *,
65b48a81
PB
9595 num_disassembly_styles + 1);
9596 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9597 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9598 {
9599 size_t offset = strlen ("reg-names-");
9600 const char *style = disasm_options->name[i];
9601 valid_disassembly_styles[j++] = &style[offset];
9602 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9603 disasm_options->description[i]);
9604 rdptr += length;
9605 rest -= length;
9606 }
94c30b78 9607 /* Mark the end of valid options. */
65b48a81 9608 valid_disassembly_styles[num_disassembly_styles] = NULL;
c906108c 9609
edefbb7c 9610 /* Create the help text. */
d7e74731
PA
9611 std::string helptext = string_printf ("%s%s%s",
9612 _("The valid values are:\n"),
9613 regdesc,
9614 _("The default is \"std\"."));
ed9a39eb 9615
edefbb7c
AC
9616 add_setshow_enum_cmd("disassembler", no_class,
9617 valid_disassembly_styles, &disassembly_style,
9618 _("Set the disassembly style."),
9619 _("Show the disassembly style."),
09b0e4b0 9620 helptext.c_str (),
2c5b56ce 9621 set_disassembly_style_sfunc,
65b48a81 9622 show_disassembly_style_sfunc,
7376b4c2 9623 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9624
9625 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9626 _("Set usage of ARM 32-bit mode."),
9627 _("Show usage of ARM 32-bit mode."),
9628 _("When off, a 26-bit PC will be used."),
2c5b56ce 9629 NULL,
0963b4bd
MS
9630 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9631 mode is %s. */
26304000 9632 &setarmcmdlist, &showarmcmdlist);
c906108c 9633
fd50bc42 9634 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9635 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9636 _("Set the floating point type."),
9637 _("Show the floating point type."),
9638 _("auto - Determine the FP type from the OS-ABI.\n\
9639softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9640fpa - FPA co-processor (GCC compiled).\n\
9641softvfp - Software FP with pure-endian doubles.\n\
9642vfp - VFP co-processor."),
edefbb7c 9643 set_fp_model_sfunc, show_fp_model,
7376b4c2 9644 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9645
28e97307
DJ
9646 /* Add a command to allow the user to force the ABI. */
9647 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9648 _("Set the ABI."),
9649 _("Show the ABI."),
9650 NULL, arm_set_abi, arm_show_abi,
9651 &setarmcmdlist, &showarmcmdlist);
9652
0428b8f5
DJ
9653 /* Add two commands to allow the user to force the assumed
9654 execution mode. */
9655 add_setshow_enum_cmd ("fallback-mode", class_support,
9656 arm_mode_strings, &arm_fallback_mode_string,
9657 _("Set the mode assumed when symbols are unavailable."),
9658 _("Show the mode assumed when symbols are unavailable."),
9659 NULL, NULL, arm_show_fallback_mode,
9660 &setarmcmdlist, &showarmcmdlist);
9661 add_setshow_enum_cmd ("force-mode", class_support,
9662 arm_mode_strings, &arm_force_mode_string,
9663 _("Set the mode assumed even when symbols are available."),
9664 _("Show the mode assumed even when symbols are available."),
9665 NULL, NULL, arm_show_force_mode,
9666 &setarmcmdlist, &showarmcmdlist);
9667
6529d2dd 9668 /* Debugging flag. */
edefbb7c
AC
9669 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9670 _("Set ARM debugging."),
9671 _("Show ARM debugging."),
9672 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9673 NULL,
7915a72c 9674 NULL, /* FIXME: i18n: "ARM debugging is %s." */
26304000 9675 &setdebuglist, &showdebuglist);
b121eeb9
YQ
9676
9677#if GDB_SELF_TEST
1526853e 9678 selftests::register_test ("arm-record", selftests::arm_record_test);
b121eeb9
YQ
9679#endif
9680
c906108c 9681}
72508ac0
PO
9682
9683/* ARM-reversible process record data structures. */
9684
9685#define ARM_INSN_SIZE_BYTES 4
9686#define THUMB_INSN_SIZE_BYTES 2
9687#define THUMB2_INSN_SIZE_BYTES 4
9688
9689
71e396f9
LM
9690/* Position of the bit within a 32-bit ARM instruction
9691 that defines whether the instruction is a load or store. */
72508ac0
PO
9692#define INSN_S_L_BIT_NUM 20
9693
9694#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9695 do \
9696 { \
9697 unsigned int reg_len = LENGTH; \
9698 if (reg_len) \
9699 { \
9700 REGS = XNEWVEC (uint32_t, reg_len); \
9701 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9702 } \
9703 } \
9704 while (0)
9705
9706#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9707 do \
9708 { \
9709 unsigned int mem_len = LENGTH; \
9710 if (mem_len) \
9711 { \
9712 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9713 memcpy(&MEMS->len, &RECORD_BUF[0], \
9714 sizeof(struct arm_mem_r) * LENGTH); \
9715 } \
9716 } \
9717 while (0)
9718
9719/* Boolean expression: checks whether the insn is already recorded or is yet to be decoded. */
9720#define INSN_RECORDED(ARM_RECORD) \
9721 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9722
9723/* ARM memory record structure. */
9724struct arm_mem_r
9725{
9726 uint32_t len; /* Record length. */
bfbbec00 9727 uint32_t addr; /* Memory address. */
72508ac0
PO
9728};
9729
9730/* ARM instruction record contains opcode of current insn
9731 and execution state (before entry to decode_insn()),
9732 contains list of to-be-modified registers and
9733 memory blocks (on return from decode_insn()). */
9734
9735typedef struct insn_decode_record_t
9736{
9737 struct gdbarch *gdbarch;
9738 struct regcache *regcache;
9739 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9740 uint32_t arm_insn; /* Should accommodate thumb. */
9741 uint32_t cond; /* Condition code. */
9742 uint32_t opcode; /* Insn opcode. */
9743 uint32_t decode; /* Insn decode bits. */
9744 uint32_t mem_rec_count; /* No of mem records. */
9745 uint32_t reg_rec_count; /* No of reg records. */
9746 uint32_t *arm_regs; /* Registers to be saved for this record. */
9747 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9748} insn_decode_record;
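/* A minimal sketch of how the record routines below fill in this
   structure via the REG_ALLOC / MEM_ALLOC macros (the register numbers
   and the address are invented for the example):

     uint32_t record_buf[2], record_buf_mem[2];

     record_buf[0] = ARM_PS_REGNUM;      // flags will be clobbered
     record_buf[1] = ARM_LR_REGNUM;      // link register will change
     arm_insn_r->reg_rec_count = 2;

     record_buf_mem[0] = 4;              // four bytes will be written...
     record_buf_mem[1] = 0x20001000;     // ...at this (made-up) address
     arm_insn_r->mem_rec_count = 1;

     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                record_buf_mem);
*/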
9749
9750
9751/* Checks ARM SBZ and SBO mandatory fields. */
9752
9753static int
9754sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9755{
9756 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9757
9758 if (!len)
9759 return 1;
9760
9761 if (!sbo)
9762 ones = ~ones;
9763
9764 while (ones)
9765 {
9766 if (!(ones & sbo))
9767 {
9768 return 0;
9769 }
9770 ones = ones >> 1;
9771 }
9772 return 1;
9773}
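/* Example: for an encoding whose bits [15:12] are a four-bit
   "should be one" field, the check is sbo_sbz (insn, 13, 4, 1)
   (BIT_NUM is 1-based), which succeeds only when all four bits read as
   1; passing SBO as 0 selects the complementary "should be zero"
   check.  */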
9774
c6ec2b30
OJ
9775enum arm_record_result
9776{
9777 ARM_RECORD_SUCCESS = 0,
9778 ARM_RECORD_FAILURE = 1
9779};
9780
72508ac0
PO
9781typedef enum
9782{
9783 ARM_RECORD_STRH=1,
9784 ARM_RECORD_STRD
9785} arm_record_strx_t;
9786
9787typedef enum
9788{
9789 ARM_RECORD=1,
9790 THUMB_RECORD,
9791 THUMB2_RECORD
9792} record_type_t;
9793
9794
9795static int
9796arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9797 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9798{
9799
9800 struct regcache *reg_cache = arm_insn_r->regcache;
9801 ULONGEST u_regval[2]= {0};
9802
9803 uint32_t reg_src1 = 0, reg_src2 = 0;
9804 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9805
9806 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9807 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9808
9809 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9810 {
9811 /* 1) Handle misc store, immediate offset. */
9812 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9813 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9814 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9815 regcache_raw_read_unsigned (reg_cache, reg_src1,
9816 &u_regval[0]);
9817 if (ARM_PC_REGNUM == reg_src1)
9818 {
9819 /* If R15 was used as Rn, use the current PC + 8. */
9820 u_regval[0] = u_regval[0] + 8;
9821 }
9822 offset_8 = (immed_high << 4) | immed_low;
9823 /* Calculate target store address. */
9824 if (14 == arm_insn_r->opcode)
9825 {
9826 tgt_mem_addr = u_regval[0] + offset_8;
9827 }
9828 else
9829 {
9830 tgt_mem_addr = u_regval[0] - offset_8;
9831 }
9832 if (ARM_RECORD_STRH == str_type)
9833 {
9834 record_buf_mem[0] = 2;
9835 record_buf_mem[1] = tgt_mem_addr;
9836 arm_insn_r->mem_rec_count = 1;
9837 }
9838 else if (ARM_RECORD_STRD == str_type)
9839 {
9840 record_buf_mem[0] = 4;
9841 record_buf_mem[1] = tgt_mem_addr;
9842 record_buf_mem[2] = 4;
9843 record_buf_mem[3] = tgt_mem_addr + 4;
9844 arm_insn_r->mem_rec_count = 2;
9845 }
9846 }
9847 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9848 {
9849 /* 2) Store, register offset. */
9850 /* Get Rm. */
9851 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9852 /* Get Rn. */
9853 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9854 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9855 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9856 if (15 == reg_src2)
9857 {
9858 /* If R15 was used as Rn, use the current PC + 8. */
9859 u_regval[1] = u_regval[1] + 8;
9860 }
9861 /* Calculate target store address, Rn +/- Rm, register offset. */
9862 if (12 == arm_insn_r->opcode)
9863 {
9864 tgt_mem_addr = u_regval[0] + u_regval[1];
9865 }
9866 else
9867 {
9868 tgt_mem_addr = u_regval[1] - u_regval[0];
9869 }
9870 if (ARM_RECORD_STRH == str_type)
9871 {
9872 record_buf_mem[0] = 2;
9873 record_buf_mem[1] = tgt_mem_addr;
9874 arm_insn_r->mem_rec_count = 1;
9875 }
9876 else if (ARM_RECORD_STRD == str_type)
9877 {
9878 record_buf_mem[0] = 4;
9879 record_buf_mem[1] = tgt_mem_addr;
9880 record_buf_mem[2] = 4;
9881 record_buf_mem[3] = tgt_mem_addr + 4;
9882 arm_insn_r->mem_rec_count = 2;
9883 }
9884 }
9885 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9886 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9887 {
9888 /* 3) Store, immediate pre-indexed. */
9889 /* 5) Store, immediate post-indexed. */
9890 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9891 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9892 offset_8 = (immed_high << 4) | immed_low;
9893 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9894 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9895 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9896 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9897 {
9898 tgt_mem_addr = u_regval[0] + offset_8;
9899 }
9900 else
9901 {
9902 tgt_mem_addr = u_regval[0] - offset_8;
9903 }
9904 if (ARM_RECORD_STRH == str_type)
9905 {
9906 record_buf_mem[0] = 2;
9907 record_buf_mem[1] = tgt_mem_addr;
9908 arm_insn_r->mem_rec_count = 1;
9909 }
9910 else if (ARM_RECORD_STRD == str_type)
9911 {
9912 record_buf_mem[0] = 4;
9913 record_buf_mem[1] = tgt_mem_addr;
9914 record_buf_mem[2] = 4;
9915 record_buf_mem[3] = tgt_mem_addr + 4;
9916 arm_insn_r->mem_rec_count = 2;
9917 }
9918 /* Record Rn also as it changes. */
9919 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9920 arm_insn_r->reg_rec_count = 1;
9921 }
9922 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9923 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9924 {
9925 /* 4) Store, register pre-indexed. */
9926 /* 6) Store, register post-indexed. */
9927 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9928 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9929 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9930 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9931 /* Calculate target store address, Rn +/- Rm, register offset. */
9932 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9933 {
9934 tgt_mem_addr = u_regval[0] + u_regval[1];
9935 }
9936 else
9937 {
9938 tgt_mem_addr = u_regval[1] - u_regval[0];
9939 }
9940 if (ARM_RECORD_STRH == str_type)
9941 {
9942 record_buf_mem[0] = 2;
9943 record_buf_mem[1] = tgt_mem_addr;
9944 arm_insn_r->mem_rec_count = 1;
9945 }
9946 else if (ARM_RECORD_STRD == str_type)
9947 {
9948 record_buf_mem[0] = 4;
9949 record_buf_mem[1] = tgt_mem_addr;
9950 record_buf_mem[2] = 4;
9951 record_buf_mem[3] = tgt_mem_addr + 4;
9952 arm_insn_r->mem_rec_count = 2;
9953 }
9954 /* Record Rn also as it changes. */
9955 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9956 arm_insn_r->reg_rec_count = 1;
9957 }
9958 return 0;
9959}
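/* Worked example (with invented register contents): for the misc store
   "strh r1, [r2, #0x14]" the opcode field (bits 21-24) is 14, so this
   is the immediate-offset, add case; immed_high is 1 and immed_low is
   4, giving offset_8 = (1 << 4) | 4 = 0x14.  If r2 holds 0x20000000,
   the two bytes about to be overwritten at 0x20000014 are recorded:

     record_buf_mem[0] = 2;            // length
     record_buf_mem[1] = 0x20000014;   // target address
     arm_insn_r->mem_rec_count = 1;
*/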
9960
9961/* Handling ARM extension space insns. */
9962
9963static int
9964arm_record_extension_space (insn_decode_record *arm_insn_r)
9965{
9966 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9967 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9968 uint32_t record_buf[8], record_buf_mem[8];
9969 uint32_t reg_src1 = 0;
72508ac0
PO
9970 struct regcache *reg_cache = arm_insn_r->regcache;
9971 ULONGEST u_regval = 0;
9972
9973 gdb_assert (!INSN_RECORDED(arm_insn_r));
9974 /* Handle unconditional insn extension space. */
9975
9976 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9977 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9978 if (arm_insn_r->cond)
9979 {
9980 /* PLD has no effect on architectural state, it just affects
9981 the caches. */
9982 if (5 == ((opcode1 & 0xE0) >> 5))
9983 {
9984 /* BLX(1) */
9985 record_buf[0] = ARM_PS_REGNUM;
9986 record_buf[1] = ARM_LR_REGNUM;
9987 arm_insn_r->reg_rec_count = 2;
9988 }
9989 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9990 }
9991
9992
9993 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9994 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9995 {
9996 ret = -1;
9997 /* Undefined instruction on ARM V5; need to handle if later
9998 versions define it. */
9999 }
10000
10001 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10002 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10003 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10004
10005 /* Handle arithmetic insn extension space. */
10006 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10007 && !INSN_RECORDED(arm_insn_r))
10008 {
10009 /* Handle MLA(S) and MUL(S). */
10010 if (0 <= insn_op1 && 3 >= insn_op1)
10011 {
10012 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10013 record_buf[1] = ARM_PS_REGNUM;
10014 arm_insn_r->reg_rec_count = 2;
10015 }
10016 else if (4 <= insn_op1 && 15 >= insn_op1)
10017 {
10018 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10019 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10020 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10021 record_buf[2] = ARM_PS_REGNUM;
10022 arm_insn_r->reg_rec_count = 3;
10023 }
10024 }
10025
10026 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10027 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10028 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10029
10030 /* Handle control insn extension space. */
10031
10032 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10033 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10034 {
10035 if (!bit (arm_insn_r->arm_insn,25))
10036 {
10037 if (!bits (arm_insn_r->arm_insn, 4, 7))
10038 {
10039 if ((0 == insn_op1) || (2 == insn_op1))
10040 {
10041 /* MRS. */
10042 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10043 arm_insn_r->reg_rec_count = 1;
10044 }
10045 else if (1 == insn_op1)
10046 {
10047 /* CPSR is going to be changed. */
10048 record_buf[0] = ARM_PS_REGNUM;
10049 arm_insn_r->reg_rec_count = 1;
10050 }
10051 else if (3 == insn_op1)
10052 {
10053 /* SPSR is going to be changed. */
10054 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
10055 return -1;
10056 }
10057 }
10058 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10059 {
10060 if (1 == insn_op1)
10061 {
10062 /* BX. */
10063 record_buf[0] = ARM_PS_REGNUM;
10064 arm_insn_r->reg_rec_count = 1;
10065 }
10066 else if (3 == insn_op1)
10067 {
10068 /* CLZ. */
10069 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10070 arm_insn_r->reg_rec_count = 1;
10071 }
10072 }
10073 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10074 {
10075 /* BLX. */
10076 record_buf[0] = ARM_PS_REGNUM;
10077 record_buf[1] = ARM_LR_REGNUM;
10078 arm_insn_r->reg_rec_count = 2;
10079 }
10080 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10081 {
10082 /* QADD, QSUB, QDADD, QDSUB */
10083 record_buf[0] = ARM_PS_REGNUM;
10084 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10085 arm_insn_r->reg_rec_count = 2;
10086 }
10087 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10088 {
10089 /* BKPT. */
10090 record_buf[0] = ARM_PS_REGNUM;
10091 record_buf[1] = ARM_LR_REGNUM;
10092 arm_insn_r->reg_rec_count = 2;
10093
10094 /* Save SPSR also; how? */
72508ac0
PO
10095 return -1;
10096 }
10097 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10098 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10099 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10100 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10101 )
10102 {
10103 if (0 == insn_op1 || 1 == insn_op1)
10104 {
10105 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10106 /* We don't optimize for SMULW<y>, where only Rd
10107 needs to be recorded. */
10108 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10109 record_buf[1] = ARM_PS_REGNUM;
10110 arm_insn_r->reg_rec_count = 2;
10111 }
10112 else if (2 == insn_op1)
10113 {
10114 /* SMLAL<x><y>. */
10115 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10116 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10117 arm_insn_r->reg_rec_count = 2;
10118 }
10119 else if (3 == insn_op1)
10120 {
10121 /* SMUL<x><y>. */
10122 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10123 arm_insn_r->reg_rec_count = 1;
10124 }
10125 }
10126 }
10127 else
10128 {
10129 /* MSR : immediate form. */
10130 if (1 == insn_op1)
10131 {
10132 /* CPSR is going to be changed. */
10133 record_buf[0] = ARM_PS_REGNUM;
10134 arm_insn_r->reg_rec_count = 1;
10135 }
10136 else if (3 == insn_op1)
10137 {
10138 /* SPSR is going to be changed. */
10139 /* We need to get the SPSR value, which is yet to be done. */
72508ac0
PO
10140 return -1;
10141 }
10142 }
10143 }
10144
10145 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10146 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10147 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10148
10149 /* Handle load/store insn extension space. */
10150
10151 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10152 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10153 && !INSN_RECORDED(arm_insn_r))
10154 {
10155 /* SWP/SWPB. */
10156 if (0 == insn_op1)
10157 {
10158 /* This insn changes both a register and memory. */
10159 /* SWP or SWPB insn. */
10160 /* Get memory address given by Rn. */
10161 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10162 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10163 /* SWP insn swaps a word. */
10164 if (8 == arm_insn_r->opcode)
10165 {
10166 record_buf_mem[0] = 4;
10167 }
10168 else
10169 {
10170 /* SWPB insn swaps only a byte. */
10171 record_buf_mem[0] = 1;
10172 }
10173 record_buf_mem[1] = u_regval;
10174 arm_insn_r->mem_rec_count = 1;
10175 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10176 arm_insn_r->reg_rec_count = 1;
10177 }
10178 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10179 {
10180 /* STRH. */
10181 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10182 ARM_RECORD_STRH);
10183 }
10184 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10185 {
10186 /* LDRD. */
10187 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10188 record_buf[1] = record_buf[0] + 1;
10189 arm_insn_r->reg_rec_count = 2;
10190 }
10191 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10192 {
10193 /* STRD. */
10194 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10195 ARM_RECORD_STRD);
10196 }
10197 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10198 {
10199 /* LDRH, LDRSB, LDRSH. */
10200 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10201 arm_insn_r->reg_rec_count = 1;
10202 }
10203
10204 }
10205
10206 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10207 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10208 && !INSN_RECORDED(arm_insn_r))
10209 {
10210 ret = -1;
10211 /* Handle coprocessor insn extension space. */
10212 }
10213
10214 /* To be done for ARMv5 and later; as of now we return -1. */
10215 if (-1 == ret)
ca92db2d 10216 return ret;
72508ac0
PO
10217
10218 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10219 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10220
10221 return ret;
10222}
10223
10224/* Handling opcode 000 insns. */
10225
10226static int
10227arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10228{
10229 struct regcache *reg_cache = arm_insn_r->regcache;
10230 uint32_t record_buf[8], record_buf_mem[8];
10231 ULONGEST u_regval[2] = {0};
10232
bec2ab5a 10233 uint32_t reg_src1 = 0, reg_dest = 0;
72508ac0
PO
10234 uint32_t opcode1 = 0;
10235
10236 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10237 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10238 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10239
10240 /* Data processing insn /multiply insn. */
10241 if (9 == arm_insn_r->decode
10242 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10243 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10244 {
10245 /* Handle multiply instructions. */
10246 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10247 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10248 {
10249 /* Handle MLA and MUL. */
10250 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10251 record_buf[1] = ARM_PS_REGNUM;
10252 arm_insn_r->reg_rec_count = 2;
10253 }
10254 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10255 {
10256 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10257 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10258 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10259 record_buf[2] = ARM_PS_REGNUM;
10260 arm_insn_r->reg_rec_count = 3;
10261 }
10262 }
10263 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10264 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10265 {
10266 /* Handle misc load insns, as the 20th bit (L = 1) is set. */
10267 /* The LDR insn is capable of branching: when MOV LR, PC
10268 precedes an LDR insn that loads into R15, the pair
10269 emulates a branch and link insn, and hence we need to
10270 save CPSR and PC as well. It is not clear this is the right
10271 place, as it is the opcode 010 LDR insn that makes this
10272 happen when R15 is used. */
10273 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10274 if (15 != reg_dest)
10275 {
10276 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10277 arm_insn_r->reg_rec_count = 1;
10278 }
10279 else
10280 {
10281 record_buf[0] = reg_dest;
10282 record_buf[1] = ARM_PS_REGNUM;
10283 arm_insn_r->reg_rec_count = 2;
10284 }
10285 }
10286 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10287 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10288 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10289 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10290 {
10291 /* Handle MSR insn. */
10292 if (9 == arm_insn_r->opcode)
10293 {
10294 /* CPSR is going to be changed. */
10295 record_buf[0] = ARM_PS_REGNUM;
10296 arm_insn_r->reg_rec_count = 1;
10297 }
10298 else
10299 {
10300 /* SPSR is going to be changed. */
10301 /* How to read SPSR value? */
72508ac0
PO
10302 return -1;
10303 }
10304 }
10305 else if (9 == arm_insn_r->decode
10306 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10307 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10308 {
10309 /* Handling SWP, SWPB. */
10310 /* These insns change both a register and memory. */
10311 /* SWP or SWPB insn. */
10312
10313 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10314 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10315 /* SWP insn swaps a word. */
10316 if (8 == arm_insn_r->opcode)
10317 {
10318 record_buf_mem[0] = 4;
10319 }
10320 else
10321 {
10322 /* SWPB insn swaps only a byte. */
10323 record_buf_mem[0] = 1;
10324 }
10325 record_buf_mem[1] = u_regval[0];
10326 arm_insn_r->mem_rec_count = 1;
10327 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10328 arm_insn_r->reg_rec_count = 1;
10329 }
10330 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10331 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10332 {
10333 /* Handle BLX, branch and link/exchange. */
10334 if (9 == arm_insn_r->opcode)
10335 {
10336 /* Branching is done by copying bit[0] of Rm into the T bit of
10337 CPSR, and R14 stores the return address. */
10338 record_buf[0] = ARM_PS_REGNUM;
10339 record_buf[1] = ARM_LR_REGNUM;
10340 arm_insn_r->reg_rec_count = 2;
10341 }
10342 }
10343 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10344 {
10345 /* Handle enhanced software breakpoint insn, BKPT. */
10346 /* CPSR is changed so that execution continues in ARM state,
10347 normal interrupts are disabled, and abort mode is entered. */
10348 /* The PC is set according to the high vector configuration. */
10349 /* If the user hits the breakpoint and then reverse-executes,
10350 we need to go back with the previous CPSR and
10351 program counter. */
10352 record_buf[0] = ARM_PS_REGNUM;
10353 record_buf[1] = ARM_LR_REGNUM;
10354 arm_insn_r->reg_rec_count = 2;
10355
10356 /* Save SPSR also; how? */
72508ac0
PO
10357 return -1;
10358 }
10359 else if (11 == arm_insn_r->decode
10360 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10361 {
10362 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10363
10364 /* Handle str(x) insn */
10365 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10366 ARM_RECORD_STRH);
10367 }
10368 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10369 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10370 {
10371 /* Handle BX, branch and link/exchange. */
10372 /* Branching is done by copying bit[0] of Rm into the T bit of CPSR. */
10373 record_buf[0] = ARM_PS_REGNUM;
10374 arm_insn_r->reg_rec_count = 1;
10375 }
10376 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10377 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10378 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10379 {
10380 /* Count leading zeros: CLZ. */
10381 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10382 arm_insn_r->reg_rec_count = 1;
10383 }
10384 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10385 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10386 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10387 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10388 )
10389 {
10390 /* Handle MRS insn. */
10391 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10392 arm_insn_r->reg_rec_count = 1;
10393 }
10394 else if (arm_insn_r->opcode <= 15)
10395 {
10396 /* Normal data processing insns. */
10397 /* In all of the 11 shifter-operand modes, the insn modifies the
10398 destination register, which is specified by bits 12-15. */
10399 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10400 record_buf[1] = ARM_PS_REGNUM;
10401 arm_insn_r->reg_rec_count = 2;
10402 }
10403 else
10404 {
10405 return -1;
10406 }
10407
10408 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10409 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10410 return 0;
10411}
10412
10413/* Handling opcode 001 insns. */
10414
10415static int
10416arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10417{
10418 uint32_t record_buf[8], record_buf_mem[8];
10419
10420 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10421 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10422
10423 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10424 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10425 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10426 )
10427 {
10428 /* Handle MSR insn. */
10429 if (9 == arm_insn_r->opcode)
10430 {
10431 /* CPSR is going to be changed. */
10432 record_buf[0] = ARM_PS_REGNUM;
10433 arm_insn_r->reg_rec_count = 1;
10434 }
10435 else
10436 {
10437 /* SPSR is going to be changed. */
10438 }
10439 }
10440 else if (arm_insn_r->opcode <= 15)
10441 {
10442 /* Normal data processing insns. */
10443 /* In all of the 11 shifter-operand modes, the insn modifies the
10444 destination register, which is specified by bits 12-15. */
10445 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10446 record_buf[1] = ARM_PS_REGNUM;
10447 arm_insn_r->reg_rec_count = 2;
10448 }
10449 else
10450 {
10451 return -1;
10452 }
10453
10454 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10455 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10456 return 0;
10457}
10458
c55978a6
YQ
10459static int
10460arm_record_media (insn_decode_record *arm_insn_r)
10461{
10462 uint32_t record_buf[8];
10463
10464 switch (bits (arm_insn_r->arm_insn, 22, 24))
10465 {
10466 case 0:
10467 /* Parallel addition and subtraction, signed */
10468 case 1:
10469 /* Parallel addition and subtraction, unsigned */
10470 case 2:
10471 case 3:
10472 /* Packing, unpacking, saturation and reversal */
10473 {
10474 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10475
10476 record_buf[arm_insn_r->reg_rec_count++] = rd;
10477 }
10478 break;
10479
10480 case 4:
10481 case 5:
10482 /* Signed multiplies */
10483 {
10484 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10485 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10486
10487 record_buf[arm_insn_r->reg_rec_count++] = rd;
10488 if (op1 == 0x0)
10489 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10490 else if (op1 == 0x4)
10491 record_buf[arm_insn_r->reg_rec_count++]
10492 = bits (arm_insn_r->arm_insn, 12, 15);
10493 }
10494 break;
10495
10496 case 6:
10497 {
10498 if (bit (arm_insn_r->arm_insn, 21)
10499 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10500 {
10501 /* SBFX */
10502 record_buf[arm_insn_r->reg_rec_count++]
10503 = bits (arm_insn_r->arm_insn, 12, 15);
10504 }
10505 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10506 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10507 {
10508 /* USAD8 and USADA8 */
10509 record_buf[arm_insn_r->reg_rec_count++]
10510 = bits (arm_insn_r->arm_insn, 16, 19);
10511 }
10512 }
10513 break;
10514
10515 case 7:
10516 {
10517 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10518 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10519 {
10520 /* Permanently UNDEFINED */
10521 return -1;
10522 }
10523 else
10524 {
10525 /* BFC, BFI and UBFX */
10526 record_buf[arm_insn_r->reg_rec_count++]
10527 = bits (arm_insn_r->arm_insn, 12, 15);
10528 }
10529 }
10530 break;
10531
10532 default:
10533 return -1;
10534 }
10535
10536 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10537
10538 return 0;
10539}
10540
71e396f9 10541/* Handle ARM mode instructions with opcode 010. */
72508ac0
PO
10542
10543static int
10544arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10545{
10546 struct regcache *reg_cache = arm_insn_r->regcache;
10547
71e396f9
LM
10548 uint32_t reg_base, reg_dest;
10549 uint32_t offset_12, tgt_mem_addr;
72508ac0 10550 uint32_t record_buf[8], record_buf_mem[8];
71e396f9
LM
10551 unsigned char wback;
10552 ULONGEST u_regval;
72508ac0 10553
71e396f9
LM
10554 /* Calculate wback. */
10555 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10556 || (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0 10557
71e396f9
LM
10558 arm_insn_r->reg_rec_count = 0;
10559 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0
PO
10560
10561 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10562 {
71e396f9
LM
10563 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10564 and LDRT. */
10565
72508ac0 10566 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
71e396f9
LM
10567 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10568
10569 /* The LDR instruction is capable of branching. If MOV LR, PC
10570 precedes an LDR instruction that has R15 as its destination, it
10571 emulates a branch and link instruction, and hence we need to save
10572 CPSR and PC as well. */
10573 if (ARM_PC_REGNUM == reg_dest)
10574 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10575
10576 /* If wback is true, also save the base register, which is going to be
10577 written to. */
10578 if (wback)
10579 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10580 }
10581 else
10582 {
71e396f9
LM
10583 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10584
72508ac0 10585 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
71e396f9
LM
10586 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10587
10588 /* Handle bit U. */
72508ac0 10589 if (bit (arm_insn_r->arm_insn, 23))
71e396f9
LM
10590 {
10591 /* U == 1: Add the offset. */
10592 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10593 }
72508ac0 10594 else
71e396f9
LM
10595 {
10596 /* U == 0: subtract the offset. */
10597 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10598 }
10599
10600 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10601 bytes. */
10602 if (bit (arm_insn_r->arm_insn, 22))
10603 {
10604 /* STRB and STRBT: 1 byte. */
10605 record_buf_mem[0] = 1;
10606 }
10607 else
10608 {
10609 /* STR and STRT: 4 bytes. */
10610 record_buf_mem[0] = 4;
10611 }
10612
10613 /* Handle bit P. */
10614 if (bit (arm_insn_r->arm_insn, 24))
10615 record_buf_mem[1] = tgt_mem_addr;
10616 else
10617 record_buf_mem[1] = (uint32_t) u_regval;
72508ac0 10618
72508ac0
PO
10619 arm_insn_r->mem_rec_count = 1;
10620
71e396f9
LM
10621 /* If wback is true, also save the base register, which is going to be
10622 written to. */
10623 if (wback)
10624 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10625 }
10626
10627 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10628 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10629 return 0;
10630}
10631
10632/* Handling opcode 011 insns. */
10633
10634static int
10635arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10636{
10637 struct regcache *reg_cache = arm_insn_r->regcache;
10638
10639 uint32_t shift_imm = 0;
10640 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10641 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10642 uint32_t record_buf[8], record_buf_mem[8];
10643
10644 LONGEST s_word;
10645 ULONGEST u_regval[2];
10646
c55978a6
YQ
10647 if (bit (arm_insn_r->arm_insn, 4))
10648 return arm_record_media (arm_insn_r);
10649
72508ac0
PO
10650 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10651 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10652
10653 /* Handle enhanced store insns and the LDRD DSP insn; the
10654 ordering below follows the addressing modes of the store
10655 insns, starting with STRH. */
10656
10657 /* LDR or STR? */
10658 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10659 {
10660 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10661 /* The LDR insn is capable of branching: when MOV LR, PC
10662 precedes an LDR insn that loads into R15, the pair
10663 emulates a branch and link insn, and hence we
10664 need to save CPSR and PC as well. */
10665 if (15 != reg_dest)
10666 {
10667 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10668 arm_insn_r->reg_rec_count = 1;
10669 }
10670 else
10671 {
10672 record_buf[0] = reg_dest;
10673 record_buf[1] = ARM_PS_REGNUM;
10674 arm_insn_r->reg_rec_count = 2;
10675 }
10676 }
10677 else
10678 {
10679 if (! bits (arm_insn_r->arm_insn, 4, 11))
10680 {
10681 /* Store insn, register offset and register pre-indexed,
10682 register post-indexed. */
10683 /* Get Rm. */
10684 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10685 /* Get Rn. */
10686 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10687 regcache_raw_read_unsigned (reg_cache, reg_src1
10688 , &u_regval[0]);
10689 regcache_raw_read_unsigned (reg_cache, reg_src2
10690 , &u_regval[1]);
10691 if (15 == reg_src2)
10692 {
10693 /* If R15 was used as Rn, the value is the current PC + 8. */
10694 /* Pre-indexed mode doesn't reach here; it is an illegal insn. */
10695 u_regval[0] = u_regval[0] + 8;
10696 }
10697 /* Calculate target store address, Rn +/- Rm, register offset. */
10698 /* U == 1. */
10699 if (bit (arm_insn_r->arm_insn, 23))
10700 {
10701 tgt_mem_addr = u_regval[0] + u_regval[1];
10702 }
10703 else
10704 {
10705 tgt_mem_addr = u_regval[1] - u_regval[0];
10706 }
10707
10708 switch (arm_insn_r->opcode)
10709 {
10710 /* STR. */
10711 case 8:
10712 case 12:
10713 /* STR. */
10714 case 9:
10715 case 13:
10716 /* STRT. */
10717 case 1:
10718 case 5:
10719 /* STR. */
10720 case 0:
10721 case 4:
10722 record_buf_mem[0] = 4;
10723 break;
10724
10725 /* STRB. */
10726 case 10:
10727 case 14:
10728 /* STRB. */
10729 case 11:
10730 case 15:
10731 /* STRBT. */
10732 case 3:
10733 case 7:
10734 /* STRB. */
10735 case 2:
10736 case 6:
10737 record_buf_mem[0] = 1;
10738 break;
10739
10740 default:
10741 gdb_assert_not_reached ("no decoding pattern found");
10742 break;
10743 }
10744 record_buf_mem[1] = tgt_mem_addr;
10745 arm_insn_r->mem_rec_count = 1;
10746
10747 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10748 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10749 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10750 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10751 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10752 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10753 )
10754 {
10755 /* Rn is going to be changed in pre-indexed mode and
10756 post-indexed mode as well. */
10757 record_buf[0] = reg_src2;
10758 arm_insn_r->reg_rec_count = 1;
10759 }
10760 }
10761 else
10762 {
10763 /* Store insn, scaled register offset; scaled pre-indexed. */
10764 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10765 /* Get Rm. */
10766 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10767 /* Get Rn. */
10768 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10769 /* Get shift_imm. */
10770 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10771 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10772 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10773 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10774 /* offset_12 currently holds the shift type (bits 5-6). */
10775 switch (offset_12)
10776 {
10777 case 0:
10778 /* LSL: offset is Rm shifted left by shift_imm. */
10779 offset_12 = u_regval[0] << shift_imm;
10780 break;
10781
10782 case 1:
10783 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10784 break;
10785
10786 case 2:
10787 if (!shift_imm)
10788 {
10789 if (bit (u_regval[0], 31))
10790 {
10791 offset_12 = 0xFFFFFFFF;
10792 }
10793 else
10794 {
10795 offset_12 = 0;
10796 }
10797 }
10798 else
10799 {
10800 /* This is arithmetic shift. */
10801 offset_12 = s_word >> shift_imm;
10802 }
10803 break;
10804
10805 case 3:
10806 if (!shift_imm)
10807 {
10808 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10809 &u_regval[1]);
10810 /* Get C flag value and shift it by 31. */
10811 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10812 | (u_regval[0]) >> 1);
10813 }
10814 else
10815 {
10816 /* ROR: rotate right by shift_imm. */
10817 offset_12 = (u_regval[0] >> shift_imm)
10818 | (u_regval[0] << (32 - shift_imm));
10819 }
10820 break;
10821
10822 default:
10823 gdb_assert_not_reached ("no decoding pattern found");
10824 break;
10825 }
10826
10827 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10828 /* bit U set. */
10829 if (bit (arm_insn_r->arm_insn, 23))
10830 {
10831 tgt_mem_addr = u_regval[1] + offset_12;
10832 }
10833 else
10834 {
10835 tgt_mem_addr = u_regval[1] - offset_12;
10836 }
10837
10838 switch (arm_insn_r->opcode)
10839 {
10840 /* STR. */
10841 case 8:
10842 case 12:
10843 /* STR. */
10844 case 9:
10845 case 13:
10846 /* STRT. */
10847 case 1:
10848 case 5:
10849 /* STR. */
10850 case 0:
10851 case 4:
10852 record_buf_mem[0] = 4;
10853 break;
10854
10855 /* STRB. */
10856 case 10:
10857 case 14:
10858 /* STRB. */
10859 case 11:
10860 case 15:
10861 /* STRBT. */
10862 case 3:
10863 case 7:
10864 /* STRB. */
10865 case 2:
10866 case 6:
10867 record_buf_mem[0] = 1;
10868 break;
10869
10870 default:
10871 gdb_assert_not_reached ("no decoding pattern found");
10872 break;
10873 }
10874 record_buf_mem[1] = tgt_mem_addr;
10875 arm_insn_r->mem_rec_count = 1;
10876
10877 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10878 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10879 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10880 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10881 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10882 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10883 )
10884 {
10885 /* Rn is going to be changed in register scaled pre-indexed
10886 mode, and in scaled post-indexed mode. */
10887 record_buf[0] = reg_src2;
10888 arm_insn_r->reg_rec_count = 1;
10889 }
10890 }
10891 }
10892
10893 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10894 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10895 return 0;
10896}
10897
71e396f9 10898/* Handle ARM mode instructions with opcode 100. */
72508ac0
PO
10899
10900static int
10901arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10902{
10903 struct regcache *reg_cache = arm_insn_r->regcache;
71e396f9
LM
10904 uint32_t register_count = 0, register_bits;
10905 uint32_t reg_base, addr_mode;
72508ac0 10906 uint32_t record_buf[24], record_buf_mem[48];
71e396f9
LM
10907 uint32_t wback;
10908 ULONGEST u_regval;
72508ac0 10909
71e396f9
LM
10910 /* Fetch the list of registers. */
10911 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10912 arm_insn_r->reg_rec_count = 0;
10913
10914 /* Fetch the base register that contains the address we are loading data
10915 to. */
10916 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10917
71e396f9
LM
10918 /* Calculate wback. */
10919 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10920
10921 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10922 {
71e396f9 10923 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10924
71e396f9 10925 /* Find out which registers are going to be loaded from memory. */
72508ac0 10926 while (register_bits)
71e396f9
LM
10927 {
10928 if (register_bits & 0x00000001)
10929 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10930 register_bits = register_bits >> 1;
10931 register_count++;
10932 }
72508ac0 10933
71e396f9
LM
10934
10935 /* If wback is true, also save the base register, which is going to be
10936 written to. */
10937 if (wback)
10938 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10939
10940 /* Save the CPSR register. */
10941 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
72508ac0
PO
10942 }
10943 else
10944 {
71e396f9 10945 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 10946
71e396f9
LM
10947 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10948
10949 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10950
10951 /* Find out how many registers are going to be stored to memory. */
72508ac0 10952 while (register_bits)
71e396f9
LM
10953 {
10954 if (register_bits & 0x00000001)
10955 register_count++;
10956 register_bits = register_bits >> 1;
10957 }
72508ac0
PO
10958
10959 switch (addr_mode)
71e396f9
LM
10960 {
10961 /* STMDA (STMED): Decrement after. */
10962 case 0:
10963 record_buf_mem[1] = (uint32_t) u_regval
10964 - register_count * INT_REGISTER_SIZE + 4;
10965 break;
10966 /* STM (STMIA, STMEA): Increment after. */
10967 case 1:
10968 record_buf_mem[1] = (uint32_t) u_regval;
10969 break;
10970 /* STMDB (STMFD): Decrement before. */
10971 case 2:
10972 record_buf_mem[1] = (uint32_t) u_regval
10973 - register_count * INT_REGISTER_SIZE;
10974 break;
10975 /* STMIB (STMFA): Increment before. */
10976 case 3:
10977 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10978 break;
10979 default:
10980 gdb_assert_not_reached ("no decoding pattern found");
10981 break;
10982 }
72508ac0 10983
71e396f9
LM
10984 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10985 arm_insn_r->mem_rec_count = 1;
10986
10987 /* If wback is true, also save the base register, which is going to be
10988 written to. */
10989 if (wback)
10990 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10991 }
10992
10993 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10994 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10995 return 0;
10996}
10997
10998/* Handling opcode 101 insns. */
10999
11000static int
11001arm_record_b_bl (insn_decode_record *arm_insn_r)
11002{
11003 uint32_t record_buf[8];
11004
11005 /* Handle B, BL, BLX(1) insns. */
11006 /* B simply branches so we do nothing here. */
11007 /* Note: BLX(1) doesn't fall here but instead falls into the
11008 extension space. */
11009 if (bit (arm_insn_r->arm_insn, 24))
11010 {
11011 record_buf[0] = ARM_LR_REGNUM;
11012 arm_insn_r->reg_rec_count = 1;
11013 }
11014
11015 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11016
11017 return 0;
11018}
11019
72508ac0 11020static int
c6ec2b30 11021arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
72508ac0
PO
11022{
11023 printf_unfiltered (_("Process record does not support instruction "
01e57735
YQ
11024 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11025 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
72508ac0
PO
11026
11027 return -1;
11028}
11029
5a578da5
OJ
11030/* Record handler for vector data transfer instructions. */
11031
11032static int
11033arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11034{
11035 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11036 uint32_t record_buf[4];
11037
5a578da5
OJ
11038 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11039 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11040 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11041 bit_l = bit (arm_insn_r->arm_insn, 20);
11042 bit_c = bit (arm_insn_r->arm_insn, 8);
11043
11044 /* Handle VMOV instruction. */
11045 if (bit_l && bit_c)
11046 {
11047 record_buf[0] = reg_t;
11048 arm_insn_r->reg_rec_count = 1;
11049 }
11050 else if (bit_l && !bit_c)
11051 {
11052 /* Handle VMOV instruction. */
11053 if (bits_a == 0x00)
11054 {
f1771dce 11055 record_buf[0] = reg_t;
5a578da5
OJ
11056 arm_insn_r->reg_rec_count = 1;
11057 }
11058 /* Handle VMRS instruction. */
11059 else if (bits_a == 0x07)
11060 {
11061 if (reg_t == 15)
11062 reg_t = ARM_PS_REGNUM;
11063
11064 record_buf[0] = reg_t;
11065 arm_insn_r->reg_rec_count = 1;
11066 }
11067 }
11068 else if (!bit_l && !bit_c)
11069 {
11070 /* Handle VMOV instruction. */
11071 if (bits_a == 0x00)
11072 {
f1771dce 11073 record_buf[0] = ARM_D0_REGNUM + reg_v;
5a578da5
OJ
11074
11075 arm_insn_r->reg_rec_count = 1;
11076 }
11077 /* Handle VMSR instruction. */
11078 else if (bits_a == 0x07)
11079 {
11080 record_buf[0] = ARM_FPSCR_REGNUM;
11081 arm_insn_r->reg_rec_count = 1;
11082 }
11083 }
11084 else if (!bit_l && bit_c)
11085 {
11086 /* Handle VMOV instruction. */
11087 if (!(bits_a & 0x04))
11088 {
11089 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11090 + ARM_D0_REGNUM;
11091 arm_insn_r->reg_rec_count = 1;
11092 }
11093 /* Handle VDUP instruction. */
11094 else
11095 {
11096 if (bit (arm_insn_r->arm_insn, 21))
11097 {
11098 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11099 record_buf[0] = reg_v + ARM_D0_REGNUM;
11100 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11101 arm_insn_r->reg_rec_count = 2;
11102 }
11103 else
11104 {
11105 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11106 record_buf[0] = reg_v + ARM_D0_REGNUM;
11107 arm_insn_r->reg_rec_count = 1;
11108 }
11109 }
11110 }
11111
11112 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11113 return 0;
11114}
11115
f20f80dd
OJ
11116/* Record handler for extension register load/store instructions. */
11117
11118static int
11119arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11120{
11121 uint32_t opcode, single_reg;
11122 uint8_t op_vldm_vstm;
11123 uint32_t record_buf[8], record_buf_mem[128];
11124 ULONGEST u_regval = 0;
11125
11126 struct regcache *reg_cache = arm_insn_r->regcache;
f20f80dd
OJ
11127
11128 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11129 single_reg = !bit (arm_insn_r->arm_insn, 8);
f20f80dd
OJ
11130 op_vldm_vstm = opcode & 0x1b;
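  /* Note (added): masking with 0x1b clears bit 2 of the opcode field
     (insn bit 22, the D bit), which only selects the half of the register
     bank in use, so the VSTM/VPUSH and VLDM/VPOP variants can be matched
     together below.  */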
11131
11132 /* Handle VMOV instructions. */
11133 if ((opcode & 0x1e) == 0x04)
11134 {
9fde51ed 11135 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
01e57735
YQ
11136 {
11137 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11138 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11139 arm_insn_r->reg_rec_count = 2;
11140 }
f20f80dd 11141 else
01e57735 11142 {
9fde51ed
YQ
11143 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11144 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11145
9fde51ed 11146 if (single_reg)
01e57735 11147 {
9fde51ed
YQ
11148 /* The first S register number m is REG_M:M (M is bit 5),
11149 the corresponding D register number is REG_M:M / 2, which
11150 is REG_M. */
11151 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11152 /* The second S register number is REG_M:M + 1, the
11153 corresponding D register number is (REG_M:M + 1) / 2.
11154 IOW, if bit M is 1, the first and second S registers
11155 are mapped to different D registers, otherwise, they are
11156 in the same D register. */
11157 if (bit_m)
11158 {
11159 record_buf[arm_insn_r->reg_rec_count++]
11160 = ARM_D0_REGNUM + reg_m + 1;
11161 }
01e57735
YQ
11162 }
11163 else
11164 {
9fde51ed 11165 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
01e57735
YQ
11166 arm_insn_r->reg_rec_count = 1;
11167 }
11168 }
f20f80dd
OJ
11169 }
11170 /* Handle VSTM and VPUSH instructions. */
11171 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11172 || op_vldm_vstm == 0x12)
f20f80dd
OJ
11173 {
11174 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11175 uint32_t memory_index = 0;
11176
11177 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11178 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11179 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11180 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11181 memory_count = imm_off8;
11182
11183 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11184 start_address = u_regval;
f20f80dd 11185 else
01e57735 11186 start_address = u_regval - imm_off32;
f20f80dd
OJ
11187
11188 if (bit (arm_insn_r->arm_insn, 21))
01e57735
YQ
11189 {
11190 record_buf[0] = reg_rn;
11191 arm_insn_r->reg_rec_count = 1;
11192 }
f20f80dd
OJ
11193
11194 while (memory_count > 0)
01e57735 11195 {
9fde51ed 11196 if (single_reg)
01e57735 11197 {
9fde51ed
YQ
11198 record_buf_mem[memory_index] = 4;
11199 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11200 start_address = start_address + 4;
11201 memory_index = memory_index + 2;
11202 }
11203 else
11204 {
9fde51ed
YQ
11205 record_buf_mem[memory_index] = 4;
11206 record_buf_mem[memory_index + 1] = start_address;
11207 record_buf_mem[memory_index + 2] = 4;
11208 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11209 start_address = start_address + 8;
11210 memory_index = memory_index + 4;
11211 }
11212 memory_count--;
11213 }
f20f80dd
OJ
11214 arm_insn_r->mem_rec_count = (memory_index >> 1);
11215 }
11216 /* Handle VLDM instructions. */
11217 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11218 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11219 {
11220 uint32_t reg_count, reg_vd;
11221 uint32_t reg_index = 0;
9fde51ed 11222 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
f20f80dd
OJ
11223
11224 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11225 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11226
9fde51ed
YQ
11227 /* REG_VD is the first D register number. If the instruction
11228 loads memory to S registers (SINGLE_REG is TRUE), the register
11229 number is (REG_VD << 1 | bit D), so the corresponding D
11230 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11231 if (!single_reg)
11232 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11233
9fde51ed 11234 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11235 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11236
9fde51ed
YQ
11237 /* If the instruction loads memory to D register, REG_COUNT should
11238 be divided by 2, according to the ARM Architecture Reference
11239 Manual. If the instruction loads memory to S register, divide by
11240 2 as well because two S registers are mapped to D register. */
11241 reg_count = reg_count / 2;
11242 if (single_reg && bit_d)
01e57735 11243 {
9fde51ed
YQ
11244 /* Increase the register count if S register list starts from
11245 an odd number (bit d is one). */
11246 reg_count++;
11247 }
f20f80dd 11248
9fde51ed
YQ
11249 while (reg_count > 0)
11250 {
11251 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
01e57735
YQ
11252 reg_count--;
11253 }
f20f80dd
OJ
11254 arm_insn_r->reg_rec_count = reg_index;
11255 }
11256 /* VSTR Vector store register. */
11257 else if ((opcode & 0x13) == 0x10)
11258 {
bec2ab5a 11259 uint32_t start_address, reg_rn, imm_off32, imm_off8;
f20f80dd
OJ
11260 uint32_t memory_index = 0;
11261
11262 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11263 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11264 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11265 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11266
11267 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11268 start_address = u_regval + imm_off32;
f20f80dd 11269 else
01e57735 11270 start_address = u_regval - imm_off32;
f20f80dd
OJ
11271
11272 if (single_reg)
01e57735 11273 {
9fde51ed
YQ
11274 record_buf_mem[memory_index] = 4;
11275 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11276 arm_insn_r->mem_rec_count = 1;
11277 }
f20f80dd 11278 else
01e57735 11279 {
9fde51ed
YQ
11280 record_buf_mem[memory_index] = 4;
11281 record_buf_mem[memory_index + 1] = start_address;
11282 record_buf_mem[memory_index + 2] = 4;
11283 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11284 arm_insn_r->mem_rec_count = 2;
11285 }
f20f80dd
OJ
11286 }
11287 /* VLDR Vector load register. */
11288 else if ((opcode & 0x13) == 0x11)
11289 {
11290 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11291
11292 if (!single_reg)
01e57735
YQ
11293 {
11294 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11295 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11296 }
f20f80dd 11297 else
01e57735
YQ
11298 {
11299 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
9fde51ed
YQ
11300 /* Record register D rather than pseudo register S. */
11301 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11302 }
f20f80dd
OJ
11303 arm_insn_r->reg_rec_count = 1;
11304 }
11305
11306 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11307 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11308 return 0;
11309}
11310
851f26ae
OJ
11311/* Record handler for arm/thumb mode VFP data processing instructions. */
11312
11313static int
11314arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11315{
11316 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11317 uint32_t record_buf[4];
11318 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11319 enum insn_types curr_insn_type = INSN_INV;
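  /* Note (added): from the switch at the end of this function, INSN_T0
     records a pair of D registers, INSN_T1 a single double-precision D
     register, INSN_T2 a single-precision destination, and INSN_T3 only
     FPSCR (compare insns).  */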
11320
11321 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11322 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11323 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11324 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11325 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11326 bit_d = bit (arm_insn_r->arm_insn, 22);
11327 opc1 = opc1 & 0x04;
11328
11329 /* Handle VMLA, VMLS. */
11330 if (opc1 == 0x00)
11331 {
11332 if (bit (arm_insn_r->arm_insn, 10))
11333 {
11334 if (bit (arm_insn_r->arm_insn, 6))
11335 curr_insn_type = INSN_T0;
11336 else
11337 curr_insn_type = INSN_T1;
11338 }
11339 else
11340 {
11341 if (dp_op_sz)
11342 curr_insn_type = INSN_T1;
11343 else
11344 curr_insn_type = INSN_T2;
11345 }
11346 }
11347 /* Handle VNMLA, VNMLS, VNMUL. */
11348 else if (opc1 == 0x01)
11349 {
11350 if (dp_op_sz)
11351 curr_insn_type = INSN_T1;
11352 else
11353 curr_insn_type = INSN_T2;
11354 }
11355 /* Handle VMUL. */
11356 else if (opc1 == 0x02 && !(opc3 & 0x01))
11357 {
11358 if (bit (arm_insn_r->arm_insn, 10))
11359 {
11360 if (bit (arm_insn_r->arm_insn, 6))
11361 curr_insn_type = INSN_T0;
11362 else
11363 curr_insn_type = INSN_T1;
11364 }
11365 else
11366 {
11367 if (dp_op_sz)
11368 curr_insn_type = INSN_T1;
11369 else
11370 curr_insn_type = INSN_T2;
11371 }
11372 }
11373 /* Handle VADD, VSUB. */
11374 else if (opc1 == 0x03)
11375 {
11376 if (!bit (arm_insn_r->arm_insn, 9))
11377 {
11378 if (bit (arm_insn_r->arm_insn, 6))
11379 curr_insn_type = INSN_T0;
11380 else
11381 curr_insn_type = INSN_T1;
11382 }
11383 else
11384 {
11385 if (dp_op_sz)
11386 curr_insn_type = INSN_T1;
11387 else
11388 curr_insn_type = INSN_T2;
11389 }
11390 }
11391 /* Handle VDIV. */
11392 else if (opc1 == 0x0b)
11393 {
11394 if (dp_op_sz)
11395 curr_insn_type = INSN_T1;
11396 else
11397 curr_insn_type = INSN_T2;
11398 }
11399 /* Handle all other vfp data processing instructions. */
11400 else if (opc1 == 0x0b)
11401 {
11402 /* Handle VMOV. */
11403 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11404 {
11405 if (bit (arm_insn_r->arm_insn, 4))
11406 {
11407 if (bit (arm_insn_r->arm_insn, 6))
11408 curr_insn_type = INSN_T0;
11409 else
11410 curr_insn_type = INSN_T1;
11411 }
11412 else
11413 {
11414 if (dp_op_sz)
11415 curr_insn_type = INSN_T1;
11416 else
11417 curr_insn_type = INSN_T2;
11418 }
11419 }
11420 /* Handle VNEG and VABS. */
11421 else if ((opc2 == 0x01 && opc3 == 0x01)
11422 || (opc2 == 0x00 && opc3 == 0x03))
11423 {
11424 if (!bit (arm_insn_r->arm_insn, 11))
11425 {
11426 if (bit (arm_insn_r->arm_insn, 6))
11427 curr_insn_type = INSN_T0;
11428 else
11429 curr_insn_type = INSN_T1;
11430 }
11431 else
11432 {
11433 if (dp_op_sz)
11434 curr_insn_type = INSN_T1;
11435 else
11436 curr_insn_type = INSN_T2;
11437 }
11438 }
11439 /* Handle VSQRT. */
11440 else if (opc2 == 0x01 && opc3 == 0x03)
11441 {
11442 if (dp_op_sz)
11443 curr_insn_type = INSN_T1;
11444 else
11445 curr_insn_type = INSN_T2;
11446 }
11447 /* Handle VCVT. */
11448 else if (opc2 == 0x07 && opc3 == 0x03)
11449 {
11450 if (!dp_op_sz)
11451 curr_insn_type = INSN_T1;
11452 else
11453 curr_insn_type = INSN_T2;
11454 }
11455 else if (opc3 & 0x01)
11456 {
11457 /* Handle VCVT. */
11458 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11459 {
11460 if (!bit (arm_insn_r->arm_insn, 18))
11461 curr_insn_type = INSN_T2;
11462 else
11463 {
11464 if (dp_op_sz)
11465 curr_insn_type = INSN_T1;
11466 else
11467 curr_insn_type = INSN_T2;
11468 }
11469 }
11470 /* Handle VCVT. */
11471 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11472 {
11473 if (dp_op_sz)
11474 curr_insn_type = INSN_T1;
11475 else
11476 curr_insn_type = INSN_T2;
11477 }
11478 /* Handle VCVTB, VCVTT. */
11479 else if ((opc2 & 0x0e) == 0x02)
11480 curr_insn_type = INSN_T2;
11481 /* Handle VCMP, VCMPE. */
11482 else if ((opc2 & 0x0e) == 0x04)
11483 curr_insn_type = INSN_T3;
11484 }
11485 }
11486
11487 switch (curr_insn_type)
11488 {
11489 case INSN_T0:
11490 reg_vd = reg_vd | (bit_d << 4);
11491 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11492 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11493 arm_insn_r->reg_rec_count = 2;
11494 break;
11495
11496 case INSN_T1:
11497 reg_vd = reg_vd | (bit_d << 4);
11498 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11499 arm_insn_r->reg_rec_count = 1;
11500 break;
11501
11502 case INSN_T2:
11503 reg_vd = (reg_vd << 1) | bit_d;
11504 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11505 arm_insn_r->reg_rec_count = 1;
11506 break;
11507
11508 case INSN_T3:
11509 record_buf[0] = ARM_FPSCR_REGNUM;
11510 arm_insn_r->reg_rec_count = 1;
11511 break;
11512
11513 default:
11514 gdb_assert_not_reached ("no decoding pattern found");
11515 break;
11516 }
11517
11518 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11519 return 0;
11520}
11521
60cc5e93
OJ
11522/* Handling opcode 110 insns. */
11523
11524static int
11525arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11526{
bec2ab5a 11527 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11528
11529 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11530 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11531 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11532
11533 if ((coproc & 0x0e) == 0x0a)
11534 {
11535 /* Handle extension register ld/st instructions. */
11536 if (!(op1 & 0x20))
f20f80dd 11537 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11538
11539 /* 64-bit transfers between arm core and extension registers. */
11540 if ((op1 & 0x3e) == 0x04)
f20f80dd 11541 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11542 }
11543 else
11544 {
11545 /* Handle coprocessor ld/st instructions. */
11546 if (!(op1 & 0x3a))
11547 {
11548 /* Store. */
11549 if (!op1_ebit)
11550 return arm_record_unsupported_insn (arm_insn_r);
11551 else
11552 /* Load. */
11553 return arm_record_unsupported_insn (arm_insn_r);
11554 }
11555
11556 /* Move to coprocessor from two arm core registers. */
11557 if (op1 == 0x4)
11558 return arm_record_unsupported_insn (arm_insn_r);
11559
11560 /* Move to two arm core registers from coprocessor. */
11561 if (op1 == 0x5)
11562 {
11563 uint32_t reg_t[2];
11564
11565 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11566 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11567 arm_insn_r->reg_rec_count = 2;
11568
11569 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11570 return 0;
11571 }
11572 }
11573 return arm_record_unsupported_insn (arm_insn_r);
11574}
11575
72508ac0
PO
11576/* Handling opcode 111 insns. */
11577
11578static int
11579arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11580{
60cc5e93 11581 uint32_t op, op1_sbit, op1_ebit, coproc;
72508ac0
PO
11582 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11583 struct regcache *reg_cache = arm_insn_r->regcache;
72508ac0
PO
11584
11585 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
60cc5e93
OJ
11586 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11587 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11588 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11589 op = bit (arm_insn_r->arm_insn, 4);
97dfe206
OJ
11590
11591 /* Handle arm SWI/SVC system call instructions. */
60cc5e93 11592 if (op1_sbit)
97dfe206
OJ
11593 {
11594 if (tdep->arm_syscall_record != NULL)
11595 {
11596 ULONGEST svc_operand, svc_number;
11597
11598 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11599
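	  /* Under the legacy OABI the SVC immediate encodes the syscall
	     number as 0x900000 + number; under EABI the immediate is zero
	     and the syscall number is passed in r7.  */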
11600 if (svc_operand) /* OABI. */
11601 svc_number = svc_operand - 0x900000;
11602 else /* EABI. */
11603 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11604
60cc5e93 11605 return tdep->arm_syscall_record (reg_cache, svc_number);
97dfe206
OJ
11606 }
11607 else
11608 {
11609 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11610 return -1;
97dfe206
OJ
11611 }
11612 }
60cc5e93
OJ
11613
11614 if ((coproc & 0x0e) == 0x0a)
11615 {
11616 /* VFP data-processing instructions. */
11617 if (!op1_sbit && !op)
851f26ae 11618 return arm_record_vfp_data_proc_insn (arm_insn_r);
60cc5e93
OJ
11619
11620 /* Advanced SIMD, VFP instructions. */
11621 if (!op1_sbit && op)
5a578da5 11622 return arm_record_vdata_transfer_insn (arm_insn_r);
60cc5e93 11623 }
97dfe206
OJ
11624 else
11625 {
60cc5e93
OJ
11626 /* Coprocessor data operations. */
11627 if (!op1_sbit && !op)
11628 return arm_record_unsupported_insn (arm_insn_r);
11629
11630 /* Move to Coprocessor from ARM core register. */
11631 if (!op1_sbit && !op1_ebit && op)
11632 return arm_record_unsupported_insn (arm_insn_r);
11633
11634 /* Move to arm core register from coprocessor. */
11635 if (!op1_sbit && op1_ebit && op)
11636 {
11637 uint32_t record_buf[1];
11638
11639 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11640 if (record_buf[0] == 15)
11641 record_buf[0] = ARM_PS_REGNUM;
11642
11643 arm_insn_r->reg_rec_count = 1;
11644 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11645 record_buf);
11646 return 0;
11647 }
97dfe206 11648 }
72508ac0 11649
60cc5e93 11650 return arm_record_unsupported_insn (arm_insn_r);
72508ac0
PO
11651}
11652
11653/* Handling opcode 000 insns. */
11654
11655static int
11656thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11657{
11658 uint32_t record_buf[8];
11659 uint32_t reg_src1 = 0;
11660
11661 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11662
11663 record_buf[0] = ARM_PS_REGNUM;
11664 record_buf[1] = reg_src1;
11665 thumb_insn_r->reg_rec_count = 2;
11666
11667 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11668
11669 return 0;
11670}
11671
11672
11673/* Handling opcode 001 insns. */
11674
11675static int
11676thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11677{
11678 uint32_t record_buf[8];
11679 uint32_t reg_src1 = 0;
11680
11681 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11682
11683 record_buf[0] = ARM_PS_REGNUM;
11684 record_buf[1] = reg_src1;
11685 thumb_insn_r->reg_rec_count = 2;
11686
11687 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11688
11689 return 0;
11690}
11691
11692/* Handling opcode 010 insns. */
11693
11694static int
11695thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11696{
11697 struct regcache *reg_cache = thumb_insn_r->regcache;
11698 uint32_t record_buf[8], record_buf_mem[8];
11699
11700 uint32_t reg_src1 = 0, reg_src2 = 0;
11701 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11702
11703 ULONGEST u_regval[2] = {0};
11704
11705 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11706
11707 if (bit (thumb_insn_r->arm_insn, 12))
11708 {
11709 /* Handle load/store register offset. */
b121eeb9
YQ
11710 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11711
11712 if (opB >= 4 && opB <= 7)
72508ac0
PO
11713 {
11714 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11715 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11716 record_buf[0] = reg_src1;
11717 thumb_insn_r->reg_rec_count = 1;
11718 }
b121eeb9 11719 else if (opB >= 0 && opB <= 2)
72508ac0
PO
11720 {
11721 /* STR(2), STRB(2), STRH(2) . */
11722 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11723 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11724 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11725 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
b121eeb9 11726 if (0 == opB)
72508ac0 11727 record_buf_mem[0] = 4; /* STR (2). */
b121eeb9 11728 else if (2 == opB)
72508ac0 11729 record_buf_mem[0] = 1; /* STRB (2). */
b121eeb9 11730 else if (1 == opB)
72508ac0
PO
11731 record_buf_mem[0] = 2; /* STRH (2). */
11732 record_buf_mem[1] = u_regval[0] + u_regval[1];
11733 thumb_insn_r->mem_rec_count = 1;
11734 }
11735 }
11736 else if (bit (thumb_insn_r->arm_insn, 11))
11737 {
11738 /* Handle load from literal pool. */
11739 /* LDR(3). */
11740 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11741 record_buf[0] = reg_src1;
11742 thumb_insn_r->reg_rec_count = 1;
11743 }
11744 else if (opcode1)
11745 {
b121eeb9 11746 /* Special data instructions and branch and exchange */
72508ac0
PO
11747 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11748 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11749 if ((3 == opcode2) && (!opcode3))
11750 {
11751 /* Branch with exchange. */
11752 record_buf[0] = ARM_PS_REGNUM;
11753 thumb_insn_r->reg_rec_count = 1;
11754 }
11755 else
11756 {
1f33efec
YQ
11757 /* Format 8; special data processing insns. */
11758 record_buf[0] = ARM_PS_REGNUM;
11759 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11760 | bits (thumb_insn_r->arm_insn, 0, 2));
72508ac0
PO
11761 thumb_insn_r->reg_rec_count = 2;
11762 }
11763 }
11764 else
11765 {
11766 /* Format 5; data processing insns. */
11767 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11768 if (bit (thumb_insn_r->arm_insn, 7))
11769 {
11770 reg_src1 = reg_src1 + 8;
11771 }
11772 record_buf[0] = ARM_PS_REGNUM;
11773 record_buf[1] = reg_src1;
11774 thumb_insn_r->reg_rec_count = 2;
11775 }
11776
11777 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11778 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11779 record_buf_mem);
11780
11781 return 0;
11782}
11783
11784/* Handling opcode 001 insns. */
11785
11786static int
11787thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11788{
11789 struct regcache *reg_cache = thumb_insn_r->regcache;
11790 uint32_t record_buf[8], record_buf_mem[8];
11791
11792 uint32_t reg_src1 = 0;
11793 uint32_t opcode = 0, immed_5 = 0;
11794
11795 ULONGEST u_regval = 0;
11796
11797 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11798
11799 if (opcode)
11800 {
11801 /* LDR(1). */
11802 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11803 record_buf[0] = reg_src1;
11804 thumb_insn_r->reg_rec_count = 1;
11805 }
11806 else
11807 {
11808 /* STR(1). */
11809 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11810 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11811 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11812 record_buf_mem[0] = 4;
11813 record_buf_mem[1] = u_regval + (immed_5 * 4);
11814 thumb_insn_r->mem_rec_count = 1;
11815 }
11816
11817 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11818 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11819 record_buf_mem);
11820
11821 return 0;
11822}
11823
11824/* Handling opcode 100 insns. */
11825
11826static int
11827thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11828{
11829 struct regcache *reg_cache = thumb_insn_r->regcache;
11830 uint32_t record_buf[8], record_buf_mem[8];
11831
11832 uint32_t reg_src1 = 0;
11833 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11834
11835 ULONGEST u_regval = 0;
11836
11837 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11838
11839 if (3 == opcode)
11840 {
11841 /* LDR(4). */
11842 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11843 record_buf[0] = reg_src1;
11844 thumb_insn_r->reg_rec_count = 1;
11845 }
11846 else if (1 == opcode)
11847 {
11848 /* LDRH(1). */
11849 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11850 record_buf[0] = reg_src1;
11851 thumb_insn_r->reg_rec_count = 1;
11852 }
11853 else if (2 == opcode)
11854 {
11855 /* STR(3). */
11856 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11857 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11858 record_buf_mem[0] = 4;
11859 record_buf_mem[1] = u_regval + (immed_8 * 4);
11860 thumb_insn_r->mem_rec_count = 1;
11861 }
11862 else if (0 == opcode)
11863 {
11864 /* STRH(1). */
11865 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11866 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11867 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11868 record_buf_mem[0] = 2;
11869 record_buf_mem[1] = u_regval + (immed_5 * 2);
11870 thumb_insn_r->mem_rec_count = 1;
11871 }
11872
11873 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11874 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11875 record_buf_mem);
11876
11877 return 0;
11878}
11879
11880/* Handling opcode 101 insns. */
11881
11882static int
11883thumb_record_misc (insn_decode_record *thumb_insn_r)
11884{
11885 struct regcache *reg_cache = thumb_insn_r->regcache;
11886
b121eeb9 11887 uint32_t opcode = 0;
72508ac0 11888 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11889 uint32_t index = 0, start_address = 0;
11890 uint32_t record_buf[24], record_buf_mem[48];
11891 uint32_t reg_src1;
11892
11893 ULONGEST u_regval = 0;
11894
11895 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
72508ac0 11896
b121eeb9 11897 if (opcode == 0 || opcode == 1)
72508ac0 11898 {
11899 /* ADR and ADD (SP plus immediate) */
11900
11901 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11902 record_buf[0] = reg_src1;
11903 thumb_insn_r->reg_rec_count = 1;
11904 }
b121eeb9 11905 else
72508ac0 11906 {
11907 /* Miscellaneous 16-bit instructions */
11908 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11909
11910 switch (opcode2)
11911 {
11912 case 6:
11913 /* SETEND and CPS */
11914 break;
11915 case 0:
11916 /* ADD/SUB (SP plus immediate) */
11917 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11918 record_buf[0] = ARM_SP_REGNUM;
11919 thumb_insn_r->reg_rec_count = 1;
11920 break;
11921 case 1: /* fall through */
11922 case 3: /* fall through */
11923 case 9: /* fall through */
11924 case 11:
11925 /* CBNZ, CBZ */
11926 break;
11927 case 2:
11928 /* SXTH, SXTB, UXTH, UXTB */
11929 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11930 thumb_insn_r->reg_rec_count = 1;
11931 break;
11932 case 4: /* fall through */
11933 case 5:
11934 /* PUSH. */
11935 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11936 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
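          /* Bit 8 of the insn is the M bit: when it is set, LR is pushed as
             well, which is why it is added to register_count in the start
             address calculation below.  */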
11937 while (register_bits)
11938 {
11939 if (register_bits & 0x00000001)
11940 register_count++;
11941 register_bits = register_bits >> 1;
11942 }
11943 start_address = u_regval - \
11944 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11945 thumb_insn_r->mem_rec_count = register_count;
11946 while (register_count)
11947 {
11948 record_buf_mem[(register_count * 2) - 1] = start_address;
11949 record_buf_mem[(register_count * 2) - 2] = 4;
11950 start_address = start_address + 4;
11951 register_count--;
11952 }
11953 record_buf[0] = ARM_SP_REGNUM;
11954 thumb_insn_r->reg_rec_count = 1;
11955 break;
11956 case 10:
11957 /* REV, REV16, REVSH */
11958 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11959 thumb_insn_r->reg_rec_count = 1;
11960 break;
11961 case 12: /* fall through */
11962 case 13:
11963 /* POP. */
11964 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11965 while (register_bits)
11966 {
11967 if (register_bits & 0x00000001)
11968 record_buf[index++] = register_count;
11969 register_bits = register_bits >> 1;
11970 register_count++;
11971 }
11972 record_buf[index++] = ARM_PS_REGNUM;
11973 record_buf[index++] = ARM_SP_REGNUM;
11974 thumb_insn_r->reg_rec_count = index;
11975 break;
11976 case 0xe:
11977 /* BKPT insn. */
11978 /* Handle enhanced software breakpoint insn, BKPT. */
 11979 /* The CPSR is changed so that execution continues in ARM state, normal
 11980 interrupts are disabled, and abort mode is entered. */
 11981 /* The PC is set according to the high vector configuration. */
 11982 /* If the user hits the breakpoint and then types reverse, we need to go
 11983 back with the previous CPSR and Program Counter. */
11984 record_buf[0] = ARM_PS_REGNUM;
11985 record_buf[1] = ARM_LR_REGNUM;
11986 thumb_insn_r->reg_rec_count = 2;
11987 /* We need to save SPSR value, which is not yet done. */
11988 printf_unfiltered (_("Process record does not support instruction "
11989 "0x%0x at address %s.\n"),
11990 thumb_insn_r->arm_insn,
11991 paddress (thumb_insn_r->gdbarch,
11992 thumb_insn_r->this_addr));
11993 return -1;
11994
11995 case 0xf:
11996 /* If-Then, and hints */
11997 break;
11998 default:
11999 return -1;
12000 };
12001 }
12002
12003 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12004 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12005 record_buf_mem);
12006
12007 return 0;
12008}
12009
12010/* Handling opcode 110 insns. */
12011
12012static int
12013thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12014{
12015 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12016 struct regcache *reg_cache = thumb_insn_r->regcache;
12017
12018 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12019 uint32_t reg_src1 = 0;
12020 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 12021 uint32_t index = 0, start_address = 0;
12022 uint32_t record_buf[24], record_buf_mem[48];
12023
12024 ULONGEST u_regval = 0;
12025
12026 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12027 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12028
12029 if (1 == opcode2)
12030 {
12031
12032 /* LDMIA. */
12033 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12034 /* Get Rn. */
12035 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12036 while (register_bits)
12037 {
12038 if (register_bits & 0x00000001)
f969241e 12039 record_buf[index++] = register_count;
72508ac0 12040 register_bits = register_bits >> 1;
f969241e 12041 register_count++;
72508ac0 12042 }
12043 record_buf[index++] = reg_src1;
12044 thumb_insn_r->reg_rec_count = index;
12045 }
12046 else if (0 == opcode2)
12047 {
 12048 /* Handle STMIA. */
12049 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12050 /* Get Rn. */
12051 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12052 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12053 while (register_bits)
12054 {
12055 if (register_bits & 0x00000001)
12056 register_count++;
12057 register_bits = register_bits >> 1;
12058 }
12059 start_address = u_regval;
12060 thumb_insn_r->mem_rec_count = register_count;
12061 while (register_count)
12062 {
12063 record_buf_mem[(register_count * 2) - 1] = start_address;
12064 record_buf_mem[(register_count * 2) - 2] = 4;
12065 start_address = start_address + 4;
12066 register_count--;
12067 }
12068 }
12069 else if (0x1F == opcode1)
12070 {
12071 /* Handle arm syscall insn. */
97dfe206 12072 if (tdep->arm_syscall_record != NULL)
72508ac0 12073 {
12074 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12075 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12076 }
12077 else
12078 {
12079 printf_unfiltered (_("no syscall record support\n"));
12080 return -1;
12081 }
12082 }
12083
 12084 /* B (1), the conditional branch, is automatically taken care of in
 12085 process_record, as the PC is saved there. */
12086
12087 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12088 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12089 record_buf_mem);
12090
12091 return ret;
12092}
12093
12094/* Handling opcode 111 insns. */
12095
12096static int
12097thumb_record_branch (insn_decode_record *thumb_insn_r)
12098{
12099 uint32_t record_buf[8];
12100 uint32_t bits_h = 0;
12101
12102 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12103
12104 if (2 == bits_h || 3 == bits_h)
12105 {
12106 /* BL */
12107 record_buf[0] = ARM_LR_REGNUM;
12108 thumb_insn_r->reg_rec_count = 1;
12109 }
12110 else if (1 == bits_h)
12111 {
12112 /* BLX(1). */
12113 record_buf[0] = ARM_PS_REGNUM;
12114 record_buf[1] = ARM_LR_REGNUM;
12115 thumb_insn_r->reg_rec_count = 2;
12116 }
12117
 12118 /* B(2) is automatically taken care of in process_record, as the PC
 12119 is saved there. */
12120
12121 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12122
12123 return 0;
12124}
12125
12126/* Handler for thumb2 load/store multiple instructions. */
12127
12128static int
12129thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12130{
12131 struct regcache *reg_cache = thumb2_insn_r->regcache;
12132
12133 uint32_t reg_rn, op;
12134 uint32_t register_bits = 0, register_count = 0;
12135 uint32_t index = 0, start_address = 0;
12136 uint32_t record_buf[24], record_buf_mem[48];
12137
12138 ULONGEST u_regval = 0;
12139
12140 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12141 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12142
12143 if (0 == op || 3 == op)
12144 {
12145 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12146 {
12147 /* Handle RFE instruction. */
12148 record_buf[0] = ARM_PS_REGNUM;
12149 thumb2_insn_r->reg_rec_count = 1;
12150 }
12151 else
12152 {
12153 /* Handle SRS instruction after reading banked SP. */
12154 return arm_record_unsupported_insn (thumb2_insn_r);
12155 }
12156 }
12157 else if (1 == op || 2 == op)
12158 {
12159 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12160 {
12161 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12162 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12163 while (register_bits)
12164 {
12165 if (register_bits & 0x00000001)
12166 record_buf[index++] = register_count;
12167
12168 register_count++;
12169 register_bits = register_bits >> 1;
12170 }
12171 record_buf[index++] = reg_rn;
12172 record_buf[index++] = ARM_PS_REGNUM;
12173 thumb2_insn_r->reg_rec_count = index;
12174 }
12175 else
12176 {
12177 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12178 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12179 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12180 while (register_bits)
12181 {
12182 if (register_bits & 0x00000001)
12183 register_count++;
12184
12185 register_bits = register_bits >> 1;
12186 }
12187
12188 if (1 == op)
12189 {
 12190 /* Start address calculation for STMIA/STMEA. */
12191 start_address = u_regval;
12192 }
12193 else if (2 == op)
12194 {
 12195 /* Start address calculation for STMDB/STMFD. */
12196 start_address = u_regval - register_count * 4;
12197 }
12198
12199 thumb2_insn_r->mem_rec_count = register_count;
12200 while (register_count)
12201 {
12202 record_buf_mem[register_count * 2 - 1] = start_address;
12203 record_buf_mem[register_count * 2 - 2] = 4;
12204 start_address = start_address + 4;
12205 register_count--;
12206 }
12207 record_buf[0] = reg_rn;
12208 record_buf[1] = ARM_PS_REGNUM;
12209 thumb2_insn_r->reg_rec_count = 2;
12210 }
12211 }
12212
12213 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12214 record_buf_mem);
12215 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12216 record_buf);
12217 return ARM_RECORD_SUCCESS;
12218}
12219
12220/* Handler for thumb2 load/store (dual/exclusive) and table branch
12221 instructions. */
12222
12223static int
12224thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12225{
12226 struct regcache *reg_cache = thumb2_insn_r->regcache;
12227
12228 uint32_t reg_rd, reg_rn, offset_imm;
12229 uint32_t reg_dest1, reg_dest2;
12230 uint32_t address, offset_addr;
12231 uint32_t record_buf[8], record_buf_mem[8];
12232 uint32_t op1, op2, op3;
12233
12234 ULONGEST u_regval[2];
12235
12236 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12237 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12238 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12239
12240 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12241 {
12242 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12243 {
12244 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12245 record_buf[0] = reg_dest1;
12246 record_buf[1] = ARM_PS_REGNUM;
12247 thumb2_insn_r->reg_rec_count = 2;
12248 }
12249
12250 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12251 {
12252 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12253 record_buf[2] = reg_dest2;
12254 thumb2_insn_r->reg_rec_count = 3;
12255 }
12256 }
12257 else
12258 {
12259 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12260 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12261
12262 if (0 == op1 && 0 == op2)
12263 {
12264 /* Handle STREX. */
12265 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12266 address = u_regval[0] + (offset_imm * 4);
12267 record_buf_mem[0] = 4;
12268 record_buf_mem[1] = address;
12269 thumb2_insn_r->mem_rec_count = 1;
12270 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12271 record_buf[0] = reg_rd;
12272 thumb2_insn_r->reg_rec_count = 1;
12273 }
12274 else if (1 == op1 && 0 == op2)
12275 {
12276 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12277 record_buf[0] = reg_rd;
12278 thumb2_insn_r->reg_rec_count = 1;
12279 address = u_regval[0];
12280 record_buf_mem[1] = address;
12281
12282 if (4 == op3)
12283 {
12284 /* Handle STREXB. */
12285 record_buf_mem[0] = 1;
12286 thumb2_insn_r->mem_rec_count = 1;
12287 }
12288 else if (5 == op3)
12289 {
12290 /* Handle STREXH. */
 12291 record_buf_mem[0] = 2;
12292 thumb2_insn_r->mem_rec_count = 1;
12293 }
12294 else if (7 == op3)
12295 {
12296 /* Handle STREXD. */
12297 address = u_regval[0];
12298 record_buf_mem[0] = 4;
12299 record_buf_mem[2] = 4;
12300 record_buf_mem[3] = address + 4;
12301 thumb2_insn_r->mem_rec_count = 2;
12302 }
12303 }
12304 else
12305 {
12306 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12307
12308 if (bit (thumb2_insn_r->arm_insn, 24))
12309 {
12310 if (bit (thumb2_insn_r->arm_insn, 23))
12311 offset_addr = u_regval[0] + (offset_imm * 4);
12312 else
12313 offset_addr = u_regval[0] - (offset_imm * 4);
12314
12315 address = offset_addr;
12316 }
12317 else
12318 address = u_regval[0];
12319
12320 record_buf_mem[0] = 4;
12321 record_buf_mem[1] = address;
12322 record_buf_mem[2] = 4;
12323 record_buf_mem[3] = address + 4;
12324 thumb2_insn_r->mem_rec_count = 2;
12325 record_buf[0] = reg_rn;
12326 thumb2_insn_r->reg_rec_count = 1;
12327 }
12328 }
12329
12330 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12331 record_buf);
12332 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12333 record_buf_mem);
12334 return ARM_RECORD_SUCCESS;
12335}
12336
12337/* Handler for thumb2 data processing (shift register and modified immediate)
12338 instructions. */
12339
12340static int
12341thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12342{
12343 uint32_t reg_rd, op;
12344 uint32_t record_buf[8];
12345
12346 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12347 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12348
12349 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12350 {
12351 record_buf[0] = ARM_PS_REGNUM;
12352 thumb2_insn_r->reg_rec_count = 1;
12353 }
12354 else
12355 {
12356 record_buf[0] = reg_rd;
12357 record_buf[1] = ARM_PS_REGNUM;
12358 thumb2_insn_r->reg_rec_count = 2;
12359 }
12360
12361 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12362 record_buf);
12363 return ARM_RECORD_SUCCESS;
12364}
12365
12366/* Generic handler for thumb2 instructions which affect destination and PS
12367 registers. */
12368
12369static int
12370thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12371{
12372 uint32_t reg_rd;
12373 uint32_t record_buf[8];
12374
12375 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12376
12377 record_buf[0] = reg_rd;
12378 record_buf[1] = ARM_PS_REGNUM;
12379 thumb2_insn_r->reg_rec_count = 2;
12380
12381 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12382 record_buf);
12383 return ARM_RECORD_SUCCESS;
12384}
12385
12386/* Handler for thumb2 branch and miscellaneous control instructions. */
12387
12388static int
12389thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12390{
12391 uint32_t op, op1, op2;
12392 uint32_t record_buf[8];
12393
12394 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12395 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12396 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12397
12398 /* Handle MSR insn. */
12399 if (!(op1 & 0x2) && 0x38 == op)
12400 {
12401 if (!(op2 & 0x3))
12402 {
12403 /* CPSR is going to be changed. */
12404 record_buf[0] = ARM_PS_REGNUM;
12405 thumb2_insn_r->reg_rec_count = 1;
12406 }
12407 else
12408 {
12409 arm_record_unsupported_insn(thumb2_insn_r);
12410 return -1;
12411 }
12412 }
12413 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12414 {
12415 /* BLX. */
12416 record_buf[0] = ARM_PS_REGNUM;
12417 record_buf[1] = ARM_LR_REGNUM;
12418 thumb2_insn_r->reg_rec_count = 2;
12419 }
12420
12421 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12422 record_buf);
12423 return ARM_RECORD_SUCCESS;
12424}
12425
12426/* Handler for thumb2 store single data item instructions. */
12427
12428static int
12429thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12430{
12431 struct regcache *reg_cache = thumb2_insn_r->regcache;
12432
12433 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12434 uint32_t address, offset_addr;
12435 uint32_t record_buf[8], record_buf_mem[8];
12436 uint32_t op1, op2;
12437
12438 ULONGEST u_regval[2];
12439
12440 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12441 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12442 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12443 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12444
12445 if (bit (thumb2_insn_r->arm_insn, 23))
12446 {
12447 /* T2 encoding. */
12448 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12449 offset_addr = u_regval[0] + offset_imm;
12450 address = offset_addr;
12451 }
12452 else
12453 {
12454 /* T3 encoding. */
12455 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12456 {
12457 /* Handle STRB (register). */
12458 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12459 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12460 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12461 offset_addr = u_regval[1] << shift_imm;
12462 address = u_regval[0] + offset_addr;
12463 }
12464 else
12465 {
12466 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12467 if (bit (thumb2_insn_r->arm_insn, 10))
12468 {
12469 if (bit (thumb2_insn_r->arm_insn, 9))
12470 offset_addr = u_regval[0] + offset_imm;
12471 else
12472 offset_addr = u_regval[0] - offset_imm;
12473
12474 address = offset_addr;
12475 }
12476 else
12477 address = u_regval[0];
12478 }
12479 }
12480
12481 switch (op1)
12482 {
12483 /* Store byte instructions. */
12484 case 4:
12485 case 0:
12486 record_buf_mem[0] = 1;
12487 break;
12488 /* Store half word instructions. */
12489 case 1:
12490 case 5:
12491 record_buf_mem[0] = 2;
12492 break;
12493 /* Store word instructions. */
12494 case 2:
12495 case 6:
12496 record_buf_mem[0] = 4;
12497 break;
12498
12499 default:
12500 gdb_assert_not_reached ("no decoding pattern found");
12501 break;
12502 }
12503
12504 record_buf_mem[1] = address;
12505 thumb2_insn_r->mem_rec_count = 1;
12506 record_buf[0] = reg_rn;
12507 thumb2_insn_r->reg_rec_count = 1;
12508
12509 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12510 record_buf);
12511 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12512 record_buf_mem);
12513 return ARM_RECORD_SUCCESS;
12514}
12515
12516/* Handler for thumb2 load memory hints instructions. */
12517
12518static int
12519thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12520{
12521 uint32_t record_buf[8];
12522 uint32_t reg_rt, reg_rn;
12523
12524 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12525 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12526
12527 if (ARM_PC_REGNUM != reg_rt)
12528 {
12529 record_buf[0] = reg_rt;
12530 record_buf[1] = reg_rn;
12531 record_buf[2] = ARM_PS_REGNUM;
12532 thumb2_insn_r->reg_rec_count = 3;
12533
12534 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12535 record_buf);
12536 return ARM_RECORD_SUCCESS;
12537 }
12538
12539 return ARM_RECORD_FAILURE;
12540}
12541
12542/* Handler for thumb2 load word instructions. */
12543
12544static int
12545thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12546{
12547 uint32_t record_buf[8];
12548
12549 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12550 record_buf[1] = ARM_PS_REGNUM;
12551 thumb2_insn_r->reg_rec_count = 2;
12552
12553 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12554 record_buf);
12555 return ARM_RECORD_SUCCESS;
12556}
12557
12558/* Handler for thumb2 long multiply, long multiply accumulate, and
12559 divide instructions. */
12560
12561static int
12562thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12563{
12564 uint32_t opcode1 = 0, opcode2 = 0;
12565 uint32_t record_buf[8];
12566
12567 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12568 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12569
12570 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12571 {
 12572 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S), i.e. the long
 12573 multiply and long multiply accumulate instructions. */
12574 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12575 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12576 record_buf[2] = ARM_PS_REGNUM;
12577 thumb2_insn_r->reg_rec_count = 3;
12578 }
 12579 else if (1 == opcode1 || 3 == opcode1)
12580 {
12581 /* Handle SDIV and UDIV. */
12582 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12583 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12584 record_buf[2] = ARM_PS_REGNUM;
12585 thumb2_insn_r->reg_rec_count = 3;
12586 }
12587 else
12588 return ARM_RECORD_FAILURE;
12589
12590 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12591 record_buf);
12592 return ARM_RECORD_SUCCESS;
12593}
12594
12595/* Record handler for thumb32 coprocessor instructions. */
12596
12597static int
12598thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12599{
12600 if (bit (thumb2_insn_r->arm_insn, 25))
12601 return arm_record_coproc_data_proc (thumb2_insn_r);
12602 else
12603 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12604}
12605
12606/* Record handler for advanced SIMD structure load/store instructions. */
12607
12608static int
12609thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12610{
12611 struct regcache *reg_cache = thumb2_insn_r->regcache;
12612 uint32_t l_bit, a_bit, b_bits;
12613 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12614 uint32_t reg_rn, reg_vd, address, f_elem;
12615 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12616 uint8_t f_ebytes;
12617
12618 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12619 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12620 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12621 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12622 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12623 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12624 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12625 f_elem = 8 / f_ebytes;
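  /* f_ebytes is the size in bytes of one element and f_elem the number of
     elements that fit in one 8-byte D register.  */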
12626
12627 if (!l_bit)
12628 {
12629 ULONGEST u_regval = 0;
12630 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12631 address = u_regval;
12632
12633 if (!a_bit)
12634 {
12635 /* Handle VST1. */
12636 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12637 {
12638 if (b_bits == 0x07)
12639 bf_regs = 1;
12640 else if (b_bits == 0x0a)
12641 bf_regs = 2;
12642 else if (b_bits == 0x06)
12643 bf_regs = 3;
12644 else if (b_bits == 0x02)
12645 bf_regs = 4;
12646 else
12647 bf_regs = 0;
12648
12649 for (index_r = 0; index_r < bf_regs; index_r++)
12650 {
12651 for (index_e = 0; index_e < f_elem; index_e++)
12652 {
12653 record_buf_mem[index_m++] = f_ebytes;
12654 record_buf_mem[index_m++] = address;
12655 address = address + f_ebytes;
12656 thumb2_insn_r->mem_rec_count += 1;
12657 }
12658 }
12659 }
12660 /* Handle VST2. */
12661 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12662 {
12663 if (b_bits == 0x09 || b_bits == 0x08)
12664 bf_regs = 1;
12665 else if (b_bits == 0x03)
12666 bf_regs = 2;
12667 else
12668 bf_regs = 0;
12669
12670 for (index_r = 0; index_r < bf_regs; index_r++)
12671 for (index_e = 0; index_e < f_elem; index_e++)
12672 {
12673 for (loop_t = 0; loop_t < 2; loop_t++)
12674 {
12675 record_buf_mem[index_m++] = f_ebytes;
12676 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12677 thumb2_insn_r->mem_rec_count += 1;
12678 }
12679 address = address + (2 * f_ebytes);
12680 }
12681 }
12682 /* Handle VST3. */
12683 else if ((b_bits & 0x0e) == 0x04)
12684 {
12685 for (index_e = 0; index_e < f_elem; index_e++)
12686 {
12687 for (loop_t = 0; loop_t < 3; loop_t++)
12688 {
12689 record_buf_mem[index_m++] = f_ebytes;
12690 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12691 thumb2_insn_r->mem_rec_count += 1;
12692 }
12693 address = address + (3 * f_ebytes);
12694 }
12695 }
12696 /* Handle VST4. */
12697 else if (!(b_bits & 0x0e))
12698 {
12699 for (index_e = 0; index_e < f_elem; index_e++)
12700 {
12701 for (loop_t = 0; loop_t < 4; loop_t++)
12702 {
12703 record_buf_mem[index_m++] = f_ebytes;
12704 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12705 thumb2_insn_r->mem_rec_count += 1;
12706 }
12707 address = address + (4 * f_ebytes);
12708 }
12709 }
12710 }
12711 else
12712 {
12713 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12714
12715 if (bft_size == 0x00)
12716 f_ebytes = 1;
12717 else if (bft_size == 0x01)
12718 f_ebytes = 2;
12719 else if (bft_size == 0x02)
12720 f_ebytes = 4;
12721 else
12722 f_ebytes = 0;
12723
12724 /* Handle VST1. */
12725 if (!(b_bits & 0x0b) || b_bits == 0x08)
12726 thumb2_insn_r->mem_rec_count = 1;
12727 /* Handle VST2. */
12728 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12729 thumb2_insn_r->mem_rec_count = 2;
12730 /* Handle VST3. */
12731 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12732 thumb2_insn_r->mem_rec_count = 3;
12733 /* Handle VST4. */
12734 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12735 thumb2_insn_r->mem_rec_count = 4;
12736
12737 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12738 {
 12739 record_buf_mem[index_m * 2] = f_ebytes;
 12740 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12741 }
12742 }
12743 }
12744 else
12745 {
12746 if (!a_bit)
12747 {
12748 /* Handle VLD1. */
12749 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12750 thumb2_insn_r->reg_rec_count = 1;
12751 /* Handle VLD2. */
12752 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12753 thumb2_insn_r->reg_rec_count = 2;
12754 /* Handle VLD3. */
12755 else if ((b_bits & 0x0e) == 0x04)
12756 thumb2_insn_r->reg_rec_count = 3;
12757 /* Handle VLD4. */
12758 else if (!(b_bits & 0x0e))
12759 thumb2_insn_r->reg_rec_count = 4;
12760 }
12761 else
12762 {
12763 /* Handle VLD1. */
12764 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12765 thumb2_insn_r->reg_rec_count = 1;
12766 /* Handle VLD2. */
12767 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12768 thumb2_insn_r->reg_rec_count = 2;
12769 /* Handle VLD3. */
12770 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12771 thumb2_insn_r->reg_rec_count = 3;
12772 /* Handle VLD4. */
12773 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12774 thumb2_insn_r->reg_rec_count = 4;
12775
12776 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12777 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12778 }
12779 }
12780
12781 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12782 {
12783 record_buf[index_r] = reg_rn;
12784 thumb2_insn_r->reg_rec_count += 1;
12785 }
12786
12787 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12788 record_buf);
12789 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12790 record_buf_mem);
12791 return 0;
12792}
12793
12794/* Decodes thumb2 instruction type and invokes its record handler. */
12795
12796static unsigned int
12797thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12798{
12799 uint32_t op, op1, op2;
12800
12801 op = bit (thumb2_insn_r->arm_insn, 15);
12802 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12803 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12804
12805 if (op1 == 0x01)
12806 {
12807 if (!(op2 & 0x64 ))
12808 {
12809 /* Load/store multiple instruction. */
12810 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12811 }
b121eeb9 12812 else if ((op2 & 0x64) == 0x4)
12813 {
12814 /* Load/store (dual/exclusive) and table branch instruction. */
12815 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12816 }
b121eeb9 12817 else if ((op2 & 0x60) == 0x20)
12818 {
12819 /* Data-processing (shifted register). */
12820 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12821 }
12822 else if (op2 & 0x40)
12823 {
12824 /* Co-processor instructions. */
60cc5e93 12825 return thumb2_record_coproc_insn (thumb2_insn_r);
12826 }
12827 }
12828 else if (op1 == 0x02)
12829 {
12830 if (op)
12831 {
12832 /* Branches and miscellaneous control instructions. */
12833 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12834 }
12835 else if (op2 & 0x20)
12836 {
12837 /* Data-processing (plain binary immediate) instruction. */
12838 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12839 }
12840 else
12841 {
12842 /* Data-processing (modified immediate). */
12843 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12844 }
12845 }
12846 else if (op1 == 0x03)
12847 {
12848 if (!(op2 & 0x71 ))
12849 {
12850 /* Store single data item. */
12851 return thumb2_record_str_single_data (thumb2_insn_r);
12852 }
12853 else if (!((op2 & 0x71) ^ 0x10))
12854 {
12855 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12856 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12857 }
12858 else if (!((op2 & 0x67) ^ 0x01))
12859 {
12860 /* Load byte, memory hints instruction. */
12861 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12862 }
12863 else if (!((op2 & 0x67) ^ 0x03))
12864 {
12865 /* Load halfword, memory hints instruction. */
12866 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12867 }
12868 else if (!((op2 & 0x67) ^ 0x05))
12869 {
12870 /* Load word instruction. */
12871 return thumb2_record_ld_word (thumb2_insn_r);
12872 }
12873 else if (!((op2 & 0x70) ^ 0x20))
12874 {
12875 /* Data-processing (register) instruction. */
12876 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12877 }
12878 else if (!((op2 & 0x78) ^ 0x30))
12879 {
12880 /* Multiply, multiply accumulate, abs diff instruction. */
12881 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12882 }
12883 else if (!((op2 & 0x78) ^ 0x38))
12884 {
12885 /* Long multiply, long multiply accumulate, and divide. */
12886 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12887 }
12888 else if (op2 & 0x40)
12889 {
12890 /* Co-processor instructions. */
60cc5e93 12891 return thumb2_record_coproc_insn (thumb2_insn_r);
12892 }
12893 }
12894
12895 return -1;
12896}
72508ac0 12897
ffdbe864 12898namespace {
12899/* Abstract memory reader. */
12900
12901class abstract_memory_reader
12902{
12903public:
12904 /* Read LEN bytes of target memory at address MEMADDR, placing the
12905 results in GDB's memory at BUF. Return true on success. */
12906
12907 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12908};
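/* Going through this interface instead of calling target_read_memory
   directly lets the selftests below substitute instruction_reader_thumb,
   which serves instructions from a constant buffer.  */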
12909
12910/* Instruction reader from real target. */
12911
12912class instruction_reader : public abstract_memory_reader
12913{
12914 public:
12915 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
12916 {
12917 if (target_read_memory (memaddr, buf, len))
12918 return false;
12919 else
12920 return true;
12921 }
12922};
12923
12924} // namespace
12925
12926/* Extract an arm/thumb/thumb2 insn depending on the size, and return 0 on
12927 success and a positive value on failure. */
12928
12929static int
12930extract_arm_insn (abstract_memory_reader& reader,
12931 insn_decode_record *insn_record, uint32_t insn_size)
12932{
12933 gdb_byte buf[insn_size];
12934
12935 memset (&buf[0], 0, insn_size);
12936
728a7913 12937 if (!reader.read (insn_record->this_addr, buf, insn_size))
12938 return 1;
12939 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12940 insn_size,
2959fed9 12941 gdbarch_byte_order_for_code (insn_record->gdbarch));
12942 return 0;
12943}
12944
12945typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12946
12947/* Decode an arm/thumb insn depending on its condition codes and opcodes, and
12948 dispatch it. */
12949
12950static int
12951decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
12952 record_type_t record_type, uint32_t insn_size)
12953{
12954
 12955 /* Counting bits from 0, bits 25, 26 and 27 decode the type of an arm
 12956 instruction. */
0fa9c223 12957 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12958 {
12959 arm_record_data_proc_misc_ld_str, /* 000. */
12960 arm_record_data_proc_imm, /* 001. */
12961 arm_record_ld_st_imm_offset, /* 010. */
12962 arm_record_ld_st_reg_offset, /* 011. */
12963 arm_record_ld_st_multiple, /* 100. */
12964 arm_record_b_bl, /* 101. */
60cc5e93 12965 arm_record_asimd_vfp_coproc, /* 110. */
12966 arm_record_coproc_data_proc /* 111. */
12967 };
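  /* For example, "push {fp, lr}" (0xe92d4800) has 100 in bits 27:25 and is
     therefore dispatched to arm_record_ld_st_multiple.  */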
12968
 12969 /* Counting bits from 0, bits 13, 14 and 15 decode the type of a thumb
 12970 instruction. */
0fa9c223 12971 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
 12972 {
12973 thumb_record_shift_add_sub, /* 000. */
12974 thumb_record_add_sub_cmp_mov, /* 001. */
12975 thumb_record_ld_st_reg_offset, /* 010. */
12976 thumb_record_ld_st_imm_offset, /* 011. */
12977 thumb_record_ld_st_stack, /* 100. */
12978 thumb_record_misc, /* 101. */
12979 thumb_record_ldm_stm_swi, /* 110. */
12980 thumb_record_branch /* 111. */
12981 };
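  /* For example, the thumb insn "push {r4, lr}" (0xb510) has 101 in bits
     15:13 and is therefore dispatched to thumb_record_misc.  */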
12982
12983 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12984 uint32_t insn_id = 0;
12985
728a7913 12986 if (extract_arm_insn (reader, arm_record, insn_size))
12987 {
12988 if (record_debug)
12989 {
12990 printf_unfiltered (_("Process record: error reading memory at "
12991 "addr %s len = %d.\n"),
12992 paddress (arm_record->gdbarch,
12993 arm_record->this_addr), insn_size);
12994 }
12995 return -1;
12996 }
12997 else if (ARM_RECORD == record_type)
12998 {
12999 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13000 insn_id = bits (arm_record->arm_insn, 25, 27);
13001
13002 if (arm_record->cond == 0xf)
13003 ret = arm_record_extension_space (arm_record);
13004 else
01e57735 13005 {
13006 /* If this insn has fallen into extension space
13007 then we need not decode it anymore. */
13008 ret = arm_handle_insn[insn_id] (arm_record);
13009 }
13010 if (ret != ARM_RECORD_SUCCESS)
13011 {
13012 arm_record_unsupported_insn (arm_record);
13013 ret = -1;
13014 }
13015 }
13016 else if (THUMB_RECORD == record_type)
13017 {
 13018 /* As thumb does not have condition codes, we set cond to a negative value. */
13019 arm_record->cond = -1;
13020 insn_id = bits (arm_record->arm_insn, 13, 15);
13021 ret = thumb_handle_insn[insn_id] (arm_record);
13022 if (ret != ARM_RECORD_SUCCESS)
13023 {
13024 arm_record_unsupported_insn (arm_record);
13025 ret = -1;
13026 }
13027 }
13028 else if (THUMB2_RECORD == record_type)
13029 {
 13030 /* As thumb does not have condition codes, we set cond to a negative value. */
13031 arm_record->cond = -1;
13032
 13033 /* Swap the two halfwords; the decode handlers expect the first halfword of a 32-bit thumb insn in the upper 16 bits. */
13034 arm_record->arm_insn
01e57735 13035 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 13036
ca92db2d 13037 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 13038
ca92db2d 13039 if (ret != ARM_RECORD_SUCCESS)
13040 {
13041 arm_record_unsupported_insn (arm_record);
13042 ret = -1;
13043 }
13044 }
13045 else
13046 {
13047 /* Throw assertion. */
13048 gdb_assert_not_reached ("not a valid instruction, could not decode");
13049 }
13050
13051 return ret;
13052}
13053
13054#if GDB_SELF_TEST
13055namespace selftests {
13056
13057/* Provide both 16-bit and 32-bit thumb instructions. */
13058
13059class instruction_reader_thumb : public abstract_memory_reader
13060{
13061public:
13062 template<size_t SIZE>
13063 instruction_reader_thumb (enum bfd_endian endian,
13064 const uint16_t (&insns)[SIZE])
13065 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13066 {}
13067
13068 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
13069 {
13070 SELF_CHECK (len == 4 || len == 2);
13071 SELF_CHECK (memaddr % 2 == 0);
13072 SELF_CHECK ((memaddr / 2) < m_insns_size);
13073
13074 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13075 if (len == 4)
13076 {
13077 store_unsigned_integer (&buf[2], 2, m_endian,
13078 m_insns[memaddr / 2 + 1]);
13079 }
13080 return true;
13081 }
13082
13083private:
13084 enum bfd_endian m_endian;
13085 const uint16_t *m_insns;
13086 size_t m_insns_size;
13087};
13088
13089static void
13090arm_record_test (void)
13091{
13092 struct gdbarch_info info;
13093 gdbarch_info_init (&info);
13094 info.bfd_arch_info = bfd_scan_arch ("arm");
13095
13096 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13097
13098 SELF_CHECK (gdbarch != NULL);
13099
13100 /* 16-bit Thumb instructions. */
13101 {
13102 insn_decode_record arm_record;
13103
13104 memset (&arm_record, 0, sizeof (insn_decode_record));
13105 arm_record.gdbarch = gdbarch;
13106
13107 static const uint16_t insns[] = {
13108 /* db b2 uxtb r3, r3 */
13109 0xb2db,
13110 /* cd 58 ldr r5, [r1, r3] */
13111 0x58cd,
13112 };
13113
13114 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13115 instruction_reader_thumb reader (endian, insns);
13116 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13117 THUMB_INSN_SIZE_BYTES);
13118
13119 SELF_CHECK (ret == 0);
13120 SELF_CHECK (arm_record.mem_rec_count == 0);
13121 SELF_CHECK (arm_record.reg_rec_count == 1);
13122 SELF_CHECK (arm_record.arm_regs[0] == 3);
13123
13124 arm_record.this_addr += 2;
13125 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13126 THUMB_INSN_SIZE_BYTES);
13127
13128 SELF_CHECK (ret == 0);
13129 SELF_CHECK (arm_record.mem_rec_count == 0);
13130 SELF_CHECK (arm_record.reg_rec_count == 1);
13131 SELF_CHECK (arm_record.arm_regs[0] == 5);
13132 }
13133
13134 /* 32-bit Thumb-2 instructions. */
13135 {
13136 insn_decode_record arm_record;
13137
13138 memset (&arm_record, 0, sizeof (insn_decode_record));
13139 arm_record.gdbarch = gdbarch;
13140
13141 static const uint16_t insns[] = {
13142 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13143 0xee1d, 0x7f70,
13144 };
13145
13146 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13147 instruction_reader_thumb reader (endian, insns);
13148 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13149 THUMB2_INSN_SIZE_BYTES);
13150
13151 SELF_CHECK (ret == 0);
13152 SELF_CHECK (arm_record.mem_rec_count == 0);
13153 SELF_CHECK (arm_record.reg_rec_count == 1);
13154 SELF_CHECK (arm_record.arm_regs[0] == 7);
13155 }
13156}
13157} // namespace selftests
13158#endif /* GDB_SELF_TEST */
13159
13160/* Cleans up local record registers and memory allocations. */
13161
13162static void
13163deallocate_reg_mem (insn_decode_record *record)
13164{
13165 xfree (record->arm_regs);
13166 xfree (record->arm_mems);
13167}
13168
13169
01e57735 13170/* Parse the current instruction and record the values of the registers and
 13171 memory that will be changed by the current instruction to "record_arch_list".
 13172 Return -1 if something is wrong. */
13173
13174int
13175arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13176 CORE_ADDR insn_addr)
13177{
13178
13179 uint32_t no_of_rec = 0;
13180 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13181 ULONGEST t_bit = 0, insn_id = 0;
13182
13183 ULONGEST u_regval = 0;
13184
13185 insn_decode_record arm_record;
13186
13187 memset (&arm_record, 0, sizeof (insn_decode_record));
13188 arm_record.regcache = regcache;
13189 arm_record.this_addr = insn_addr;
13190 arm_record.gdbarch = gdbarch;
13191
13192
13193 if (record_debug > 1)
13194 {
13195 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 13196 "addr = %s\n",
72508ac0
PO
13197 paddress (gdbarch, arm_record.this_addr));
13198 }
13199
13200 instruction_reader reader;
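  /* Only the first halfword is read here; decode_insn re-reads the insn at
     the same address with the full size once we know whether it is an arm,
     thumb or thumb2 one.  */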
13201 if (extract_arm_insn (reader, &arm_record, 2))
13202 {
13203 if (record_debug)
13204 {
13205 printf_unfiltered (_("Process record: error reading memory at "
13206 "addr %s len = %d.\n"),
13207 paddress (arm_record.gdbarch,
13208 arm_record.this_addr), 2);
13209 }
13210 return -1;
13211 }
13212
 13213 /* Check whether the insn is a thumb or an arm one. */
13214
13215 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13216 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13217
13218
13219 if (!(u_regval & t_bit))
13220 {
13221 /* We are decoding arm insn. */
728a7913 13222 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13223 }
13224 else
13225 {
13226 insn_id = bits (arm_record.arm_insn, 11, 15);
 13227 /* Is it a 32-bit thumb2 insn (top five bits 0x1D, 0x1E or 0x1F)? */
13228 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
01e57735 13229 {
728a7913 13230 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13231 THUMB2_INSN_SIZE_BYTES);
13232 }
72508ac0 13233 else
13234 {
13235 /* We are decoding thumb insn. */
13236 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13237 THUMB_INSN_SIZE_BYTES);
01e57735 13238 }
13239 }
13240
13241 if (0 == ret)
13242 {
13243 /* Record registers. */
25ea693b 13244 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13245 if (arm_record.arm_regs)
13246 {
13247 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13248 {
13249 if (record_full_arch_list_add_reg
25ea693b 13250 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13251 ret = -1;
13252 }
13253 }
13254 /* Record memories. */
13255 if (arm_record.arm_mems)
13256 {
13257 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13258 {
13259 if (record_full_arch_list_add_mem
13260 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13261 arm_record.arm_mems[no_of_rec].len))
13262 ret = -1;
13263 }
13264 }
72508ac0 13265
25ea693b 13266 if (record_full_arch_list_add_end ())
01e57735 13267 ret = -1;
13268 }
13269
13270
13271 deallocate_reg_mem (&arm_record);
13272
13273 return ret;
13274}