gdb/arm-tdep.c
ed9a39eb 1/* Common target dependent code for GDB on ARM systems.
0fd88904 2
61baf725 3 Copyright (C) 1988-2017 Free Software Foundation, Inc.
c906108c 4
c5aa993b 5 This file is part of GDB.
c906108c 6
c5aa993b
JM
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
a9762ec7 9 the Free Software Foundation; either version 3 of the License, or
c5aa993b 10 (at your option) any later version.
c906108c 11
c5aa993b
JM
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
c906108c 16
c5aa993b 17 You should have received a copy of the GNU General Public License
a9762ec7 18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
c906108c 19
0baeab03
PA
20#include "defs.h"
21
0963b4bd 22#include <ctype.h> /* XXX for isupper (). */
34e8f22d 23
c906108c
SS
24#include "frame.h"
25#include "inferior.h"
45741a9c 26#include "infrun.h"
c906108c
SS
27#include "gdbcmd.h"
28#include "gdbcore.h"
0963b4bd 29#include "dis-asm.h" /* For register styles. */
e47ad6c0 30#include "disasm.h"
4e052eda 31#include "regcache.h"
54483882 32#include "reggroups.h"
d16aafd8 33#include "doublest.h"
fd0407d6 34#include "value.h"
34e8f22d 35#include "arch-utils.h"
4be87837 36#include "osabi.h"
eb5492fa
DJ
37#include "frame-unwind.h"
38#include "frame-base.h"
39#include "trad-frame.h"
842e1f1e
DJ
40#include "objfiles.h"
41#include "dwarf2-frame.h"
e4c16157 42#include "gdbtypes.h"
29d73ae4 43#include "prologue-value.h"
25f8c692 44#include "remote.h"
123dc839
DJ
45#include "target-descriptions.h"
46#include "user-regs.h"
0e9e9abd 47#include "observer.h"
34e8f22d 48
8689682c 49#include "arch/arm.h"
d9311bfa 50#include "arch/arm-get-next-pcs.h"
34e8f22d 51#include "arm-tdep.h"
26216b98 52#include "gdb/sim-arm.h"
34e8f22d 53
082fc60d
RE
54#include "elf-bfd.h"
55#include "coff/internal.h"
97e03143 56#include "elf/arm.h"
c906108c 57
60c5725c 58#include "vec.h"
26216b98 59
72508ac0 60#include "record.h"
d02ed0bb 61#include "record-full.h"
325fac50 62#include <algorithm>
72508ac0 63
0a69eedb
YQ
64#include "features/arm/arm-with-m.c"
65#include "features/arm/arm-with-m-fpa-layout.c"
66#include "features/arm/arm-with-m-vfp-d16.c"
67#include "features/arm/arm-with-iwmmxt.c"
68#include "features/arm/arm-with-vfpv2.c"
69#include "features/arm/arm-with-vfpv3.c"
70#include "features/arm/arm-with-neon.c"
9779414d 71
b121eeb9
YQ
72#if GDB_SELF_TEST
73#include "selftest.h"
74#endif
75
6529d2dd
AC
76static int arm_debug;
77
082fc60d
RE
78/* Macros for setting and testing a bit in a minimal symbol that marks
79 it as a Thumb function. The MSB of the minimal symbol's "info" field
f594e5e9 80 is used for this purpose.
082fc60d
RE
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
f594e5e9 83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
082fc60d 84
0963b4bd 85#define MSYMBOL_SET_SPECIAL(msym) \
b887350f 86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
082fc60d
RE
87
88#define MSYMBOL_IS_SPECIAL(msym) \
b887350f 89 MSYMBOL_TARGET_FLAG_1 (msym)
082fc60d 90
60c5725c
DJ
91/* Per-objfile data used for mapping symbols. */
92static const struct objfile_data *arm_objfile_data_key;
93
94struct arm_mapping_symbol
95{
96 bfd_vma value;
97 char type;
98};
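/* A note on the TYPE field above: it holds the class of the ELF mapping
   symbol that starts at VALUE, i.e. 'a' for ARM code ($a), 't' for Thumb
   code ($t) and 'd' for data ($d), as defined by the ARM ELF ABI. */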
99typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100DEF_VEC_O(arm_mapping_symbol_s);
101
102struct arm_per_objfile
103{
104 VEC(arm_mapping_symbol_s) **section_maps;
105};
106
afd7eef0
RE
107/* The list of available "set arm ..." and "show arm ..." commands. */
108static struct cmd_list_element *setarmcmdlist = NULL;
109static struct cmd_list_element *showarmcmdlist = NULL;
110
fd50bc42
RE
111/* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
40478521 113static const char *const fp_model_strings[] =
fd50bc42
RE
114{
115 "auto",
116 "softfpa",
117 "fpa",
118 "softvfp",
28e97307
DJ
119 "vfp",
120 NULL
fd50bc42
RE
121};
122
123/* A variable that can be configured by the user. */
124static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125static const char *current_fp_model = "auto";
126
28e97307 127/* The ABI to use. Keep this in sync with arm_abi_kind. */
40478521 128static const char *const arm_abi_strings[] =
28e97307
DJ
129{
130 "auto",
131 "APCS",
132 "AAPCS",
133 NULL
134};
135
136/* A variable that can be configured by the user. */
137static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138static const char *arm_abi_string = "auto";
139
0428b8f5 140/* The execution mode to assume. */
40478521 141static const char *const arm_mode_strings[] =
0428b8f5
DJ
142 {
143 "auto",
144 "arm",
68770265
MGD
145 "thumb",
146 NULL
0428b8f5
DJ
147 };
148
149static const char *arm_fallback_mode_string = "auto";
150static const char *arm_force_mode_string = "auto";
151
f32bf4a4
YQ
152/* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
123dc839
DJ
156static const struct
157{
158 const char *name;
159 int regnum;
160} arm_register_aliases[] = {
161 /* Basic register numbers. */
162 { "r0", 0 },
163 { "r1", 1 },
164 { "r2", 2 },
165 { "r3", 3 },
166 { "r4", 4 },
167 { "r5", 5 },
168 { "r6", 6 },
169 { "r7", 7 },
170 { "r8", 8 },
171 { "r9", 9 },
172 { "r10", 10 },
173 { "r11", 11 },
174 { "r12", 12 },
175 { "r13", 13 },
176 { "r14", 14 },
177 { "r15", 15 },
178 /* Synonyms (argument and variable registers). */
179 { "a1", 0 },
180 { "a2", 1 },
181 { "a3", 2 },
182 { "a4", 3 },
183 { "v1", 4 },
184 { "v2", 5 },
185 { "v3", 6 },
186 { "v4", 7 },
187 { "v5", 8 },
188 { "v6", 9 },
189 { "v7", 10 },
190 { "v8", 11 },
191 /* Other platform-specific names for r9. */
192 { "sb", 9 },
193 { "tr", 9 },
194 /* Special names. */
195 { "ip", 12 },
123dc839 196 { "lr", 14 },
123dc839
DJ
197 /* Names used by GCC (not listed in the ARM EABI). */
198 { "sl", 10 },
123dc839
DJ
199 /* A special name from the older ATPCS. */
200 { "wr", 7 },
201};
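/* These aliases are registered as user registers elsewhere in this file
   (via user_reg_add), so that, for example, "print $a1" should show the
   same value as "print $r0", and "print $v1" the same as "print $r4". */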
bc90b915 202
123dc839 203static const char *const arm_register_names[] =
da59e081
JM
204{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
94c30b78 210 "fps", "cpsr" }; /* 24 25 */
ed9a39eb 211
65b48a81
PB
212/* Holds the current set of options to be passed to the disassembler. */
213static char *arm_disassembler_options;
214
afd7eef0
RE
215/* Valid register name styles. */
216static const char **valid_disassembly_styles;
ed9a39eb 217
afd7eef0
RE
218/* Disassembly style to use. Default to "std" register names. */
219static const char *disassembly_style;
96baa820 220
ed9a39eb 221/* This is used to keep the bfd arch_info in sync with the disassembly
afd7eef0
RE
222 style. */
223static void set_disassembly_style_sfunc(char *, int,
ed9a39eb 224 struct cmd_list_element *);
65b48a81
PB
225static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
227 const char *);
ed9a39eb 228
05d1431c
PA
229static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 struct regcache *regcache,
231 int regnum, gdb_byte *buf);
58d6951d
DJ
232static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
235
e7cf25a8 236static CORE_ADDR
553cb527 237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
e7cf25a8
YQ
238
239
d9311bfa
AT
240/* get_next_pcs operations. */
241static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
ed443b61
YQ
245 arm_get_next_pcs_is_thumb,
246 NULL,
d9311bfa
AT
247};
248
9b8d791a 249struct arm_prologue_cache
c3b4394c 250{
eb5492fa
DJ
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
254 CORE_ADDR prev_sp;
255
4be43953
DJ
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
eb5492fa 259
c3b4394c 260 int framesize;
eb5492fa
DJ
261
262 /* The register used to hold the frame pointer for this frame. */
c3b4394c 263 int framereg;
eb5492fa
DJ
264
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg *saved_regs;
c3b4394c 267};
ed9a39eb 268
0d39a070
DJ
269static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
273
cca44b1b
JB
274/* Architecture version for displaced stepping. This effects the behaviour of
275 certain instructions, and really should not be hard-wired. */
276
277#define DISPLACED_STEPPING_ARCH_VERSION 5
278
94c30b78 279/* Set to true if the 32-bit mode is in use. */
c906108c
SS
280
281int arm_apcs_32 = 1;
282
9779414d
DJ
283/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
284
478fd957 285int
9779414d
DJ
286arm_psr_thumb_bit (struct gdbarch *gdbarch)
287{
288 if (gdbarch_tdep (gdbarch)->is_m)
289 return XPSR_T;
290 else
291 return CPSR_T;
292}
293
d0e59a68
AT
294/* Determine if the processor is currently executing in Thumb mode. */
295
296int
297arm_is_thumb (struct regcache *regcache)
298{
299 ULONGEST cpsr;
300 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
301
302 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
303
304 return (cpsr & t_bit) != 0;
305}
306
b39cc962
DJ
307/* Determine if FRAME is executing in Thumb mode. */
308
25b41d01 309int
b39cc962
DJ
310arm_frame_is_thumb (struct frame_info *frame)
311{
312 CORE_ADDR cpsr;
9779414d 313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
b39cc962
DJ
314
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
320
9779414d 321 return (cpsr & t_bit) != 0;
b39cc962
DJ
322}
323
60c5725c
DJ
324/* Callback for VEC_lower_bound. */
325
326static inline int
327arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
328 const struct arm_mapping_symbol *rhs)
329{
330 return lhs->value < rhs->value;
331}
332
f9d67f43
DJ
333/* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
c906108c 336
f9d67f43
DJ
337static char
338arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
c906108c 339{
60c5725c 340 struct obj_section *sec;
0428b8f5 341
60c5725c
DJ
342 /* If there are mapping symbols, consult them. */
343 sec = find_pc_section (memaddr);
344 if (sec != NULL)
345 {
346 struct arm_per_objfile *data;
347 VEC(arm_mapping_symbol_s) *map;
aded6f54
PA
348 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
349 0 };
60c5725c
DJ
350 unsigned int idx;
351
9a3c8263
SM
352 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
353 arm_objfile_data_key);
60c5725c
DJ
354 if (data != NULL)
355 {
356 map = data->section_maps[sec->the_bfd_section->index];
357 if (!VEC_empty (arm_mapping_symbol_s, map))
358 {
359 struct arm_mapping_symbol *map_sym;
360
361 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
362 arm_compare_mapping_symbols);
363
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx < VEC_length (arm_mapping_symbol_s, map))
369 {
370 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
371 if (map_sym->value == map_key.value)
f9d67f43
DJ
372 {
373 if (start)
374 *start = map_sym->value + obj_section_addr (sec);
375 return map_sym->type;
376 }
60c5725c
DJ
377 }
378
379 if (idx > 0)
380 {
381 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
f9d67f43
DJ
382 if (start)
383 *start = map_sym->value + obj_section_addr (sec);
384 return map_sym->type;
60c5725c
DJ
385 }
386 }
387 }
388 }
389
f9d67f43
DJ
390 return 0;
391}
392
393/* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
396
e3039479 397int
9779414d 398arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
f9d67f43 399{
7cbd4a93 400 struct bound_minimal_symbol sym;
f9d67f43 401 char type;
a42244db
YQ
402 struct displaced_step_closure* dsc
403 = get_displaced_step_closure_by_addr(memaddr);
404
405 /* If checking the mode of a displaced instruction in the copy area, the mode
 406 should be determined by the instruction at the original address. */
407 if (dsc)
408 {
409 if (debug_displaced)
410 fprintf_unfiltered (gdb_stdlog,
411 "displaced: check mode of %.8lx instead of %.8lx\n",
412 (unsigned long) dsc->insn_addr,
413 (unsigned long) memaddr);
414 memaddr = dsc->insn_addr;
415 }
f9d67f43
DJ
416
417 /* If bit 0 of the address is set, assume this is a Thumb address. */
418 if (IS_THUMB_ADDR (memaddr))
419 return 1;
420
421 /* If the user wants to override the symbol table, let him. */
422 if (strcmp (arm_force_mode_string, "arm") == 0)
423 return 0;
424 if (strcmp (arm_force_mode_string, "thumb") == 0)
425 return 1;
426
9779414d
DJ
427 /* ARM v6-M and v7-M are always in Thumb mode. */
428 if (gdbarch_tdep (gdbarch)->is_m)
429 return 1;
430
f9d67f43
DJ
431 /* If there are mapping symbols, consult them. */
432 type = arm_find_mapping_symbol (memaddr, NULL);
433 if (type)
434 return type == 't';
435
ed9a39eb 436 /* Thumb functions have a "special" bit set in minimal symbols. */
c906108c 437 sym = lookup_minimal_symbol_by_pc (memaddr);
7cbd4a93
TT
438 if (sym.minsym)
439 return (MSYMBOL_IS_SPECIAL (sym.minsym));
0428b8f5
DJ
440
441 /* If the user wants to override the fallback mode, let them. */
442 if (strcmp (arm_fallback_mode_string, "arm") == 0)
443 return 0;
444 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
445 return 1;
446
447 /* If we couldn't find any symbol, but we're talking to a running
448 target, then trust the current value of $cpsr. This lets
449 "display/i $pc" always show the correct mode (though if there is
450 a symbol table we will not reach here, so it still may not be
18819fa6 451 displayed in the mode it will be executed). */
0428b8f5 452 if (target_has_registers)
18819fa6 453 return arm_frame_is_thumb (get_current_frame ());
0428b8f5
DJ
454
455 /* Otherwise we're out of luck; we assume ARM. */
456 return 0;
c906108c
SS
457}
458
ca90e760
FH
459/* Determine if the address specified equals any of these magic return
460 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
461 architectures.
462
463 From ARMv6-M Reference Manual B1.5.8
464 Table B1-5 Exception return behavior
465
466 EXC_RETURN Return To Return Stack
467 0xFFFFFFF1 Handler mode Main
468 0xFFFFFFF9 Thread mode Main
469 0xFFFFFFFD Thread mode Process
470
471 From ARMv7-M Reference Manual B1.5.8
472 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
473
474 EXC_RETURN Return To Return Stack
475 0xFFFFFFF1 Handler mode Main
476 0xFFFFFFF9 Thread mode Main
477 0xFFFFFFFD Thread mode Process
478
479 Table B1-9 EXC_RETURN definition of exception return behavior, with
480 FP
481
482 EXC_RETURN Return To Return Stack Frame Type
483 0xFFFFFFE1 Handler mode Main Extended
484 0xFFFFFFE9 Thread mode Main Extended
485 0xFFFFFFED Thread mode Process Extended
486 0xFFFFFFF1 Handler mode Main Basic
487 0xFFFFFFF9 Thread mode Main Basic
488 0xFFFFFFFD Thread mode Process Basic
489
490 For more details see "B1.5.8 Exception return behavior"
491 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
492
493static int
494arm_m_addr_is_magic (CORE_ADDR addr)
495{
496 switch (addr)
497 {
498 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
499 the exception return behavior. */
500 case 0xffffffe1:
501 case 0xffffffe9:
502 case 0xffffffed:
503 case 0xfffffff1:
504 case 0xfffffff9:
505 case 0xfffffffd:
506 /* Address is magic. */
507 return 1;
508
509 default:
510 /* Address is not magic. */
511 return 0;
512 }
513}
514
181c1381 515/* Remove useless bits from addresses in a running program. */
34e8f22d 516static CORE_ADDR
24568a2c 517arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
c906108c 518{
2ae28aa9
YQ
519 /* On M-profile devices, do not strip the low bit from EXC_RETURN
520 (the magic exception return address). */
521 if (gdbarch_tdep (gdbarch)->is_m
ca90e760 522 && arm_m_addr_is_magic (val))
2ae28aa9
YQ
523 return val;
524
a3a2ee65 525 if (arm_apcs_32)
dd6be234 526 return UNMAKE_THUMB_ADDR (val);
c906108c 527 else
a3a2ee65 528 return (val & 0x03fffffc);
c906108c
SS
529}
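/* For example, with arm_apcs_32 set (the usual 32-bit case), a value of
   0x8001 read from LR or a Thumb function pointer is reduced to 0x8000,
   since bit 0 only records the Thumb state.  In the legacy 26-bit mode,
   the 0x03fffffc mask also strips the condition-code and processor-mode
   bits that shared the PC register.  (Illustrative values.) */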
530
0d39a070 531/* Return 1 if PC is the start of a compiler helper function which
e0634ccf
UW
532 can be safely ignored during prologue skipping. IS_THUMB is true
533 if the function is known to be a Thumb function due to the way it
534 is being called. */
0d39a070 535static int
e0634ccf 536skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
0d39a070 537{
e0634ccf 538 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7cbd4a93 539 struct bound_minimal_symbol msym;
0d39a070
DJ
540
541 msym = lookup_minimal_symbol_by_pc (pc);
7cbd4a93 542 if (msym.minsym != NULL
77e371c0 543 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
efd66ac6 544 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
e0634ccf 545 {
efd66ac6 546 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
0d39a070 547
e0634ccf
UW
548 /* The GNU linker's Thumb call stub to foo is named
549 __foo_from_thumb. */
550 if (strstr (name, "_from_thumb") != NULL)
551 name += 2;
0d39a070 552
e0634ccf
UW
553 /* On soft-float targets, __truncdfsf2 is called to convert promoted
554 arguments to their argument types in non-prototyped
555 functions. */
61012eef 556 if (startswith (name, "__truncdfsf2"))
e0634ccf 557 return 1;
61012eef 558 if (startswith (name, "__aeabi_d2f"))
e0634ccf 559 return 1;
0d39a070 560
e0634ccf 561 /* Internal functions related to thread-local storage. */
61012eef 562 if (startswith (name, "__tls_get_addr"))
e0634ccf 563 return 1;
61012eef 564 if (startswith (name, "__aeabi_read_tp"))
e0634ccf
UW
565 return 1;
566 }
567 else
568 {
569 /* If we run against a stripped glibc, we may be unable to identify
570 special functions by name. Check for one important case,
571 __aeabi_read_tp, by comparing the *code* against the default
572 implementation (this is hand-written ARM assembler in glibc). */
573
574 if (!is_thumb
198cd59d 575 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
e0634ccf 576 == 0xe3e00a0f /* mov r0, #0xffff0fff */
198cd59d 577 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
e0634ccf
UW
578 == 0xe240f01f) /* sub pc, r0, #31 */
579 return 1;
580 }
ec3d575a 581
0d39a070
DJ
582 return 0;
583}
584
621c6d5b
YQ
585/* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
 586 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
 587 the instruction. */
588#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
589 ((bits ((insn1), 0, 3) << 12) \
590 | (bits ((insn1), 10, 10) << 11) \
591 | (bits ((insn2), 12, 14) << 8) \
592 | bits ((insn2), 0, 7))
593
594/* Extract the immediate from a movw/movt instruction of encoding A. INSN is
 595 the 32-bit instruction. */
596#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
597 ((bits ((insn), 16, 19) << 12) \
598 | bits ((insn), 0, 11))
599
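/* As an illustrative check of the T encoding: "movw r0, #0x1234" is
   encoded as INSN1 = 0xf241, INSN2 = 0x2034, i.e. imm4 = 1, i = 0,
   imm3 = 2 and imm8 = 0x34, so EXTRACT_MOVW_MOVT_IMM_T yields
   (1 << 12) | (0 << 11) | (2 << 8) | 0x34 == 0x1234. */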
ec3d575a
UW
600/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
601
602static unsigned int
603thumb_expand_immediate (unsigned int imm)
604{
605 unsigned int count = imm >> 7;
606
607 if (count < 8)
608 switch (count / 2)
609 {
610 case 0:
611 return imm & 0xff;
612 case 1:
613 return (imm & 0xff) | ((imm & 0xff) << 16);
614 case 2:
615 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
616 case 3:
617 return (imm & 0xff) | ((imm & 0xff) << 8)
618 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
619 }
620
621 return (0x80 | (imm & 0x7f)) << (32 - count);
622}
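/* Two illustrative expansions: imm = 0x1ab falls in the "count < 8"
   cases and yields 0x00ab00ab; imm = 0x4ff has count == 9 and yields
   (0x80 | 0x7f) << 23 == 0x7f800000, i.e. 0xff rotated right by 9. */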
623
540314bd
YQ
624/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
625 epilogue, 0 otherwise. */
626
627static int
628thumb_instruction_restores_sp (unsigned short insn)
629{
630 return (insn == 0x46bd /* mov sp, r7 */
631 || (insn & 0xff80) == 0xb000 /* add sp, imm */
632 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
633}
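/* For instance, 0xb008 ("add sp, #32") and 0xbd10 ("pop {r4, pc}") both
   match the patterns above (illustrative encodings). */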
634
29d73ae4
DJ
635/* Analyze a Thumb prologue, looking for a recognizable stack frame
636 and frame pointer. Scan until we encounter a store that could
0d39a070
DJ
637 clobber the stack frame unexpectedly, or an unknown instruction.
638 Return the last address which is definitely safe to skip for an
639 initial breakpoint. */
c906108c
SS
640
641static CORE_ADDR
29d73ae4
DJ
642thumb_analyze_prologue (struct gdbarch *gdbarch,
643 CORE_ADDR start, CORE_ADDR limit,
644 struct arm_prologue_cache *cache)
c906108c 645{
0d39a070 646 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 647 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
29d73ae4
DJ
648 int i;
649 pv_t regs[16];
650 struct pv_area *stack;
651 struct cleanup *back_to;
652 CORE_ADDR offset;
ec3d575a 653 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 654
29d73ae4
DJ
655 for (i = 0; i < 16; i++)
656 regs[i] = pv_register (i, 0);
55f960e1 657 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
29d73ae4
DJ
658 back_to = make_cleanup_free_pv_area (stack);
659
29d73ae4 660 while (start < limit)
c906108c 661 {
29d73ae4
DJ
662 unsigned short insn;
663
198cd59d 664 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 665
94c30b78 666 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 667 {
29d73ae4
DJ
668 int regno;
669 int mask;
4be43953
DJ
670
671 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
672 break;
29d73ae4
DJ
673
674 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
675 whether to save LR (R14). */
676 mask = (insn & 0xff) | ((insn & 0x100) << 6);
677
678 /* Calculate offsets of saved R0-R7 and LR. */
679 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
680 if (mask & (1 << regno))
681 {
29d73ae4
DJ
682 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
683 -4);
684 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
685 }
da59e081 686 }
1db01f22 687 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
da59e081 688 {
29d73ae4 689 offset = (insn & 0x7f) << 2; /* get scaled offset */
1db01f22
YQ
690 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
691 -offset);
da59e081 692 }
808f7ab1
YQ
693 else if (thumb_instruction_restores_sp (insn))
694 {
695 /* Don't scan past the epilogue. */
696 break;
697 }
0d39a070
DJ
698 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
699 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
700 (insn & 0xff) << 2);
701 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
702 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
703 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
704 bits (insn, 6, 8));
705 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
706 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
707 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
708 bits (insn, 0, 7));
709 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
710 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
711 && pv_is_constant (regs[bits (insn, 3, 5)]))
712 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
713 regs[bits (insn, 6, 8)]);
714 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
715 && pv_is_constant (regs[bits (insn, 3, 6)]))
716 {
717 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
718 int rm = bits (insn, 3, 6);
719 regs[rd] = pv_add (regs[rd], regs[rm]);
720 }
29d73ae4 721 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 722 {
29d73ae4
DJ
723 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
724 int src_reg = (insn & 0x78) >> 3;
725 regs[dst_reg] = regs[src_reg];
da59e081 726 }
29d73ae4 727 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 728 {
29d73ae4
DJ
729 /* Handle stores to the stack. Normally pushes are used,
730 but with GCC -mtpcs-frame, there may be other stores
731 in the prologue to create the frame. */
732 int regno = (insn >> 8) & 0x7;
733 pv_t addr;
734
735 offset = (insn & 0xff) << 2;
736 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
737
738 if (pv_area_store_would_trash (stack, addr))
739 break;
740
741 pv_area_store (stack, addr, 4, regs[regno]);
da59e081 742 }
0d39a070
DJ
743 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
744 {
745 int rd = bits (insn, 0, 2);
746 int rn = bits (insn, 3, 5);
747 pv_t addr;
748
749 offset = bits (insn, 6, 10) << 2;
750 addr = pv_add_constant (regs[rn], offset);
751
752 if (pv_area_store_would_trash (stack, addr))
753 break;
754
755 pv_area_store (stack, addr, 4, regs[rd]);
756 }
757 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
758 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
759 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
760 /* Ignore stores of argument registers to the stack. */
761 ;
762 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
763 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
764 /* Ignore block loads from the stack, potentially copying
765 parameters from memory. */
766 ;
767 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
768 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
769 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
770 /* Similarly ignore single loads from the stack. */
771 ;
772 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
773 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
774 /* Skip register copies, i.e. saves to another register
775 instead of the stack. */
776 ;
777 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
778 /* Recognize constant loads; even with small stacks these are necessary
779 on Thumb. */
780 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
781 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
782 {
783 /* Constant pool loads, for the same reason. */
784 unsigned int constant;
785 CORE_ADDR loc;
786
787 loc = start + 4 + bits (insn, 0, 7) * 4;
788 constant = read_memory_unsigned_integer (loc, 4, byte_order);
789 regs[bits (insn, 8, 10)] = pv_constant (constant);
790 }
db24da6d 791 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 792 {
0d39a070
DJ
793 unsigned short inst2;
794
198cd59d
YQ
795 inst2 = read_code_unsigned_integer (start + 2, 2,
796 byte_order_for_code);
0d39a070
DJ
797
798 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
799 {
800 /* BL, BLX. Allow some special function calls when
801 skipping the prologue; GCC generates these before
802 storing arguments to the stack. */
803 CORE_ADDR nextpc;
804 int j1, j2, imm1, imm2;
805
806 imm1 = sbits (insn, 0, 10);
807 imm2 = bits (inst2, 0, 10);
808 j1 = bit (inst2, 13);
809 j2 = bit (inst2, 11);
810
811 offset = ((imm1 << 12) + (imm2 << 1));
812 offset ^= ((!j2) << 22) | ((!j1) << 23);
813
814 nextpc = start + 4 + offset;
815 /* For BLX make sure to clear the low bits. */
816 if (bit (inst2, 12) == 0)
817 nextpc = nextpc & 0xfffffffc;
818
e0634ccf
UW
819 if (!skip_prologue_function (gdbarch, nextpc,
820 bit (inst2, 12) != 0))
0d39a070
DJ
821 break;
822 }
ec3d575a 823
0963b4bd
MS
824 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
825 { registers } */
ec3d575a
UW
826 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
827 {
828 pv_t addr = regs[bits (insn, 0, 3)];
829 int regno;
830
831 if (pv_area_store_would_trash (stack, addr))
832 break;
833
834 /* Calculate offsets of saved registers. */
835 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
836 if (inst2 & (1 << regno))
837 {
838 addr = pv_add_constant (addr, -4);
839 pv_area_store (stack, addr, 4, regs[regno]);
840 }
841
842 if (insn & 0x0020)
843 regs[bits (insn, 0, 3)] = addr;
844 }
845
0963b4bd
MS
846 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
847 [Rn, #+/-imm]{!} */
ec3d575a
UW
848 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
849 {
850 int regno1 = bits (inst2, 12, 15);
851 int regno2 = bits (inst2, 8, 11);
852 pv_t addr = regs[bits (insn, 0, 3)];
853
854 offset = inst2 & 0xff;
855 if (insn & 0x0080)
856 addr = pv_add_constant (addr, offset);
857 else
858 addr = pv_add_constant (addr, -offset);
859
860 if (pv_area_store_would_trash (stack, addr))
861 break;
862
863 pv_area_store (stack, addr, 4, regs[regno1]);
864 pv_area_store (stack, pv_add_constant (addr, 4),
865 4, regs[regno2]);
866
867 if (insn & 0x0020)
868 regs[bits (insn, 0, 3)] = addr;
869 }
870
871 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
872 && (inst2 & 0x0c00) == 0x0c00
873 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
874 {
875 int regno = bits (inst2, 12, 15);
876 pv_t addr = regs[bits (insn, 0, 3)];
877
878 offset = inst2 & 0xff;
879 if (inst2 & 0x0200)
880 addr = pv_add_constant (addr, offset);
881 else
882 addr = pv_add_constant (addr, -offset);
883
884 if (pv_area_store_would_trash (stack, addr))
885 break;
886
887 pv_area_store (stack, addr, 4, regs[regno]);
888
889 if (inst2 & 0x0100)
890 regs[bits (insn, 0, 3)] = addr;
891 }
892
893 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
894 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
895 {
896 int regno = bits (inst2, 12, 15);
897 pv_t addr;
898
899 offset = inst2 & 0xfff;
900 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
901
902 if (pv_area_store_would_trash (stack, addr))
903 break;
904
905 pv_area_store (stack, addr, 4, regs[regno]);
906 }
907
908 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 909 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 910 /* Ignore stores of argument registers to the stack. */
0d39a070 911 ;
ec3d575a
UW
912
913 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
914 && (inst2 & 0x0d00) == 0x0c00
0d39a070 915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 916 /* Ignore stores of argument registers to the stack. */
0d39a070 917 ;
ec3d575a 918
0963b4bd
MS
919 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
920 { registers } */
ec3d575a
UW
921 && (inst2 & 0x8000) == 0x0000
922 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
923 /* Ignore block loads from the stack, potentially copying
924 parameters from memory. */
0d39a070 925 ;
ec3d575a 926
0963b4bd
MS
927 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
928 [Rn, #+/-imm] */
0d39a070 929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 930 /* Similarly ignore dual loads from the stack. */
0d39a070 931 ;
ec3d575a
UW
932
933 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
934 && (inst2 & 0x0d00) == 0x0c00
0d39a070 935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 936 /* Similarly ignore single loads from the stack. */
0d39a070 937 ;
ec3d575a
UW
938
939 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 940 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 941 /* Similarly ignore single loads from the stack. */
0d39a070 942 ;
ec3d575a
UW
943
944 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
945 && (inst2 & 0x8000) == 0x0000)
946 {
947 unsigned int imm = ((bits (insn, 10, 10) << 11)
948 | (bits (inst2, 12, 14) << 8)
949 | bits (inst2, 0, 7));
950
951 regs[bits (inst2, 8, 11)]
952 = pv_add_constant (regs[bits (insn, 0, 3)],
953 thumb_expand_immediate (imm));
954 }
955
956 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
957 && (inst2 & 0x8000) == 0x0000)
0d39a070 958 {
ec3d575a
UW
959 unsigned int imm = ((bits (insn, 10, 10) << 11)
960 | (bits (inst2, 12, 14) << 8)
961 | bits (inst2, 0, 7));
962
963 regs[bits (inst2, 8, 11)]
964 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
965 }
966
967 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
968 && (inst2 & 0x8000) == 0x0000)
969 {
970 unsigned int imm = ((bits (insn, 10, 10) << 11)
971 | (bits (inst2, 12, 14) << 8)
972 | bits (inst2, 0, 7));
973
974 regs[bits (inst2, 8, 11)]
975 = pv_add_constant (regs[bits (insn, 0, 3)],
976 - (CORE_ADDR) thumb_expand_immediate (imm));
977 }
978
979 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
980 && (inst2 & 0x8000) == 0x0000)
981 {
982 unsigned int imm = ((bits (insn, 10, 10) << 11)
983 | (bits (inst2, 12, 14) << 8)
984 | bits (inst2, 0, 7));
985
986 regs[bits (inst2, 8, 11)]
987 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
988 }
989
990 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
991 {
992 unsigned int imm = ((bits (insn, 10, 10) << 11)
993 | (bits (inst2, 12, 14) << 8)
994 | bits (inst2, 0, 7));
995
996 regs[bits (inst2, 8, 11)]
997 = pv_constant (thumb_expand_immediate (imm));
998 }
999
1000 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1001 {
621c6d5b
YQ
1002 unsigned int imm
1003 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
ec3d575a
UW
1004
1005 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1006 }
1007
1008 else if (insn == 0xea5f /* mov.w Rd,Rm */
1009 && (inst2 & 0xf0f0) == 0)
1010 {
1011 int dst_reg = (inst2 & 0x0f00) >> 8;
1012 int src_reg = inst2 & 0xf;
1013 regs[dst_reg] = regs[src_reg];
1014 }
1015
1016 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1017 {
1018 /* Constant pool loads. */
1019 unsigned int constant;
1020 CORE_ADDR loc;
1021
cac395ea 1022 offset = bits (inst2, 0, 11);
ec3d575a
UW
1023 if (insn & 0x0080)
1024 loc = start + 4 + offset;
1025 else
1026 loc = start + 4 - offset;
1027
1028 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1029 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1030 }
1031
1032 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1033 {
1034 /* Constant pool loads. */
1035 unsigned int constant;
1036 CORE_ADDR loc;
1037
cac395ea 1038 offset = bits (inst2, 0, 7) << 2;
ec3d575a
UW
1039 if (insn & 0x0080)
1040 loc = start + 4 + offset;
1041 else
1042 loc = start + 4 - offset;
1043
1044 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1045 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1046
1047 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1048 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1049 }
1050
1051 else if (thumb2_instruction_changes_pc (insn, inst2))
1052 {
1053 /* Don't scan past anything that might change control flow. */
0d39a070
DJ
1054 break;
1055 }
ec3d575a
UW
1056 else
1057 {
1058 /* The optimizer might shove anything into the prologue,
1059 so we just skip what we don't recognize. */
1060 unrecognized_pc = start;
1061 }
0d39a070
DJ
1062
1063 start += 2;
1064 }
ec3d575a 1065 else if (thumb_instruction_changes_pc (insn))
3d74b771 1066 {
ec3d575a 1067 /* Don't scan past anything that might change control flow. */
da3c6d4a 1068 break;
3d74b771 1069 }
ec3d575a
UW
1070 else
1071 {
1072 /* The optimizer might shove anything into the prologue,
1073 so we just skip what we don't recognize. */
1074 unrecognized_pc = start;
1075 }
29d73ae4
DJ
1076
1077 start += 2;
c906108c
SS
1078 }
1079
0d39a070
DJ
1080 if (arm_debug)
1081 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1082 paddress (gdbarch, start));
1083
ec3d575a
UW
1084 if (unrecognized_pc == 0)
1085 unrecognized_pc = start;
1086
29d73ae4
DJ
1087 if (cache == NULL)
1088 {
1089 do_cleanups (back_to);
ec3d575a 1090 return unrecognized_pc;
29d73ae4
DJ
1091 }
1092
29d73ae4
DJ
1093 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1094 {
1095 /* Frame pointer is fp. Frame size is constant. */
1096 cache->framereg = ARM_FP_REGNUM;
1097 cache->framesize = -regs[ARM_FP_REGNUM].k;
1098 }
1099 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1100 {
1101 /* Frame pointer is r7. Frame size is constant. */
1102 cache->framereg = THUMB_FP_REGNUM;
1103 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1104 }
72a2e3dc 1105 else
29d73ae4
DJ
1106 {
1107 /* Try the stack pointer... this is a bit desperate. */
1108 cache->framereg = ARM_SP_REGNUM;
1109 cache->framesize = -regs[ARM_SP_REGNUM].k;
1110 }
29d73ae4
DJ
1111
1112 for (i = 0; i < 16; i++)
1113 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1114 cache->saved_regs[i].addr = offset;
1115
1116 do_cleanups (back_to);
ec3d575a 1117 return unrecognized_pc;
c906108c
SS
1118}
1119
621c6d5b
YQ
1120
1121/* Try to analyze the instructions starting from PC, which load the symbol
 1122 __stack_chk_guard. Return the address of the instruction after the ones
 1123 loading this symbol, set the destination register number in *DESTREG, and
 1124 set the size of the loading instructions in *OFFSET. Return 0 if the
 1125 instructions are not recognized. */
1126
1127static CORE_ADDR
1128arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1129 unsigned int *destreg, int *offset)
1130{
1131 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1132 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1133 unsigned int low, high, address;
1134
1135 address = 0;
1136 if (is_thumb)
1137 {
1138 unsigned short insn1
198cd59d 1139 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
621c6d5b
YQ
1140
1141 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1142 {
1143 *destreg = bits (insn1, 8, 10);
1144 *offset = 2;
6ae274b7
YQ
1145 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1146 address = read_memory_unsigned_integer (address, 4,
1147 byte_order_for_code);
621c6d5b
YQ
1148 }
1149 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1150 {
1151 unsigned short insn2
198cd59d 1152 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
621c6d5b
YQ
1153
1154 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1155
1156 insn1
198cd59d 1157 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
621c6d5b 1158 insn2
198cd59d 1159 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
621c6d5b
YQ
1160
1161 /* movt Rd, #const */
1162 if ((insn1 & 0xfbc0) == 0xf2c0)
1163 {
1164 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1165 *destreg = bits (insn2, 8, 11);
1166 *offset = 8;
1167 address = (high << 16 | low);
1168 }
1169 }
1170 }
1171 else
1172 {
2e9e421f 1173 unsigned int insn
198cd59d 1174 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
2e9e421f 1175
6ae274b7 1176 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
2e9e421f 1177 {
6ae274b7
YQ
1178 address = bits (insn, 0, 11) + pc + 8;
1179 address = read_memory_unsigned_integer (address, 4,
1180 byte_order_for_code);
1181
2e9e421f
UW
1182 *destreg = bits (insn, 12, 15);
1183 *offset = 4;
1184 }
1185 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1186 {
1187 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1188
1189 insn
198cd59d 1190 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
2e9e421f
UW
1191
1192 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1193 {
1194 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1195 *destreg = bits (insn, 12, 15);
1196 *offset = 8;
1197 address = (high << 16 | low);
1198 }
1199 }
621c6d5b
YQ
1200 }
1201
1202 return address;
1203}
1204
1205/* Try to skip a sequence of instructions used for stack protector. If PC
0963b4bd
MS
1206 points to the first instruction of this sequence, return the address of
 1207 the first instruction after this sequence; otherwise, return the original PC.
621c6d5b
YQ
1208
1209 On ARM, this sequence of instructions is composed of three main steps:
1210 Step 1: load symbol __stack_chk_guard,
1211 Step 2: load from address of __stack_chk_guard,
1212 Step 3: store it to somewhere else.
1213
1214 Usually, the instructions in steps 2 and 3 are the same across ARM
 1215 architectures. Step 2 is one instruction, 'ldr Rx, [Rn, #0]', and
 1216 step 3 is also one instruction, 'str Rx, [r7, #immd]'. However, the
 1217 instructions in step 1 vary between ARM architectures. On ARMv7,
 1218 they are:
1219
1220 movw Rn, #:lower16:__stack_chk_guard
1221 movt Rn, #:upper16:__stack_chk_guard
1222
1223 On ARMv5t, it is,
1224
1225 ldr Rn, .Label
1226 ....
1227 .Label:
1228 .word __stack_chk_guard
1229
1230 Since ldr/str are very common instructions, we can't use them alone as
 1231 the 'fingerprint' or 'signature' of a stack protector sequence. Here we
 1232 choose the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
 1233 if not stripped, as the 'fingerprint' of a stack protector code sequence. */
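/* As an illustration only (register and offset choices are arbitrary),
   a typical ARMv7 Thumb-2 sequence recognized here would be:

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard
	ldr	r3, [r3, #0]
	str	r3, [r7, #12]

   in which case the address just past the final str is returned. */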
1234
1235static CORE_ADDR
1236arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1237{
1238 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
22e048c9 1239 unsigned int basereg;
7cbd4a93 1240 struct bound_minimal_symbol stack_chk_guard;
621c6d5b
YQ
1241 int offset;
1242 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1243 CORE_ADDR addr;
1244
1245 /* Try to parse the instructions in Step 1. */
1246 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1247 &basereg, &offset);
1248 if (!addr)
1249 return pc;
1250
1251 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
6041179a
JB
1252 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1253 Otherwise, this sequence cannot be for stack protector. */
1254 if (stack_chk_guard.minsym == NULL
61012eef 1255 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
621c6d5b
YQ
1256 return pc;
1257
1258 if (is_thumb)
1259 {
1260 unsigned int destreg;
1261 unsigned short insn
198cd59d 1262 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
621c6d5b
YQ
1263
1264 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1265 if ((insn & 0xf800) != 0x6800)
1266 return pc;
1267 if (bits (insn, 3, 5) != basereg)
1268 return pc;
1269 destreg = bits (insn, 0, 2);
1270
198cd59d
YQ
1271 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1272 byte_order_for_code);
621c6d5b
YQ
1273 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1274 if ((insn & 0xf800) != 0x6000)
1275 return pc;
1276 if (destreg != bits (insn, 0, 2))
1277 return pc;
1278 }
1279 else
1280 {
1281 unsigned int destreg;
1282 unsigned int insn
198cd59d 1283 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
621c6d5b
YQ
1284
1285 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1286 if ((insn & 0x0e500000) != 0x04100000)
1287 return pc;
1288 if (bits (insn, 16, 19) != basereg)
1289 return pc;
1290 destreg = bits (insn, 12, 15);
1291 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
198cd59d 1292 insn = read_code_unsigned_integer (pc + offset + 4,
621c6d5b
YQ
1293 4, byte_order_for_code);
1294 if ((insn & 0x0e500000) != 0x04000000)
1295 return pc;
1296 if (bits (insn, 12, 15) != destreg)
1297 return pc;
1298 }
1299 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1300 on arm. */
1301 if (is_thumb)
1302 return pc + offset + 4;
1303 else
1304 return pc + offset + 8;
1305}
1306
da3c6d4a
MS
1307/* Advance the PC across any function entry prologue instructions to
1308 reach some "real" code.
34e8f22d
RE
1309
1310 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1311 prologue:
c906108c 1312
c5aa993b
JM
1313 mov ip, sp
1314 [stmfd sp!, {a1,a2,a3,a4}]
1315 stmfd sp!, {...,fp,ip,lr,pc}
ed9a39eb
JM
1316 [stfe f7, [sp, #-12]!]
1317 [stfe f6, [sp, #-12]!]
1318 [stfe f5, [sp, #-12]!]
1319 [stfe f4, [sp, #-12]!]
0963b4bd 1320 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1321
34e8f22d 1322static CORE_ADDR
6093d2eb 1323arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1324{
a89fea3c 1325 CORE_ADDR func_addr, limit_pc;
c906108c 1326
a89fea3c
JL
1327 /* See if we can determine the end of the prologue via the symbol table.
1328 If so, then return either PC, or the PC after the prologue, whichever
1329 is greater. */
1330 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1331 {
d80b854b
UW
1332 CORE_ADDR post_prologue_pc
1333 = skip_prologue_using_sal (gdbarch, func_addr);
43f3e411 1334 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
0d39a070 1335
621c6d5b
YQ
1336 if (post_prologue_pc)
1337 post_prologue_pc
1338 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1339
1340
0d39a070
DJ
1341 /* GCC always emits a line note before the prologue and another
1342 one after, even if the two are at the same address or on the
1343 same line. Take advantage of this so that we do not need to
1344 know every instruction that might appear in the prologue. We
1345 will have producer information for most binaries; if it is
1346 missing (e.g. for -gstabs), assume the GNU tools. */
1347 if (post_prologue_pc
43f3e411
DE
1348 && (cust == NULL
1349 || COMPUNIT_PRODUCER (cust) == NULL
61012eef
GB
1350 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1351 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
0d39a070
DJ
1352 return post_prologue_pc;
1353
a89fea3c 1354 if (post_prologue_pc != 0)
0d39a070
DJ
1355 {
1356 CORE_ADDR analyzed_limit;
1357
1358 /* For non-GCC compilers, make sure the entire line is an
1359 acceptable prologue; GDB will round this function's
1360 return value up to the end of the following line so we
1361 can not skip just part of a line (and we do not want to).
1362
1363 RealView does not treat the prologue specially, but does
1364 associate prologue code with the opening brace; so this
1365 lets us skip the first line if we think it is the opening
1366 brace. */
9779414d 1367 if (arm_pc_is_thumb (gdbarch, func_addr))
0d39a070
DJ
1368 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1369 post_prologue_pc, NULL);
1370 else
1371 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1372 post_prologue_pc, NULL);
1373
1374 if (analyzed_limit != post_prologue_pc)
1375 return func_addr;
1376
1377 return post_prologue_pc;
1378 }
c906108c
SS
1379 }
1380
a89fea3c
JL
1381 /* Can't determine prologue from the symbol table, need to examine
1382 instructions. */
c906108c 1383
a89fea3c
JL
1384 /* Find an upper limit on the function prologue using the debug
1385 information. If the debug information could not be used to provide
1386 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1387 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1388 limit_pc = skip_prologue_using_sal (gdbarch, pc);
a89fea3c
JL
1389 if (limit_pc == 0)
1390 limit_pc = pc + 64; /* Magic. */
1391
c906108c 1392
29d73ae4 1393 /* Check if this is Thumb code. */
9779414d 1394 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1395 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
21daaaaf
YQ
1396 else
1397 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
c906108c 1398}
94c30b78 1399
c5aa993b 1400/* *INDENT-OFF* */
c906108c
SS
1401/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1402 This function decodes a Thumb function prologue to determine:
1403 1) the size of the stack frame
1404 2) which registers are saved on it
1405 3) the offsets of saved regs
1406 4) the offset from the stack pointer to the frame pointer
c906108c 1407
da59e081
JM
1408 A typical Thumb function prologue would create this stack frame
1409 (offsets relative to FP)
c906108c
SS
1410 old SP -> 24 stack parameters
1411 20 LR
1412 16 R7
1413 R7 -> 0 local variables (16 bytes)
1414 SP -> -12 additional stack space (12 bytes)
1415 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1416 12 bytes. The frame register is R7.
da59e081 1417
da3c6d4a
MS
1418 The comments for thumb_analyze_prologue() describe the algorithm we use
 1419 to detect the end of the prologue. */
c5aa993b
JM
1420/* *INDENT-ON* */
1421
c906108c 1422static void
be8626e0 1423thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1424 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1425{
1426 CORE_ADDR prologue_start;
1427 CORE_ADDR prologue_end;
c906108c 1428
b39cc962
DJ
1429 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1430 &prologue_end))
c906108c 1431 {
ec3d575a
UW
1432 /* See comment in arm_scan_prologue for an explanation of
1433 this heuristic. */
1434 if (prologue_end > prologue_start + 64)
1435 {
1436 prologue_end = prologue_start + 64;
1437 }
c906108c
SS
1438 }
1439 else
f7060f85
DJ
1440 /* We're in the boondocks: we have no idea where the start of the
1441 function is. */
1442 return;
c906108c 1443
325fac50 1444 prologue_end = std::min (prologue_end, prev_pc);
c906108c 1445
be8626e0 1446 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1447}
1448
f303bc3e
YQ
1449/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1450 otherwise. */
1451
1452static int
1453arm_instruction_restores_sp (unsigned int insn)
1454{
1455 if (bits (insn, 28, 31) != INST_NV)
1456 {
1457 if ((insn & 0x0df0f000) == 0x0080d000
1458 /* ADD SP (register or immediate). */
1459 || (insn & 0x0df0f000) == 0x0040d000
1460 /* SUB SP (register or immediate). */
1461 || (insn & 0x0ffffff0) == 0x01a0d000
1462 /* MOV SP. */
1463 || (insn & 0x0fff0000) == 0x08bd0000
1464 /* POP (LDMIA). */
1465 || (insn & 0x0fff0000) == 0x049d0000)
1466 /* POP of a single register. */
1467 return 1;
1468 }
1469
1470 return 0;
1471}
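/* For instance, 0xe28dd018 ("add sp, sp, #24") and 0xe8bd8810
   ("pop {r4, fp, pc}") both satisfy this test (illustrative encodings). */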
1472
0d39a070
DJ
1473/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1474 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1475 fill it in. Return the first address not recognized as a prologue
1476 instruction.
eb5492fa 1477
0d39a070
DJ
1478 We recognize all the instructions typically found in ARM prologues,
1479 plus harmless instructions which can be skipped (either for analysis
1480 purposes, or a more restrictive set that can be skipped when finding
1481 the end of the prologue). */
1482
1483static CORE_ADDR
1484arm_analyze_prologue (struct gdbarch *gdbarch,
1485 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1486 struct arm_prologue_cache *cache)
1487{
0d39a070
DJ
1488 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1489 int regno;
1490 CORE_ADDR offset, current_pc;
1491 pv_t regs[ARM_FPS_REGNUM];
1492 struct pv_area *stack;
1493 struct cleanup *back_to;
0d39a070
DJ
1494 CORE_ADDR unrecognized_pc = 0;
1495
1496 /* Search the prologue looking for instructions that set up the
96baa820 1497 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1498
96baa820
JM
1499 Be careful, however, and if it doesn't look like a prologue,
1500 don't try to scan it. If, for instance, a frameless function
1501 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1502 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1503 and other operations that rely on a knowledge of the stack
0d39a070 1504 traceback. */
d4473757 1505
4be43953
DJ
1506 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1507 regs[regno] = pv_register (regno, 0);
55f960e1 1508 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1509 back_to = make_cleanup_free_pv_area (stack);
1510
94c30b78
MS
1511 for (current_pc = prologue_start;
1512 current_pc < prologue_end;
f43845b3 1513 current_pc += 4)
96baa820 1514 {
e17a4113 1515 unsigned int insn
198cd59d 1516 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1517
94c30b78 1518 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1519 {
4be43953 1520 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1521 continue;
1522 }
0d39a070
DJ
1523 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1524 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1525 {
1526 unsigned imm = insn & 0xff; /* immediate value */
1527 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1528 int rd = bits (insn, 12, 15);
28cd8767 1529 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1530 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1531 continue;
1532 }
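      /* A note on the immediate decode above: an ARM data-processing
	 immediate is an 8-bit value rotated right by twice the 4-bit
	 rotate field, and (insn & 0xf00) >> 7 yields that doubled
	 rotation directly.  E.g. imm8 = 0x40 with rotate field 14
	 decodes to 0x40 ror 28 == 0x400 (illustrative values). */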
0d39a070
DJ
1533 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1534 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1535 {
1536 unsigned imm = insn & 0xff; /* immediate value */
1537 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1538 int rd = bits (insn, 12, 15);
28cd8767 1539 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1540 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1541 continue;
1542 }
0963b4bd
MS
1543 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1544 [sp, #-4]! */
f43845b3 1545 {
4be43953
DJ
1546 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1547 break;
1548 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1549 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1550 regs[bits (insn, 12, 15)]);
f43845b3
MS
1551 continue;
1552 }
1553 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1554 /* stmfd sp!, {..., fp, ip, lr, pc}
1555 or
1556 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1557 {
d4473757 1558 int mask = insn & 0xffff;
ed9a39eb 1559
4be43953
DJ
1560 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1561 break;
1562
94c30b78 1563 /* Calculate offsets of saved registers. */
34e8f22d 1564 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1565 if (mask & (1 << regno))
1566 {
0963b4bd
MS
1567 regs[ARM_SP_REGNUM]
1568 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1569 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1570 }
1571 }
0d39a070
DJ
1572 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1573 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1574 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1575 {
1576 /* No need to add this to saved_regs -- it's just an arg reg. */
1577 continue;
1578 }
0d39a070
DJ
1579 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1580 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1581 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1582 {
1583 /* No need to add this to saved_regs -- it's just an arg reg. */
1584 continue;
1585 }
0963b4bd
MS
1586 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1587 { registers } */
0d39a070
DJ
1588 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1589 {
1590 /* No need to add this to saved_regs -- it's just arg regs. */
1591 continue;
1592 }
d4473757
KB
1593 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1594 {
94c30b78
MS
1595 unsigned imm = insn & 0xff; /* immediate value */
1596 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1597 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1598 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1599 }
1600 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1601 {
94c30b78
MS
1602 unsigned imm = insn & 0xff; /* immediate value */
1603 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1604 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1606 }
0963b4bd
MS
1607 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1608 [sp, -#c]! */
2af46ca0 1609 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1610 {
4be43953
DJ
1611 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1612 break;
1613
1614 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1615 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1616 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1617 }
0963b4bd
MS
1618 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1619 [sp!] */
2af46ca0 1620 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1621 {
1622 int n_saved_fp_regs;
1623 unsigned int fp_start_reg, fp_bound_reg;
1624
4be43953
DJ
1625 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1626 break;
1627
94c30b78 1628 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1629 {
d4473757
KB
1630 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1631 n_saved_fp_regs = 3;
1632 else
1633 n_saved_fp_regs = 1;
96baa820 1634 }
d4473757 1635 else
96baa820 1636 {
d4473757
KB
1637 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1638 n_saved_fp_regs = 2;
1639 else
1640 n_saved_fp_regs = 4;
96baa820 1641 }
d4473757 1642
34e8f22d 1643 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1644 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1645 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1646 {
4be43953
DJ
1647 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1648 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1649 regs[fp_start_reg++]);
96baa820 1650 }
c906108c 1651 }
0d39a070
DJ
1652 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1653 {
1654 /* Allow some special function calls when skipping the
1655 prologue; GCC generates these before storing arguments to
1656 the stack. */
1657 CORE_ADDR dest = BranchDest (current_pc, insn);
1658
e0634ccf 1659 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1660 continue;
1661 else
1662 break;
1663 }
d4473757 1664 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1665 break; /* Condition not true, exit early. */
0d39a070
DJ
1666 else if (arm_instruction_changes_pc (insn))
1667 /* Don't scan past anything that might change control flow. */
1668 break;
f303bc3e
YQ
1669 else if (arm_instruction_restores_sp (insn))
1670 {
1671 /* Don't scan past the epilogue. */
1672 break;
1673 }
d19f7eee
UW
1674 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1675 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1676 /* Ignore block loads from the stack, potentially copying
1677 parameters from memory. */
1678 continue;
1679 else if ((insn & 0xfc500000) == 0xe4100000
1680 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1681 /* Similarly ignore single loads from the stack. */
1682 continue;
0d39a070
DJ
1683 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1684 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1685 register instead of the stack. */
d4473757 1686 continue;
0d39a070
DJ
1687 else
1688 {
21daaaaf
YQ
1689 /* The optimizer might shove anything into the prologue, if
1690 we build up cache (cache != NULL) from scanning prologue,
1691 we just skip what we don't recognize and scan further to
1692 make cache as complete as possible. However, if we skip
1693 prologue, we'll stop immediately on unrecognized
1694 instruction. */
0d39a070 1695 unrecognized_pc = current_pc;
21daaaaf
YQ
1696 if (cache != NULL)
1697 continue;
1698 else
1699 break;
0d39a070 1700 }
c906108c
SS
1701 }
1702
0d39a070
DJ
1703 if (unrecognized_pc == 0)
1704 unrecognized_pc = current_pc;
1705
0d39a070
DJ
1706 if (cache)
1707 {
4072f920
YQ
1708 int framereg, framesize;
1709
1710 /* The frame size is just the distance from the frame register
1711 to the original stack pointer. */
1712 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1713 {
1714 /* Frame pointer is fp. */
1715 framereg = ARM_FP_REGNUM;
1716 framesize = -regs[ARM_FP_REGNUM].k;
1717 }
1718 else
1719 {
1720 /* Try the stack pointer... this is a bit desperate. */
1721 framereg = ARM_SP_REGNUM;
1722 framesize = -regs[ARM_SP_REGNUM].k;
1723 }
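      /* Illustrative example (not from the original sources): for the
	 classic APCS prologue

	     mov   ip, sp
	     stmfd sp!, {fp, ip, lr, pc}
	     sub   fp, ip, #4
	     sub   sp, sp, #16

	 the scan leaves regs[ARM_FP_REGNUM] equal to the entry SP
	 minus 4, so FRAMEREG is FP and FRAMESIZE is 4; adding
	 FRAMESIZE to the unwound FP in arm_make_prologue_cache then
	 recovers the caller's SP.  */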
1724
0d39a070
DJ
1725 cache->framereg = framereg;
1726 cache->framesize = framesize;
1727
1728 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1729 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1730 cache->saved_regs[regno].addr = offset;
1731 }
1732
1733 if (arm_debug)
1734 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1735 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1736
1737 do_cleanups (back_to);
0d39a070
DJ
1738 return unrecognized_pc;
1739}
1740
1741static void
1742arm_scan_prologue (struct frame_info *this_frame,
1743 struct arm_prologue_cache *cache)
1744{
1745 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1746 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1747 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1748 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1749 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1750
1751 /* Assume there is no frame until proven otherwise. */
1752 cache->framereg = ARM_SP_REGNUM;
1753 cache->framesize = 0;
1754
1755 /* Check for Thumb prologue. */
1756 if (arm_frame_is_thumb (this_frame))
1757 {
1758 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1759 return;
1760 }
1761
1762 /* Find the function prologue. If we can't find the function in
1763 the symbol table, peek in the stack frame to find the PC. */
1764 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1765 &prologue_end))
1766 {
1767 /* One way to find the end of the prologue (which works well
1768 for unoptimized code) is to do the following:
1769
1770 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1771
1772 if (sal.line == 0)
1773 prologue_end = prev_pc;
1774 else if (sal.end < prologue_end)
1775 prologue_end = sal.end;
1776
1777 This mechanism is very accurate so long as the optimizer
1778 doesn't move any instructions from the function body into the
1779 prologue. If this happens, sal.end will be the last
1780 instruction in the first hunk of prologue code just before
1781 the first instruction that the scheduler has moved from
1782 the body to the prologue.
1783
1784 In order to make sure that we scan all of the prologue
1785 instructions, we use a slightly less accurate mechanism which
1786 may scan more than necessary. To help compensate for this
1787 lack of accuracy, the prologue scanning loop below contains
1788 several clauses which'll cause the loop to terminate early if
1789 an implausible prologue instruction is encountered.
1790
1791 The expression
1792
1793 prologue_start + 64
1794
1795 is a suitable endpoint since it accounts for the largest
1796 possible prologue plus up to five instructions inserted by
1797 the scheduler. */
1798
1799 if (prologue_end > prologue_start + 64)
1800 {
1801 prologue_end = prologue_start + 64; /* See above. */
1802 }
1803 }
1804 else
1805 {
1806 /* We have no symbol information. Our only option is to assume this
1807 function has a standard stack frame and the normal frame register.
1808 Then, we can find the value of our frame pointer on entrance to
1809 the callee (or at the present moment if this is the innermost frame).
1810 The value stored there should be the address of the stmfd + 8. */
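      /* Why the "+ 8" works (an assumption based on the APCS
	 convention, not spelled out above): the canonical prologue
	 begins with "mov ip, sp" followed by
	 "stmfd sp!, {fp, ip, lr, pc}", and reading PC on ARM yields
	 the address of the current instruction plus 8, so the PC
	 value saved in the frame is the address of the stmfd plus 8.
	 Subtracting 8 therefore lands near the start of the
	 prologue.  */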
1811 CORE_ADDR frame_loc;
7913a64c 1812 ULONGEST return_value;
0d39a070
DJ
1813
1814 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
7913a64c
YQ
1815 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1816 &return_value))
0d39a070
DJ
1817 return;
1818 else
1819 {
1820 prologue_start = gdbarch_addr_bits_remove
1821 (gdbarch, return_value) - 8;
1822 prologue_end = prologue_start + 64; /* See above. */
1823 }
1824 }
1825
1826 if (prev_pc < prologue_end)
1827 prologue_end = prev_pc;
1828
1829 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1830}
1831
eb5492fa 1832static struct arm_prologue_cache *
a262aec2 1833arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1834{
eb5492fa
DJ
1835 int reg;
1836 struct arm_prologue_cache *cache;
1837 CORE_ADDR unwound_fp;
c5aa993b 1838
35d5d4ee 1839 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1840 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1841
a262aec2 1842 arm_scan_prologue (this_frame, cache);
848cfffb 1843
a262aec2 1844 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1845 if (unwound_fp == 0)
1846 return cache;
c906108c 1847
4be43953 1848 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1849
eb5492fa
DJ
1850 /* Calculate actual addresses of saved registers using offsets
1851 determined by arm_scan_prologue. */
a262aec2 1852 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1853 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1854 cache->saved_regs[reg].addr += cache->prev_sp;
1855
1856 return cache;
c906108c
SS
1857}
1858
c1ee9414
LM
1859/* Implementation of the stop_reason hook for arm_prologue frames. */
1860
1861static enum unwind_stop_reason
1862arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1863 void **this_cache)
1864{
1865 struct arm_prologue_cache *cache;
1866 CORE_ADDR pc;
1867
1868 if (*this_cache == NULL)
1869 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1870 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1871
1872 /* This is meant to halt the backtrace at "_start". */
1873 pc = get_frame_pc (this_frame);
1874 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1875 return UNWIND_OUTERMOST;
1876
1877 /* If we've hit a wall, stop. */
1878 if (cache->prev_sp == 0)
1879 return UNWIND_OUTERMOST;
1880
1881 return UNWIND_NO_REASON;
1882}
1883
eb5492fa
DJ
1884/* Our frame ID for a normal frame is the current function's starting PC
1885 and the caller's SP when we were called. */
c906108c 1886
148754e5 1887static void
a262aec2 1888arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1889 void **this_cache,
1890 struct frame_id *this_id)
c906108c 1891{
eb5492fa
DJ
1892 struct arm_prologue_cache *cache;
1893 struct frame_id id;
2c404490 1894 CORE_ADDR pc, func;
f079148d 1895
eb5492fa 1896 if (*this_cache == NULL)
a262aec2 1897 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1898 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1899
0e9e9abd
UW
1900 /* Use function start address as part of the frame ID. If we cannot
1901 identify the start address (due to missing symbol information),
1902 fall back to just using the current PC. */
c1ee9414 1903 pc = get_frame_pc (this_frame);
2c404490 1904 func = get_frame_func (this_frame);
0e9e9abd
UW
1905 if (!func)
1906 func = pc;
1907
eb5492fa 1908 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1909 *this_id = id;
c906108c
SS
1910}
1911
a262aec2
DJ
1912static struct value *
1913arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1914 void **this_cache,
a262aec2 1915 int prev_regnum)
24de872b 1916{
24568a2c 1917 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1918 struct arm_prologue_cache *cache;
1919
eb5492fa 1920 if (*this_cache == NULL)
a262aec2 1921 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1922 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1923
eb5492fa 1924 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1925 instead. The prologue may save PC, but it will point into this
1926 frame's prologue, not the next frame's resume location. Also
1927 strip the saved T bit. A valid LR may have the low bit set, but
1928 a valid PC never does. */
eb5492fa 1929 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1930 {
1931 CORE_ADDR lr;
1932
1933 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1934 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1935 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1936 }
24de872b 1937
eb5492fa 1938 /* SP is generally not saved to the stack, but this frame is
a262aec2 1939 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1940 The value was already reconstructed into PREV_SP. */
1941 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1942 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1943
b39cc962
DJ
1944 /* The CPSR may have been changed by the call instruction and by the
1945 called function. The only bit we can reconstruct is the T bit,
1946 by checking the low bit of LR as of the call. This is a reliable
1947 indicator of Thumb-ness except for some ARM v4T pre-interworking
1948 Thumb code, which could get away with a clear low bit as long as
1949 the called function did not use bx. Guess that all other
1950 bits are unchanged; the condition flags are presumably lost,
1951 but the processor status is likely valid. */
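  /* For example (illustrative only): if the saved LR reads
     0x000101b5, the low bit indicates the caller was in Thumb state,
     so the unwound CPSR below gets its T bit set while the unwound PC
     (handled above) is reported as 0x000101b4.  */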
1952 if (prev_regnum == ARM_PS_REGNUM)
1953 {
1954 CORE_ADDR lr, cpsr;
9779414d 1955 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1956
1957 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1958 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1959 if (IS_THUMB_ADDR (lr))
9779414d 1960 cpsr |= t_bit;
b39cc962 1961 else
9779414d 1962 cpsr &= ~t_bit;
b39cc962
DJ
1963 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1964 }
1965
a262aec2
DJ
1966 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1967 prev_regnum);
eb5492fa
DJ
1968}
1969
1970struct frame_unwind arm_prologue_unwind = {
1971 NORMAL_FRAME,
c1ee9414 1972 arm_prologue_unwind_stop_reason,
eb5492fa 1973 arm_prologue_this_id,
a262aec2
DJ
1974 arm_prologue_prev_register,
1975 NULL,
1976 default_frame_sniffer
eb5492fa
DJ
1977};
1978
0e9e9abd
UW
1979/* Maintain a list of ARM exception table entries per objfile, similar to the
1980 list of mapping symbols. We only cache entries for standard ARM-defined
1981 personality routines; the cache will contain only the frame unwinding
1982 instructions associated with the entry (not the descriptors). */
1983
1984static const struct objfile_data *arm_exidx_data_key;
1985
1986struct arm_exidx_entry
1987{
1988 bfd_vma addr;
1989 gdb_byte *entry;
1990};
1991typedef struct arm_exidx_entry arm_exidx_entry_s;
1992DEF_VEC_O(arm_exidx_entry_s);
1993
1994struct arm_exidx_data
1995{
1996 VEC(arm_exidx_entry_s) **section_maps;
1997};
1998
1999static void
2000arm_exidx_data_free (struct objfile *objfile, void *arg)
2001{
9a3c8263 2002 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
2003 unsigned int i;
2004
2005 for (i = 0; i < objfile->obfd->section_count; i++)
2006 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2007}
2008
2009static inline int
2010arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2011 const struct arm_exidx_entry *rhs)
2012{
2013 return lhs->addr < rhs->addr;
2014}
2015
2016static struct obj_section *
2017arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2018{
2019 struct obj_section *osect;
2020
2021 ALL_OBJFILE_OSECTIONS (objfile, osect)
2022 if (bfd_get_section_flags (objfile->obfd,
2023 osect->the_bfd_section) & SEC_ALLOC)
2024 {
2025 bfd_vma start, size;
2026 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2027 size = bfd_get_section_size (osect->the_bfd_section);
2028
2029 if (start <= vma && vma < start + size)
2030 return osect;
2031 }
2032
2033 return NULL;
2034}
2035
2036/* Parse contents of exception table and exception index sections
2037 of OBJFILE, and fill in the exception table entry cache.
2038
2039 For each entry that refers to a standard ARM-defined personality
2040 routine, extract the frame unwinding instructions (from either
2041 the index or the table section). The unwinding instructions
2042 are normalized by:
2043 - extracting them from the rest of the table data
2044 - converting to host endianness
2045 - appending the implicit 0xb0 ("Finish") code
2046
2047 The extracted and normalized instructions are stored for later
2048 retrieval by the arm_find_exidx_entry routine. */
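/* Layout sketch of what the loop below decodes (a summary of the EHABI
   format; the field names here are descriptive, not taken from the
   original sources).  Each .ARM.exidx entry is two 32-bit words:

     word 0: prel31 offset to the start of the function
     word 1: one of
	       1	   EXIDX_CANTUNWIND (no unwind information)
	       0x80nnnnnn  short-form entry, unwind opcodes inline
	       prel31	   offset of a long-form entry in .ARM.extab

   A prel31 value is a 31-bit signed offset relative to the word's own
   address; the expression ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000
   used below sign-extends it to 32 bits before the base address is
   added back in.  */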
2049
2050static void
2051arm_exidx_new_objfile (struct objfile *objfile)
2052{
3bb47e8b 2053 struct cleanup *cleanups;
0e9e9abd
UW
2054 struct arm_exidx_data *data;
2055 asection *exidx, *extab;
2056 bfd_vma exidx_vma = 0, extab_vma = 0;
2057 bfd_size_type exidx_size = 0, extab_size = 0;
2058 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2059 LONGEST i;
2060
2061 /* If we've already touched this file, do nothing. */
2062 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2063 return;
3bb47e8b 2064 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2065
2066 /* Read contents of exception table and index. */
a5eda10c 2067 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2068 if (exidx)
2069 {
2070 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2071 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2072 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2073 make_cleanup (xfree, exidx_data);
2074
2075 if (!bfd_get_section_contents (objfile->obfd, exidx,
2076 exidx_data, 0, exidx_size))
2077 {
2078 do_cleanups (cleanups);
2079 return;
2080 }
2081 }
2082
2083 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2084 if (extab)
2085 {
2086 extab_vma = bfd_section_vma (objfile->obfd, extab);
2087 extab_size = bfd_get_section_size (extab);
224c3ddb 2088 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2089 make_cleanup (xfree, extab_data);
2090
2091 if (!bfd_get_section_contents (objfile->obfd, extab,
2092 extab_data, 0, extab_size))
2093 {
2094 do_cleanups (cleanups);
2095 return;
2096 }
2097 }
2098
2099 /* Allocate exception table data structure. */
2100 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2101 set_objfile_data (objfile, arm_exidx_data_key, data);
2102 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2103 objfile->obfd->section_count,
2104 VEC(arm_exidx_entry_s) *);
2105
2106 /* Fill in exception table. */
2107 for (i = 0; i < exidx_size / 8; i++)
2108 {
2109 struct arm_exidx_entry new_exidx_entry;
2110 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2111 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2112 bfd_vma addr = 0, word = 0;
2113 int n_bytes = 0, n_words = 0;
2114 struct obj_section *sec;
2115 gdb_byte *entry = NULL;
2116
2117 /* Extract address of start of function. */
2118 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2119 idx += exidx_vma + i * 8;
2120
2121 /* Find section containing function and compute section offset. */
2122 sec = arm_obj_section_from_vma (objfile, idx);
2123 if (sec == NULL)
2124 continue;
2125 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2126
2127 /* Determine address of exception table entry. */
2128 if (val == 1)
2129 {
2130 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2131 }
2132 else if ((val & 0xff000000) == 0x80000000)
2133 {
2134 /* Exception table entry embedded in .ARM.exidx
2135 -- must be short form. */
2136 word = val;
2137 n_bytes = 3;
2138 }
2139 else if (!(val & 0x80000000))
2140 {
2141 /* Exception table entry in .ARM.extab. */
2142 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2143 addr += exidx_vma + i * 8 + 4;
2144
2145 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2146 {
2147 word = bfd_h_get_32 (objfile->obfd,
2148 extab_data + addr - extab_vma);
2149 addr += 4;
2150
2151 if ((word & 0xff000000) == 0x80000000)
2152 {
2153 /* Short form. */
2154 n_bytes = 3;
2155 }
2156 else if ((word & 0xff000000) == 0x81000000
2157 || (word & 0xff000000) == 0x82000000)
2158 {
2159 /* Long form. */
2160 n_bytes = 2;
2161 n_words = ((word >> 16) & 0xff);
2162 }
2163 else if (!(word & 0x80000000))
2164 {
2165 bfd_vma pers;
2166 struct obj_section *pers_sec;
2167 int gnu_personality = 0;
2168
2169 /* Custom personality routine. */
2170 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2171 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2172
2173 /* Check whether we've got one of the variants of the
2174 GNU personality routines. */
2175 pers_sec = arm_obj_section_from_vma (objfile, pers);
2176 if (pers_sec)
2177 {
2178 static const char *personality[] =
2179 {
2180 "__gcc_personality_v0",
2181 "__gxx_personality_v0",
2182 "__gcj_personality_v0",
2183 "__gnu_objc_personality_v0",
2184 NULL
2185 };
2186
2187 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2188 int k;
2189
2190 for (k = 0; personality[k]; k++)
2191 if (lookup_minimal_symbol_by_pc_name
2192 (pc, personality[k], objfile))
2193 {
2194 gnu_personality = 1;
2195 break;
2196 }
2197 }
2198
2199 /* If so, the next word contains a word count in the high
2200 byte, followed by the same unwind instructions as the
2201 pre-defined forms. */
2202 if (gnu_personality
2203 && addr + 4 <= extab_vma + extab_size)
2204 {
2205 word = bfd_h_get_32 (objfile->obfd,
2206 extab_data + addr - extab_vma);
2207 addr += 4;
2208 n_bytes = 3;
2209 n_words = ((word >> 24) & 0xff);
2210 }
2211 }
2212 }
2213 }
2214
2215 /* Sanity check address. */
2216 if (n_words)
2217 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2218 n_words = n_bytes = 0;
2219
2220 /* The unwind instructions reside in WORD (only the N_BYTES least
2221 significant bytes are valid), followed by N_WORDS words in the
2222 extab section starting at ADDR. */
2223 if (n_bytes || n_words)
2224 {
224c3ddb
SM
2225 gdb_byte *p = entry
2226 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2227 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2228
2229 while (n_bytes--)
2230 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2231
2232 while (n_words--)
2233 {
2234 word = bfd_h_get_32 (objfile->obfd,
2235 extab_data + addr - extab_vma);
2236 addr += 4;
2237
2238 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2239 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2240 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2241 *p++ = (gdb_byte) (word & 0xff);
2242 }
2243
2244 /* Implied "Finish" to terminate the list. */
2245 *p++ = 0xb0;
2246 }
2247
 2248 /* Push the entry onto the vector. Entries are guaranteed to always
2249 appear in order of increasing addresses. */
2250 new_exidx_entry.addr = idx;
2251 new_exidx_entry.entry = entry;
2252 VEC_safe_push (arm_exidx_entry_s,
2253 data->section_maps[sec->the_bfd_section->index],
2254 &new_exidx_entry);
2255 }
2256
2257 do_cleanups (cleanups);
2258}
2259
2260/* Search for the exception table entry covering MEMADDR. If one is found,
2261 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2262 set *START to the start of the region covered by this entry. */
2263
2264static gdb_byte *
2265arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2266{
2267 struct obj_section *sec;
2268
2269 sec = find_pc_section (memaddr);
2270 if (sec != NULL)
2271 {
2272 struct arm_exidx_data *data;
2273 VEC(arm_exidx_entry_s) *map;
2274 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2275 unsigned int idx;
2276
9a3c8263
SM
2277 data = ((struct arm_exidx_data *)
2278 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2279 if (data != NULL)
2280 {
2281 map = data->section_maps[sec->the_bfd_section->index];
2282 if (!VEC_empty (arm_exidx_entry_s, map))
2283 {
2284 struct arm_exidx_entry *map_sym;
2285
2286 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2287 arm_compare_exidx_entries);
2288
2289 /* VEC_lower_bound finds the earliest ordered insertion
2290 point. If the following symbol starts at this exact
2291 address, we use that; otherwise, the preceding
2292 exception table entry covers this address. */
2293 if (idx < VEC_length (arm_exidx_entry_s, map))
2294 {
2295 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2296 if (map_sym->addr == map_key.addr)
2297 {
2298 if (start)
2299 *start = map_sym->addr + obj_section_addr (sec);
2300 return map_sym->entry;
2301 }
2302 }
2303
2304 if (idx > 0)
2305 {
2306 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2307 if (start)
2308 *start = map_sym->addr + obj_section_addr (sec);
2309 return map_sym->entry;
2310 }
2311 }
2312 }
2313 }
2314
2315 return NULL;
2316}
2317
2318/* Given the current frame THIS_FRAME, and its associated frame unwinding
2319 instruction list from the ARM exception table entry ENTRY, allocate and
2320 return a prologue cache structure describing how to unwind this frame.
2321
2322 Return NULL if the unwinding instruction list contains a "spare",
2323 "reserved" or "refuse to unwind" instruction as defined in section
2324 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2325 for the ARM Architecture" document. */
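/* Worked example (illustrative, not from the original sources): the
   opcode string 0x84 0x08 0xb0 is decoded by the loop below as

     0x84 0x08	pop registers under mask 0x408, i.e. r7 and r14 (LR),
		recording their stack slots and advancing VSP by 8
     0xb0	"Finish": since PC was not popped explicitly, the saved
		LR slot is reused as the saved PC and decoding stops.  */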
2326
2327static struct arm_prologue_cache *
2328arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2329{
2330 CORE_ADDR vsp = 0;
2331 int vsp_valid = 0;
2332
2333 struct arm_prologue_cache *cache;
2334 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2335 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2336
2337 for (;;)
2338 {
2339 gdb_byte insn;
2340
2341 /* Whenever we reload SP, we actually have to retrieve its
2342 actual value in the current frame. */
2343 if (!vsp_valid)
2344 {
2345 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2346 {
2347 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2348 vsp = get_frame_register_unsigned (this_frame, reg);
2349 }
2350 else
2351 {
2352 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2353 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2354 }
2355
2356 vsp_valid = 1;
2357 }
2358
2359 /* Decode next unwind instruction. */
2360 insn = *entry++;
2361
2362 if ((insn & 0xc0) == 0)
2363 {
2364 int offset = insn & 0x3f;
2365 vsp += (offset << 2) + 4;
2366 }
2367 else if ((insn & 0xc0) == 0x40)
2368 {
2369 int offset = insn & 0x3f;
2370 vsp -= (offset << 2) + 4;
2371 }
2372 else if ((insn & 0xf0) == 0x80)
2373 {
2374 int mask = ((insn & 0xf) << 8) | *entry++;
2375 int i;
2376
2377 /* The special case of an all-zero mask identifies
2378 "Refuse to unwind". We return NULL to fall back
2379 to the prologue analyzer. */
2380 if (mask == 0)
2381 return NULL;
2382
2383 /* Pop registers r4..r15 under mask. */
2384 for (i = 0; i < 12; i++)
2385 if (mask & (1 << i))
2386 {
2387 cache->saved_regs[4 + i].addr = vsp;
2388 vsp += 4;
2389 }
2390
2391 /* Special-case popping SP -- we need to reload vsp. */
2392 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2393 vsp_valid = 0;
2394 }
2395 else if ((insn & 0xf0) == 0x90)
2396 {
2397 int reg = insn & 0xf;
2398
2399 /* Reserved cases. */
2400 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2401 return NULL;
2402
2403 /* Set SP from another register and mark VSP for reload. */
2404 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2405 vsp_valid = 0;
2406 }
2407 else if ((insn & 0xf0) == 0xa0)
2408 {
2409 int count = insn & 0x7;
2410 int pop_lr = (insn & 0x8) != 0;
2411 int i;
2412
2413 /* Pop r4..r[4+count]. */
2414 for (i = 0; i <= count; i++)
2415 {
2416 cache->saved_regs[4 + i].addr = vsp;
2417 vsp += 4;
2418 }
2419
2420 /* If indicated by flag, pop LR as well. */
2421 if (pop_lr)
2422 {
2423 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2424 vsp += 4;
2425 }
2426 }
2427 else if (insn == 0xb0)
2428 {
2429 /* We could only have updated PC by popping into it; if so, it
2430 will show up as address. Otherwise, copy LR into PC. */
2431 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2432 cache->saved_regs[ARM_PC_REGNUM]
2433 = cache->saved_regs[ARM_LR_REGNUM];
2434
2435 /* We're done. */
2436 break;
2437 }
2438 else if (insn == 0xb1)
2439 {
2440 int mask = *entry++;
2441 int i;
2442
2443 /* All-zero mask and mask >= 16 is "spare". */
2444 if (mask == 0 || mask >= 16)
2445 return NULL;
2446
2447 /* Pop r0..r3 under mask. */
2448 for (i = 0; i < 4; i++)
2449 if (mask & (1 << i))
2450 {
2451 cache->saved_regs[i].addr = vsp;
2452 vsp += 4;
2453 }
2454 }
2455 else if (insn == 0xb2)
2456 {
2457 ULONGEST offset = 0;
2458 unsigned shift = 0;
2459
2460 do
2461 {
2462 offset |= (*entry & 0x7f) << shift;
2463 shift += 7;
2464 }
2465 while (*entry++ & 0x80);
2466
2467 vsp += 0x204 + (offset << 2);
2468 }
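	  /* Example: the byte pair 0xb2 0x08 advances VSP by
	     0x204 + (8 << 2) = 0x224 bytes; the 0x204 bias leaves the
	     small adjustments to the 0x00..0x3f opcodes handled at the
	     top of this loop.  */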
2469 else if (insn == 0xb3)
2470 {
2471 int start = *entry >> 4;
2472 int count = (*entry++) & 0xf;
2473 int i;
2474
2475 /* Only registers D0..D15 are valid here. */
2476 if (start + count >= 16)
2477 return NULL;
2478
2479 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2480 for (i = 0; i <= count; i++)
2481 {
2482 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2483 vsp += 8;
2484 }
2485
2486 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2487 vsp += 4;
2488 }
2489 else if ((insn & 0xf8) == 0xb8)
2490 {
2491 int count = insn & 0x7;
2492 int i;
2493
2494 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2495 for (i = 0; i <= count; i++)
2496 {
2497 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2498 vsp += 8;
2499 }
2500
2501 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2502 vsp += 4;
2503 }
2504 else if (insn == 0xc6)
2505 {
2506 int start = *entry >> 4;
2507 int count = (*entry++) & 0xf;
2508 int i;
2509
2510 /* Only registers WR0..WR15 are valid. */
2511 if (start + count >= 16)
2512 return NULL;
2513
2514 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2515 for (i = 0; i <= count; i++)
2516 {
2517 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2518 vsp += 8;
2519 }
2520 }
2521 else if (insn == 0xc7)
2522 {
2523 int mask = *entry++;
2524 int i;
2525
2526 /* All-zero mask and mask >= 16 is "spare". */
2527 if (mask == 0 || mask >= 16)
2528 return NULL;
2529
2530 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2531 for (i = 0; i < 4; i++)
2532 if (mask & (1 << i))
2533 {
2534 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2535 vsp += 4;
2536 }
2537 }
2538 else if ((insn & 0xf8) == 0xc0)
2539 {
2540 int count = insn & 0x7;
2541 int i;
2542
2543 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2544 for (i = 0; i <= count; i++)
2545 {
2546 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2547 vsp += 8;
2548 }
2549 }
2550 else if (insn == 0xc8)
2551 {
2552 int start = *entry >> 4;
2553 int count = (*entry++) & 0xf;
2554 int i;
2555
2556 /* Only registers D0..D31 are valid. */
2557 if (start + count >= 16)
2558 return NULL;
2559
2560 /* Pop VFP double-precision registers
2561 D[16+start]..D[16+start+count]. */
2562 for (i = 0; i <= count; i++)
2563 {
2564 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2565 vsp += 8;
2566 }
2567 }
2568 else if (insn == 0xc9)
2569 {
2570 int start = *entry >> 4;
2571 int count = (*entry++) & 0xf;
2572 int i;
2573
2574 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2575 for (i = 0; i <= count; i++)
2576 {
2577 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2578 vsp += 8;
2579 }
2580 }
2581 else if ((insn & 0xf8) == 0xd0)
2582 {
2583 int count = insn & 0x7;
2584 int i;
2585
2586 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2587 for (i = 0; i <= count; i++)
2588 {
2589 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2590 vsp += 8;
2591 }
2592 }
2593 else
2594 {
2595 /* Everything else is "spare". */
2596 return NULL;
2597 }
2598 }
2599
2600 /* If we restore SP from a register, assume this was the frame register.
2601 Otherwise just fall back to SP as frame register. */
2602 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2603 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2604 else
2605 cache->framereg = ARM_SP_REGNUM;
2606
2607 /* Determine offset to previous frame. */
2608 cache->framesize
2609 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2610
2611 /* We already got the previous SP. */
2612 cache->prev_sp = vsp;
2613
2614 return cache;
2615}
2616
2617/* Unwinding via ARM exception table entries. Note that the sniffer
2618 already computes a filled-in prologue cache, which is then used
2619 with the same arm_prologue_this_id and arm_prologue_prev_register
2620 routines also used for prologue-parsing based unwinding. */
2621
2622static int
2623arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2624 struct frame_info *this_frame,
2625 void **this_prologue_cache)
2626{
2627 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2628 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2629 CORE_ADDR addr_in_block, exidx_region, func_start;
2630 struct arm_prologue_cache *cache;
2631 gdb_byte *entry;
2632
2633 /* See if we have an ARM exception table entry covering this address. */
2634 addr_in_block = get_frame_address_in_block (this_frame);
2635 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2636 if (!entry)
2637 return 0;
2638
2639 /* The ARM exception table does not describe unwind information
2640 for arbitrary PC values, but is guaranteed to be correct only
2641 at call sites. We have to decide here whether we want to use
2642 ARM exception table information for this frame, or fall back
2643 to using prologue parsing. (Note that if we have DWARF CFI,
2644 this sniffer isn't even called -- CFI is always preferred.)
2645
2646 Before we make this decision, however, we check whether we
2647 actually have *symbol* information for the current frame.
2648 If not, prologue parsing would not work anyway, so we might
2649 as well use the exception table and hope for the best. */
2650 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2651 {
2652 int exc_valid = 0;
2653
2654 /* If the next frame is "normal", we are at a call site in this
2655 frame, so exception information is guaranteed to be valid. */
2656 if (get_next_frame (this_frame)
2657 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2658 exc_valid = 1;
2659
2660 /* We also assume exception information is valid if we're currently
2661 blocked in a system call. The system library is supposed to
d9311bfa
AT
2662 ensure this, so that e.g. pthread cancellation works. */
2663 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2664 {
7913a64c 2665 ULONGEST insn;
416dc9c6 2666
7913a64c
YQ
2667 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2668 2, byte_order_for_code, &insn)
d9311bfa
AT
2669 && (insn & 0xff00) == 0xdf00 /* svc */)
2670 exc_valid = 1;
0e9e9abd 2671 }
d9311bfa
AT
2672 else
2673 {
7913a64c 2674 ULONGEST insn;
416dc9c6 2675
7913a64c
YQ
2676 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2677 4, byte_order_for_code, &insn)
d9311bfa
AT
2678 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2679 exc_valid = 1;
2680 }
2681
0e9e9abd
UW
2682 /* Bail out if we don't know that exception information is valid. */
2683 if (!exc_valid)
2684 return 0;
2685
2686 /* The ARM exception index does not mark the *end* of the region
2687 covered by the entry, and some functions will not have any entry.
2688 To correctly recognize the end of the covered region, the linker
2689 should have inserted dummy records with a CANTUNWIND marker.
2690
2691 Unfortunately, current versions of GNU ld do not reliably do
2692 this, and thus we may have found an incorrect entry above.
2693 As a (temporary) sanity check, we only use the entry if it
2694 lies *within* the bounds of the function. Note that this check
2695 might reject perfectly valid entries that just happen to cover
2696 multiple functions; therefore this check ought to be removed
2697 once the linker is fixed. */
2698 if (func_start > exidx_region)
2699 return 0;
2700 }
2701
2702 /* Decode the list of unwinding instructions into a prologue cache.
2703 Note that this may fail due to e.g. a "refuse to unwind" code. */
2704 cache = arm_exidx_fill_cache (this_frame, entry);
2705 if (!cache)
2706 return 0;
2707
2708 *this_prologue_cache = cache;
2709 return 1;
2710}
2711
2712struct frame_unwind arm_exidx_unwind = {
2713 NORMAL_FRAME,
8fbca658 2714 default_frame_unwind_stop_reason,
0e9e9abd
UW
2715 arm_prologue_this_id,
2716 arm_prologue_prev_register,
2717 NULL,
2718 arm_exidx_unwind_sniffer
2719};
2720
779aa56f
YQ
2721static struct arm_prologue_cache *
2722arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2723{
2724 struct arm_prologue_cache *cache;
779aa56f
YQ
2725 int reg;
2726
2727 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2728 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2729
 2730 /* Still rely on the offsets calculated from the prologue. */
2731 arm_scan_prologue (this_frame, cache);
2732
 2733 /* Since we are in the epilogue, the SP has already been restored. */
2734 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2735
2736 /* Calculate actual addresses of saved registers using offsets
2737 determined by arm_scan_prologue. */
2738 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2739 if (trad_frame_addr_p (cache->saved_regs, reg))
2740 cache->saved_regs[reg].addr += cache->prev_sp;
2741
2742 return cache;
2743}
2744
2745/* Implementation of function hook 'this_id' in
 2746 'struct frame_unwind' for the epilogue unwinder. */
2747
2748static void
2749arm_epilogue_frame_this_id (struct frame_info *this_frame,
2750 void **this_cache,
2751 struct frame_id *this_id)
2752{
2753 struct arm_prologue_cache *cache;
2754 CORE_ADDR pc, func;
2755
2756 if (*this_cache == NULL)
2757 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2758 cache = (struct arm_prologue_cache *) *this_cache;
2759
2760 /* Use function start address as part of the frame ID. If we cannot
2761 identify the start address (due to missing symbol information),
2762 fall back to just using the current PC. */
2763 pc = get_frame_pc (this_frame);
2764 func = get_frame_func (this_frame);
fb3f3d25 2765 if (func == 0)
779aa56f
YQ
2766 func = pc;
2767
2768 (*this_id) = frame_id_build (cache->prev_sp, pc);
2769}
2770
2771/* Implementation of function hook 'prev_register' in
 2772 'struct frame_unwind' for the epilogue unwinder. */
2773
2774static struct value *
2775arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2776 void **this_cache, int regnum)
2777{
779aa56f
YQ
2778 if (*this_cache == NULL)
2779 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2780
2781 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2782}
2783
2784static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2785 CORE_ADDR pc);
2786static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2787 CORE_ADDR pc);
2788
2789/* Implementation of function hook 'sniffer' in
 2790 'struct frame_unwind' for the epilogue unwinder. */
2791
2792static int
2793arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2794 struct frame_info *this_frame,
2795 void **this_prologue_cache)
2796{
2797 if (frame_relative_level (this_frame) == 0)
2798 {
2799 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2800 CORE_ADDR pc = get_frame_pc (this_frame);
2801
2802 if (arm_frame_is_thumb (this_frame))
2803 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2804 else
2805 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2806 }
2807 else
2808 return 0;
2809}
2810
2811/* Frame unwinder from epilogue. */
2812
2813static const struct frame_unwind arm_epilogue_frame_unwind =
2814{
2815 NORMAL_FRAME,
2816 default_frame_unwind_stop_reason,
2817 arm_epilogue_frame_this_id,
2818 arm_epilogue_frame_prev_register,
2819 NULL,
2820 arm_epilogue_frame_sniffer,
2821};
2822
80d8d390
YQ
2823/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2824 trampoline, return the target PC. Otherwise return 0.
2825
2826 void call0a (char c, short s, int i, long l) {}
2827
2828 int main (void)
2829 {
2830 (*pointer_to_call0a) (c, s, i, l);
2831 }
2832
2833 Instead of calling a stub library function _call_via_xx (xx is
2834 the register name), GCC may inline the trampoline in the object
2835 file as below (register r2 has the address of call0a).
2836
2837 .global main
2838 .type main, %function
2839 ...
2840 bl .L1
2841 ...
2842 .size main, .-main
2843
2844 .L1:
2845 bx r2
2846
2847 The trampoline 'bx r2' doesn't belong to main. */
2848
2849static CORE_ADDR
2850arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2851{
2852 /* The heuristics of recognizing such trampoline is that FRAME is
2853 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2854 if (arm_frame_is_thumb (frame))
2855 {
2856 gdb_byte buf[2];
2857
2858 if (target_read_memory (pc, buf, 2) == 0)
2859 {
2860 struct gdbarch *gdbarch = get_frame_arch (frame);
2861 enum bfd_endian byte_order_for_code
2862 = gdbarch_byte_order_for_code (gdbarch);
2863 uint16_t insn
2864 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2865
2866 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2867 {
2868 CORE_ADDR dest
2869 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2870
2871 /* Clear the LSB so that gdb core sets step-resume
2872 breakpoint at the right address. */
2873 return UNMAKE_THUMB_ADDR (dest);
2874 }
2875 }
2876 }
2877
2878 return 0;
2879}
2880
909cf6ea 2881static struct arm_prologue_cache *
a262aec2 2882arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2883{
909cf6ea 2884 struct arm_prologue_cache *cache;
909cf6ea 2885
35d5d4ee 2886 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2887 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2888
a262aec2 2889 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2890
2891 return cache;
2892}
2893
2894/* Our frame ID for a stub frame is the current SP and LR. */
2895
2896static void
a262aec2 2897arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2898 void **this_cache,
2899 struct frame_id *this_id)
2900{
2901 struct arm_prologue_cache *cache;
2902
2903 if (*this_cache == NULL)
a262aec2 2904 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2905 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2906
a262aec2 2907 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2908}
2909
a262aec2
DJ
2910static int
2911arm_stub_unwind_sniffer (const struct frame_unwind *self,
2912 struct frame_info *this_frame,
2913 void **this_prologue_cache)
909cf6ea 2914{
93d42b30 2915 CORE_ADDR addr_in_block;
948f8e3d 2916 gdb_byte dummy[4];
18d18ac8
YQ
2917 CORE_ADDR pc, start_addr;
2918 const char *name;
909cf6ea 2919
a262aec2 2920 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2921 pc = get_frame_pc (this_frame);
3e5d3a5a 2922 if (in_plt_section (addr_in_block)
fc36e839
DE
 2923 /* We also use the stub unwinder if the target memory is unreadable
 2924 to avoid having the prologue unwinder try to read it. */
18d18ac8
YQ
2925 || target_read_memory (pc, dummy, 4) != 0)
2926 return 1;
2927
2928 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2929 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2930 return 1;
909cf6ea 2931
a262aec2 2932 return 0;
909cf6ea
DJ
2933}
2934
a262aec2
DJ
2935struct frame_unwind arm_stub_unwind = {
2936 NORMAL_FRAME,
8fbca658 2937 default_frame_unwind_stop_reason,
a262aec2
DJ
2938 arm_stub_this_id,
2939 arm_prologue_prev_register,
2940 NULL,
2941 arm_stub_unwind_sniffer
2942};
2943
2ae28aa9
YQ
2944/* Put here the code to store, into CACHE->saved_regs, the addresses
2945 of the saved registers of frame described by THIS_FRAME. CACHE is
2946 returned. */
2947
2948static struct arm_prologue_cache *
2949arm_m_exception_cache (struct frame_info *this_frame)
2950{
2951 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2952 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2953 struct arm_prologue_cache *cache;
2954 CORE_ADDR unwound_sp;
2955 LONGEST xpsr;
2956
2957 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2958 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2959
2960 unwound_sp = get_frame_register_unsigned (this_frame,
2961 ARM_SP_REGNUM);
2962
2963 /* The hardware saves eight 32-bit words, comprising xPSR,
2964 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2965 "B1.5.6 Exception entry behavior" in
2966 "ARMv7-M Architecture Reference Manual". */
2967 cache->saved_regs[0].addr = unwound_sp;
2968 cache->saved_regs[1].addr = unwound_sp + 4;
2969 cache->saved_regs[2].addr = unwound_sp + 8;
2970 cache->saved_regs[3].addr = unwound_sp + 12;
2971 cache->saved_regs[12].addr = unwound_sp + 16;
2972 cache->saved_regs[14].addr = unwound_sp + 20;
2973 cache->saved_regs[15].addr = unwound_sp + 24;
2974 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2975
2976 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2977 aligner between the top of the 32-byte stack frame and the
2978 previous context's stack pointer. */
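  /* Resulting stack picture (illustrative): with UNWOUND_SP being the
     value of SP inside the handler, the hardware-saved frame is

	 UNWOUND_SP + 0	  r0	    UNWOUND_SP + 16  r12
	 UNWOUND_SP + 4	  r1	    UNWOUND_SP + 20  lr
	 UNWOUND_SP + 8	  r2	    UNWOUND_SP + 24  return address
	 UNWOUND_SP + 12  r3	    UNWOUND_SP + 28  xPSR

     so the interrupted code's SP is UNWOUND_SP + 32, plus 4 more if
     the aligner bit (xPSR bit 9) is set.  */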
2979 cache->prev_sp = unwound_sp + 32;
2980 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2981 && (xpsr & (1 << 9)) != 0)
2982 cache->prev_sp += 4;
2983
2984 return cache;
2985}
2986
2987/* Implementation of function hook 'this_id' in
 2988 'struct frame_unwind'. */
2989
2990static void
2991arm_m_exception_this_id (struct frame_info *this_frame,
2992 void **this_cache,
2993 struct frame_id *this_id)
2994{
2995 struct arm_prologue_cache *cache;
2996
2997 if (*this_cache == NULL)
2998 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2999 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3000
3001 /* Our frame ID for a stub frame is the current SP and LR. */
3002 *this_id = frame_id_build (cache->prev_sp,
3003 get_frame_pc (this_frame));
3004}
3005
3006/* Implementation of function hook 'prev_register' in
 3007 'struct frame_unwind'. */
3008
3009static struct value *
3010arm_m_exception_prev_register (struct frame_info *this_frame,
3011 void **this_cache,
3012 int prev_regnum)
3013{
2ae28aa9
YQ
3014 struct arm_prologue_cache *cache;
3015
3016 if (*this_cache == NULL)
3017 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3018 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3019
3020 /* The value was already reconstructed into PREV_SP. */
3021 if (prev_regnum == ARM_SP_REGNUM)
3022 return frame_unwind_got_constant (this_frame, prev_regnum,
3023 cache->prev_sp);
3024
3025 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3026 prev_regnum);
3027}
3028
3029/* Implementation of function hook 'sniffer' in
 3030 'struct frame_unwind'. */
3031
3032static int
3033arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3034 struct frame_info *this_frame,
3035 void **this_prologue_cache)
3036{
3037 CORE_ADDR this_pc = get_frame_pc (this_frame);
3038
3039 /* No need to check is_m; this sniffer is only registered for
3040 M-profile architectures. */
3041
ca90e760
FH
3042 /* Check if exception frame returns to a magic PC value. */
3043 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3044}
3045
3046/* Frame unwinder for M-profile exceptions. */
3047
3048struct frame_unwind arm_m_exception_unwind =
3049{
3050 SIGTRAMP_FRAME,
3051 default_frame_unwind_stop_reason,
3052 arm_m_exception_this_id,
3053 arm_m_exception_prev_register,
3054 NULL,
3055 arm_m_exception_unwind_sniffer
3056};
3057
24de872b 3058static CORE_ADDR
a262aec2 3059arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3060{
3061 struct arm_prologue_cache *cache;
3062
eb5492fa 3063 if (*this_cache == NULL)
a262aec2 3064 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3065 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3066
4be43953 3067 return cache->prev_sp - cache->framesize;
24de872b
DJ
3068}
3069
eb5492fa
DJ
3070struct frame_base arm_normal_base = {
3071 &arm_prologue_unwind,
3072 arm_normal_frame_base,
3073 arm_normal_frame_base,
3074 arm_normal_frame_base
3075};
3076
a262aec2 3077/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3078 dummy frame. The frame ID's base needs to match the TOS value
3079 saved by save_dummy_frame_tos() and returned from
3080 arm_push_dummy_call, and the PC needs to match the dummy frame's
3081 breakpoint. */
c906108c 3082
eb5492fa 3083static struct frame_id
a262aec2 3084arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3085{
0963b4bd
MS
3086 return frame_id_build (get_frame_register_unsigned (this_frame,
3087 ARM_SP_REGNUM),
a262aec2 3088 get_frame_pc (this_frame));
eb5492fa 3089}
c3b4394c 3090
eb5492fa
DJ
3091/* Given THIS_FRAME, find the previous frame's resume PC (which will
3092 be used to construct the previous frame's ID, after looking up the
3093 containing function). */
c3b4394c 3094
eb5492fa
DJ
3095static CORE_ADDR
3096arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3097{
3098 CORE_ADDR pc;
3099 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3100 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3101}
3102
3103static CORE_ADDR
3104arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3105{
3106 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3107}
3108
b39cc962
DJ
3109static struct value *
3110arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3111 int regnum)
3112{
24568a2c 3113 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3114 CORE_ADDR lr, cpsr;
9779414d 3115 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3116
3117 switch (regnum)
3118 {
3119 case ARM_PC_REGNUM:
3120 /* The PC is normally copied from the return column, which
3121 describes saves of LR. However, that version may have an
3122 extra bit set to indicate Thumb state. The bit is not
3123 part of the PC. */
3124 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3125 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3126 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3127
3128 case ARM_PS_REGNUM:
3129 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3130 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3131 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3132 if (IS_THUMB_ADDR (lr))
9779414d 3133 cpsr |= t_bit;
b39cc962 3134 else
9779414d 3135 cpsr &= ~t_bit;
ca38c58e 3136 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3137
3138 default:
3139 internal_error (__FILE__, __LINE__,
3140 _("Unexpected register %d"), regnum);
3141 }
3142}
3143
3144static void
3145arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3146 struct dwarf2_frame_state_reg *reg,
3147 struct frame_info *this_frame)
3148{
3149 switch (regnum)
3150 {
3151 case ARM_PC_REGNUM:
3152 case ARM_PS_REGNUM:
3153 reg->how = DWARF2_FRAME_REG_FN;
3154 reg->loc.fn = arm_dwarf2_prev_register;
3155 break;
3156 case ARM_SP_REGNUM:
3157 reg->how = DWARF2_FRAME_REG_CFA;
3158 break;
3159 }
3160}
3161
c9cf6e20 3162/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3163
3164static int
c9cf6e20 3165thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3166{
3167 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3168 unsigned int insn, insn2;
3169 int found_return = 0, found_stack_adjust = 0;
3170 CORE_ADDR func_start, func_end;
3171 CORE_ADDR scan_pc;
3172 gdb_byte buf[4];
3173
3174 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3175 return 0;
3176
3177 /* The epilogue is a sequence of instructions along the following lines:
3178
3179 - add stack frame size to SP or FP
3180 - [if frame pointer used] restore SP from FP
3181 - restore registers from SP [may include PC]
3182 - a return-type instruction [if PC wasn't already restored]
3183
3184 In a first pass, we scan forward from the current PC and verify the
3185 instructions we find as compatible with this sequence, ending in a
3186 return instruction.
3187
3188 However, this is not sufficient to distinguish indirect function calls
3189 within a function from indirect tail calls in the epilogue in some cases.
3190 Therefore, if we didn't already find any SP-changing instruction during
3191 forward scan, we add a backward scanning heuristic to ensure we actually
3192 are in the epilogue. */
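  /* A typical sequence this scan accepts (illustrative example, not
     from the original comment):

	 add	sp, #16
	 pop	{r4, r7, pc}

     The "add" is matched by the thumb_instruction_restores_sp check,
     and the "pop" with PC in its register list both restores SP and
     counts as the return instruction.  */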
3193
3194 scan_pc = pc;
3195 while (scan_pc < func_end && !found_return)
3196 {
3197 if (target_read_memory (scan_pc, buf, 2))
3198 break;
3199
3200 scan_pc += 2;
3201 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3202
3203 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3204 found_return = 1;
3205 else if (insn == 0x46f7) /* mov pc, lr */
3206 found_return = 1;
540314bd 3207 else if (thumb_instruction_restores_sp (insn))
4024ca99 3208 {
b7576e5c 3209 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3210 found_return = 1;
3211 }
db24da6d 3212 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3213 {
3214 if (target_read_memory (scan_pc, buf, 2))
3215 break;
3216
3217 scan_pc += 2;
3218 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3219
3220 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3221 {
4024ca99
UW
3222 if (insn2 & 0x8000) /* <registers> include PC. */
3223 found_return = 1;
3224 }
3225 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3226 && (insn2 & 0x0fff) == 0x0b04)
3227 {
4024ca99
UW
3228 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3229 found_return = 1;
3230 }
3231 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3232 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3233 ;
4024ca99
UW
3234 else
3235 break;
3236 }
3237 else
3238 break;
3239 }
3240
3241 if (!found_return)
3242 return 0;
3243
3244 /* Since any instruction in the epilogue sequence, with the possible
3245 exception of return itself, updates the stack pointer, we need to
3246 scan backwards for at most one instruction. Try either a 16-bit or
3247 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3248 too much about false positives. */
4024ca99 3249
6b65d1b6
YQ
3250 if (pc - 4 < func_start)
3251 return 0;
3252 if (target_read_memory (pc - 4, buf, 4))
3253 return 0;
4024ca99 3254
6b65d1b6
YQ
3255 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3256 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3257
3258 if (thumb_instruction_restores_sp (insn2))
3259 found_stack_adjust = 1;
3260 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3261 found_stack_adjust = 1;
3262 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3263 && (insn2 & 0x0fff) == 0x0b04)
3264 found_stack_adjust = 1;
3265 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3266 && (insn2 & 0x0e00) == 0x0a00)
3267 found_stack_adjust = 1;
4024ca99
UW
3268
3269 return found_stack_adjust;
3270}
3271
4024ca99 3272static int
c58b006a 3273arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3274{
3275 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3276 unsigned int insn;
f303bc3e 3277 int found_return;
4024ca99
UW
3278 CORE_ADDR func_start, func_end;
3279
4024ca99
UW
3280 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3281 return 0;
3282
3283 /* We are in the epilogue if the previous instruction was a stack
3284 adjustment and the next instruction is a possible return (bx, mov
3285 pc, or pop). We could have to scan backwards to find the stack
3286 adjustment, or forwards to find the return, but this is a decent
3287 approximation. First scan forwards. */
3288
3289 found_return = 0;
3290 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3291 if (bits (insn, 28, 31) != INST_NV)
3292 {
3293 if ((insn & 0x0ffffff0) == 0x012fff10)
3294 /* BX. */
3295 found_return = 1;
3296 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3297 /* MOV PC. */
3298 found_return = 1;
3299 else if ((insn & 0x0fff0000) == 0x08bd0000
3300 && (insn & 0x0000c000) != 0)
3301 /* POP (LDMIA), including PC or LR. */
3302 found_return = 1;
3303 }
3304
3305 if (!found_return)
3306 return 0;
3307
3308 /* Scan backwards. This is just a heuristic, so do not worry about
3309 false positives from mode changes. */
3310
3311 if (pc < func_start + 4)
3312 return 0;
3313
3314 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3315 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3316 return 1;
3317
3318 return 0;
3319}
3320
c58b006a
YQ
3321/* Implement the stack_frame_destroyed_p gdbarch method. */
3322
3323static int
3324arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3325{
3326 if (arm_pc_is_thumb (gdbarch, pc))
3327 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3328 else
3329 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3330}
4024ca99 3331
2dd604e7
RE
3332/* When arguments must be pushed onto the stack, they go on in reverse
3333 order. The code below implements a FILO (stack) to do this. */
3334
3335struct stack_item
3336{
3337 int len;
3338 struct stack_item *prev;
7c543f7b 3339 gdb_byte *data;
2dd604e7
RE
3340};
3341
3342static struct stack_item *
df3b6708 3343push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3344{
3345 struct stack_item *si;
8d749320 3346 si = XNEW (struct stack_item);
7c543f7b 3347 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3348 si->len = len;
3349 si->prev = prev;
3350 memcpy (si->data, contents, len);
3351 return si;
3352}
3353
3354static struct stack_item *
3355pop_stack_item (struct stack_item *si)
3356{
3357 struct stack_item *dead = si;
3358 si = si->prev;
3359 xfree (dead->data);
3360 xfree (dead);
3361 return si;
3362}
3363
2af48f68
PB
3364
3365/* Return the alignment (in bytes) of the given type. */
3366
3367static int
3368arm_type_align (struct type *t)
3369{
3370 int n;
3371 int align;
3372 int falign;
3373
3374 t = check_typedef (t);
3375 switch (TYPE_CODE (t))
3376 {
3377 default:
3378 /* Should never happen. */
3379 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3380 return 4;
3381
3382 case TYPE_CODE_PTR:
3383 case TYPE_CODE_ENUM:
3384 case TYPE_CODE_INT:
3385 case TYPE_CODE_FLT:
3386 case TYPE_CODE_SET:
3387 case TYPE_CODE_RANGE:
2af48f68 3388 case TYPE_CODE_REF:
aa006118 3389 case TYPE_CODE_RVALUE_REF:
2af48f68
PB
3390 case TYPE_CODE_CHAR:
3391 case TYPE_CODE_BOOL:
3392 return TYPE_LENGTH (t);
3393
3394 case TYPE_CODE_ARRAY:
c4312b19
YQ
3395 if (TYPE_VECTOR (t))
3396 {
 3397	    /* Use the natural alignment for vector types (the same as for
 3398	       their scalar element type), but cap the alignment at 64 bits. */
3399 if (TYPE_LENGTH (t) > 8)
3400 return 8;
3401 else
3402 return TYPE_LENGTH (t);
3403 }
3404 else
3405 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3406 case TYPE_CODE_COMPLEX:
2af48f68
PB
3407 return arm_type_align (TYPE_TARGET_TYPE (t));
3408
3409 case TYPE_CODE_STRUCT:
3410 case TYPE_CODE_UNION:
3411 align = 1;
3412 for (n = 0; n < TYPE_NFIELDS (t); n++)
3413 {
3414 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3415 if (falign > align)
3416 align = falign;
3417 }
3418 return align;
3419 }
3420}
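/* A few illustrative results of the recursion above (types assumed to have
   their usual 32-bit ARM sizes):

     struct { char c; double d; }  -> 8  (maximum of the field alignments)
     int[4]                        -> 4  (alignment of the element type)
     a 16-byte vector              -> 8  (natural alignment capped at 64 bits)  */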
3421
90445bd3
DJ
3422/* Possible base types for a candidate for passing and returning in
3423 VFP registers. */
3424
3425enum arm_vfp_cprc_base_type
3426{
3427 VFP_CPRC_UNKNOWN,
3428 VFP_CPRC_SINGLE,
3429 VFP_CPRC_DOUBLE,
3430 VFP_CPRC_VEC64,
3431 VFP_CPRC_VEC128
3432};
3433
3434/* The length of one element of base type B. */
3435
3436static unsigned
3437arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3438{
3439 switch (b)
3440 {
3441 case VFP_CPRC_SINGLE:
3442 return 4;
3443 case VFP_CPRC_DOUBLE:
3444 return 8;
3445 case VFP_CPRC_VEC64:
3446 return 8;
3447 case VFP_CPRC_VEC128:
3448 return 16;
3449 default:
3450 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3451 (int) b);
3452 }
3453}
3454
3455/* The character ('s', 'd' or 'q') for the type of VFP register used
3456 for passing base type B. */
3457
3458static int
3459arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3460{
3461 switch (b)
3462 {
3463 case VFP_CPRC_SINGLE:
3464 return 's';
3465 case VFP_CPRC_DOUBLE:
3466 return 'd';
3467 case VFP_CPRC_VEC64:
3468 return 'd';
3469 case VFP_CPRC_VEC128:
3470 return 'q';
3471 default:
3472 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3473 (int) b);
3474 }
3475}
3476
3477/* Determine whether T may be part of a candidate for passing and
3478 returning in VFP registers, ignoring the limit on the total number
3479 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3480 classification of the first valid component found; if it is not
3481 VFP_CPRC_UNKNOWN, all components must have the same classification
3482 as *BASE_TYPE. If it is found that T contains a type not permitted
3483 for passing and returning in VFP registers, a type differently
3484 classified from *BASE_TYPE, or two types differently classified
3485 from each other, return -1, otherwise return the total number of
3486 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
 3487 array).  A 64-bit or 128-bit containerized vector type is classified
 3488 as VFP_CPRC_VEC64 or VFP_CPRC_VEC128 respectively. */
90445bd3
DJ
3489
3490static int
3491arm_vfp_cprc_sub_candidate (struct type *t,
3492 enum arm_vfp_cprc_base_type *base_type)
3493{
3494 t = check_typedef (t);
3495 switch (TYPE_CODE (t))
3496 {
3497 case TYPE_CODE_FLT:
3498 switch (TYPE_LENGTH (t))
3499 {
3500 case 4:
3501 if (*base_type == VFP_CPRC_UNKNOWN)
3502 *base_type = VFP_CPRC_SINGLE;
3503 else if (*base_type != VFP_CPRC_SINGLE)
3504 return -1;
3505 return 1;
3506
3507 case 8:
3508 if (*base_type == VFP_CPRC_UNKNOWN)
3509 *base_type = VFP_CPRC_DOUBLE;
3510 else if (*base_type != VFP_CPRC_DOUBLE)
3511 return -1;
3512 return 1;
3513
3514 default:
3515 return -1;
3516 }
3517 break;
3518
817e0957
YQ
3519 case TYPE_CODE_COMPLEX:
3520 /* Arguments of complex T where T is one of the types float or
3521 double get treated as if they are implemented as:
3522
3523 struct complexT
3524 {
3525 T real;
3526 T imag;
5f52445b
YQ
3527 };
3528
3529 */
817e0957
YQ
3530 switch (TYPE_LENGTH (t))
3531 {
3532 case 8:
3533 if (*base_type == VFP_CPRC_UNKNOWN)
3534 *base_type = VFP_CPRC_SINGLE;
3535 else if (*base_type != VFP_CPRC_SINGLE)
3536 return -1;
3537 return 2;
3538
3539 case 16:
3540 if (*base_type == VFP_CPRC_UNKNOWN)
3541 *base_type = VFP_CPRC_DOUBLE;
3542 else if (*base_type != VFP_CPRC_DOUBLE)
3543 return -1;
3544 return 2;
3545
3546 default:
3547 return -1;
3548 }
3549 break;
3550
90445bd3
DJ
3551 case TYPE_CODE_ARRAY:
3552 {
c4312b19 3553 if (TYPE_VECTOR (t))
90445bd3 3554 {
c4312b19
YQ
 3555	    /* A 64-bit or 128-bit containerized vector type is a VFP
 3556	       CPRC. */
3557 switch (TYPE_LENGTH (t))
3558 {
3559 case 8:
3560 if (*base_type == VFP_CPRC_UNKNOWN)
3561 *base_type = VFP_CPRC_VEC64;
3562 return 1;
3563 case 16:
3564 if (*base_type == VFP_CPRC_UNKNOWN)
3565 *base_type = VFP_CPRC_VEC128;
3566 return 1;
3567 default:
3568 return -1;
3569 }
3570 }
3571 else
3572 {
3573 int count;
3574 unsigned unitlen;
3575
3576 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3577 base_type);
3578 if (count == -1)
3579 return -1;
3580 if (TYPE_LENGTH (t) == 0)
3581 {
3582 gdb_assert (count == 0);
3583 return 0;
3584 }
3585 else if (count == 0)
3586 return -1;
3587 unitlen = arm_vfp_cprc_unit_length (*base_type);
3588 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3589 return TYPE_LENGTH (t) / unitlen;
90445bd3 3590 }
90445bd3
DJ
3591 }
3592 break;
3593
3594 case TYPE_CODE_STRUCT:
3595 {
3596 int count = 0;
3597 unsigned unitlen;
3598 int i;
3599 for (i = 0; i < TYPE_NFIELDS (t); i++)
3600 {
1040b979
YQ
3601 int sub_count = 0;
3602
3603 if (!field_is_static (&TYPE_FIELD (t, i)))
3604 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3605 base_type);
90445bd3
DJ
3606 if (sub_count == -1)
3607 return -1;
3608 count += sub_count;
3609 }
3610 if (TYPE_LENGTH (t) == 0)
3611 {
3612 gdb_assert (count == 0);
3613 return 0;
3614 }
3615 else if (count == 0)
3616 return -1;
3617 unitlen = arm_vfp_cprc_unit_length (*base_type);
3618 if (TYPE_LENGTH (t) != unitlen * count)
3619 return -1;
3620 return count;
3621 }
3622
3623 case TYPE_CODE_UNION:
3624 {
3625 int count = 0;
3626 unsigned unitlen;
3627 int i;
3628 for (i = 0; i < TYPE_NFIELDS (t); i++)
3629 {
3630 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3631 base_type);
3632 if (sub_count == -1)
3633 return -1;
3634 count = (count > sub_count ? count : sub_count);
3635 }
3636 if (TYPE_LENGTH (t) == 0)
3637 {
3638 gdb_assert (count == 0);
3639 return 0;
3640 }
3641 else if (count == 0)
3642 return -1;
3643 unitlen = arm_vfp_cprc_unit_length (*base_type);
3644 if (TYPE_LENGTH (t) != unitlen * count)
3645 return -1;
3646 return count;
3647 }
3648
3649 default:
3650 break;
3651 }
3652
3653 return -1;
3654}
3655
3656/* Determine whether T is a VFP co-processor register candidate (CPRC)
3657 if passed to or returned from a non-variadic function with the VFP
3658 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3659 *BASE_TYPE to the base type for T and *COUNT to the number of
3660 elements of that base type before returning. */
3661
3662static int
3663arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3664 int *count)
3665{
3666 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3667 int c = arm_vfp_cprc_sub_candidate (t, &b);
3668 if (c <= 0 || c > 4)
3669 return 0;
3670 *base_type = b;
3671 *count = c;
3672 return 1;
3673}
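/* Some illustrative classifications produced by the two functions above:

     struct { float x, y, z; }      -> CPRC, base type single, count 3
     struct { double re, im; }      -> CPRC, base type double, count 2
     struct { float f; double d; }  -> not a CPRC (mixed base types)
     double d[5]                    -> not a CPRC (more than four elements)  */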
3674
3675/* Return 1 if the VFP ABI should be used for passing arguments to and
3676 returning values from a function of type FUNC_TYPE, 0
3677 otherwise. */
3678
3679static int
3680arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3681{
3682 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3683 /* Variadic functions always use the base ABI. Assume that functions
3684 without debug info are not variadic. */
3685 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3686 return 0;
3687 /* The VFP ABI is only supported as a variant of AAPCS. */
3688 if (tdep->arm_abi != ARM_ABI_AAPCS)
3689 return 0;
3690 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3691}
3692
3693/* We currently only support passing parameters in integer registers, which
3694 conforms with GCC's default model, and VFP argument passing following
3695 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3696 we should probably support some of them based on the selected ABI. */
3697
3698static CORE_ADDR
7d9b040b 3699arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3700 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3701 struct value **args, CORE_ADDR sp, int struct_return,
3702 CORE_ADDR struct_addr)
2dd604e7 3703{
e17a4113 3704 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3705 int argnum;
3706 int argreg;
3707 int nstack;
3708 struct stack_item *si = NULL;
90445bd3
DJ
3709 int use_vfp_abi;
3710 struct type *ftype;
3711 unsigned vfp_regs_free = (1 << 16) - 1;
3712
3713 /* Determine the type of this function and whether the VFP ABI
3714 applies. */
3715 ftype = check_typedef (value_type (function));
3716 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3717 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3718 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3719
6a65450a
AC
3720 /* Set the return address. For the ARM, the return breakpoint is
3721 always at BP_ADDR. */
9779414d 3722 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3723 bp_addr |= 1;
6a65450a 3724 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3725
3726 /* Walk through the list of args and determine how large a temporary
3727 stack is required. Need to take care here as structs may be
7a9dd1b2 3728 passed on the stack, and we have to push them. */
2dd604e7
RE
3729 nstack = 0;
3730
3731 argreg = ARM_A1_REGNUM;
3732 nstack = 0;
3733
2dd604e7
RE
3734 /* The struct_return pointer occupies the first parameter
3735 passing register. */
3736 if (struct_return)
3737 {
3738 if (arm_debug)
5af949e3 3739 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3740 gdbarch_register_name (gdbarch, argreg),
5af949e3 3741 paddress (gdbarch, struct_addr));
2dd604e7
RE
3742 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3743 argreg++;
3744 }
3745
3746 for (argnum = 0; argnum < nargs; argnum++)
3747 {
3748 int len;
3749 struct type *arg_type;
3750 struct type *target_type;
3751 enum type_code typecode;
8c6363cf 3752 const bfd_byte *val;
2af48f68 3753 int align;
90445bd3
DJ
3754 enum arm_vfp_cprc_base_type vfp_base_type;
3755 int vfp_base_count;
3756 int may_use_core_reg = 1;
2dd604e7 3757
df407dfe 3758 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3759 len = TYPE_LENGTH (arg_type);
3760 target_type = TYPE_TARGET_TYPE (arg_type);
3761 typecode = TYPE_CODE (arg_type);
8c6363cf 3762 val = value_contents (args[argnum]);
2dd604e7 3763
2af48f68
PB
3764 align = arm_type_align (arg_type);
3765 /* Round alignment up to a whole number of words. */
3766 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3767 /* Different ABIs have different maximum alignments. */
3768 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3769 {
3770 /* The APCS ABI only requires word alignment. */
3771 align = INT_REGISTER_SIZE;
3772 }
3773 else
3774 {
3775 /* The AAPCS requires at most doubleword alignment. */
3776 if (align > INT_REGISTER_SIZE * 2)
3777 align = INT_REGISTER_SIZE * 2;
3778 }
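      /* For example, a double or long long argument keeps its natural
	 8-byte alignment under AAPCS, but is only word aligned under the
	 old APCS.  */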
3779
90445bd3
DJ
3780 if (use_vfp_abi
3781 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3782 &vfp_base_count))
3783 {
3784 int regno;
3785 int unit_length;
3786 int shift;
3787 unsigned mask;
3788
3789 /* Because this is a CPRC it cannot go in a core register or
3790 cause a core register to be skipped for alignment.
3791 Either it goes in VFP registers and the rest of this loop
3792 iteration is skipped for this argument, or it goes on the
3793 stack (and the stack alignment code is correct for this
3794 case). */
3795 may_use_core_reg = 0;
3796
3797 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3798 shift = unit_length / 4;
3799 mask = (1 << (shift * vfp_base_count)) - 1;
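	  /* For example, for a CPRC of two doubles: unit_length is 8,
	     shift is 2 and mask is 0xf, so the loop below looks (in
	     single-precision-slot units) for two consecutive free D
	     registers, trying d0/d1, then d1/d2, and so on.  */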
3800 for (regno = 0; regno < 16; regno += shift)
3801 if (((vfp_regs_free >> regno) & mask) == mask)
3802 break;
3803
3804 if (regno < 16)
3805 {
3806 int reg_char;
3807 int reg_scaled;
3808 int i;
3809
3810 vfp_regs_free &= ~(mask << regno);
3811 reg_scaled = regno / shift;
3812 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3813 for (i = 0; i < vfp_base_count; i++)
3814 {
3815 char name_buf[4];
3816 int regnum;
58d6951d
DJ
3817 if (reg_char == 'q')
3818 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3819 val + i * unit_length);
58d6951d
DJ
3820 else
3821 {
8c042590
PM
3822 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3823 reg_char, reg_scaled + i);
58d6951d
DJ
3824 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3825 strlen (name_buf));
3826 regcache_cooked_write (regcache, regnum,
3827 val + i * unit_length);
3828 }
90445bd3
DJ
3829 }
3830 continue;
3831 }
3832 else
3833 {
3834 /* This CPRC could not go in VFP registers, so all VFP
3835 registers are now marked as used. */
3836 vfp_regs_free = 0;
3837 }
3838 }
3839
2af48f68
PB
 3840 /* Push stack padding for doubleword alignment. */
3841 if (nstack & (align - 1))
3842 {
3843 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3844 nstack += INT_REGISTER_SIZE;
3845 }
3846
3847 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3848 if (may_use_core_reg
3849 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3850 && align > INT_REGISTER_SIZE
3851 && argreg & 1)
3852 argreg++;
3853
2dd604e7
RE
3854 /* If the argument is a pointer to a function, and it is a
3855 Thumb function, create a LOCAL copy of the value and set
3856 the THUMB bit in it. */
3857 if (TYPE_CODE_PTR == typecode
3858 && target_type != NULL
f96b8fa0 3859 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3860 {
e17a4113 3861 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3862 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3863 {
224c3ddb 3864 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3865 store_unsigned_integer (copy, len, byte_order,
e17a4113 3866 MAKE_THUMB_ADDR (regval));
8c6363cf 3867 val = copy;
2dd604e7
RE
3868 }
3869 }
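      /* For example, if the argument is a pointer to a Thumb function at
	 the (illustrative) address 0x8000, the value actually passed is
	 0x8001, i.e. with the Thumb bit set.  */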
3870
3871 /* Copy the argument to general registers or the stack in
3872 register-sized pieces. Large arguments are split between
3873 registers and stack. */
3874 while (len > 0)
3875 {
f0c9063c 3876 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3877 CORE_ADDR regval
3878 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3879
90445bd3 3880 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3881 {
3882 /* The argument is being passed in a general purpose
3883 register. */
e17a4113 3884 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3885 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3886 if (arm_debug)
3887 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3888 argnum,
3889 gdbarch_register_name
2af46ca0 3890 (gdbarch, argreg),
f0c9063c 3891 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3892 regcache_cooked_write_unsigned (regcache, argreg, regval);
3893 argreg++;
3894 }
3895 else
3896 {
ef9bd0b8
YQ
3897 gdb_byte buf[INT_REGISTER_SIZE];
3898
3899 memset (buf, 0, sizeof (buf));
3900 store_unsigned_integer (buf, partial_len, byte_order, regval);
3901
2dd604e7
RE
3902 /* Push the arguments onto the stack. */
3903 if (arm_debug)
3904 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3905 argnum, nstack);
ef9bd0b8 3906 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3907 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3908 }
3909
3910 len -= partial_len;
3911 val += partial_len;
3912 }
3913 }
3914 /* If we have an odd number of words to push, then decrement the stack
 3915 by one word now, so the first stack argument will be dword aligned. */
3916 if (nstack & 4)
3917 sp -= 4;
3918
3919 while (si)
3920 {
3921 sp -= si->len;
3922 write_memory (sp, si->data, si->len);
3923 si = pop_stack_item (si);
3924 }
3925
 3926 /* Finally, update the SP register. */
3927 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3928
3929 return sp;
3930}
3931
f53f0d0b
PB
3932
3933/* Always align the frame to an 8-byte boundary. This is required on
3934 some platforms and harmless on the rest. */
3935
3936static CORE_ADDR
3937arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3938{
3939 /* Align the stack to eight bytes. */
3940 return sp & ~ (CORE_ADDR) 7;
3941}
3942
c906108c 3943static void
12b27276 3944print_fpu_flags (struct ui_file *file, int flags)
c906108c 3945{
c5aa993b 3946 if (flags & (1 << 0))
12b27276 3947 fputs_filtered ("IVO ", file);
c5aa993b 3948 if (flags & (1 << 1))
12b27276 3949 fputs_filtered ("DVZ ", file);
c5aa993b 3950 if (flags & (1 << 2))
12b27276 3951 fputs_filtered ("OFL ", file);
c5aa993b 3952 if (flags & (1 << 3))
12b27276 3953 fputs_filtered ("UFL ", file);
c5aa993b 3954 if (flags & (1 << 4))
12b27276
WN
3955 fputs_filtered ("INX ", file);
3956 fputc_filtered ('\n', file);
c906108c
SS
3957}
3958
5e74b15c
RE
3959/* Print interesting information about the floating point processor
3960 (if present) or emulator. */
34e8f22d 3961static void
d855c300 3962arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3963 struct frame_info *frame, const char *args)
c906108c 3964{
9c9acae0 3965 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3966 int type;
3967
3968 type = (status >> 24) & 127;
edefbb7c 3969 if (status & (1 << 31))
12b27276 3970 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3971 else
12b27276 3972 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3973 /* i18n: [floating point unit] mask */
12b27276
WN
3974 fputs_filtered (_("mask: "), file);
3975 print_fpu_flags (file, status >> 16);
edefbb7c 3976 /* i18n: [floating point unit] flags */
12b27276
WN
3977 fputs_filtered (_("flags: "), file);
3978 print_fpu_flags (file, status);
c906108c
SS
3979}
3980
27067745
UW
3981/* Construct the ARM extended floating point type. */
3982static struct type *
3983arm_ext_type (struct gdbarch *gdbarch)
3984{
3985 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3986
3987 if (!tdep->arm_ext_type)
3988 tdep->arm_ext_type
e9bb382b 3989 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3990 floatformats_arm_ext);
3991
3992 return tdep->arm_ext_type;
3993}
3994
58d6951d
DJ
3995static struct type *
3996arm_neon_double_type (struct gdbarch *gdbarch)
3997{
3998 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3999
4000 if (tdep->neon_double_type == NULL)
4001 {
4002 struct type *t, *elem;
4003
4004 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4005 TYPE_CODE_UNION);
4006 elem = builtin_type (gdbarch)->builtin_uint8;
4007 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4008 elem = builtin_type (gdbarch)->builtin_uint16;
4009 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4010 elem = builtin_type (gdbarch)->builtin_uint32;
4011 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4012 elem = builtin_type (gdbarch)->builtin_uint64;
4013 append_composite_type_field (t, "u64", elem);
4014 elem = builtin_type (gdbarch)->builtin_float;
4015 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4016 elem = builtin_type (gdbarch)->builtin_double;
4017 append_composite_type_field (t, "f64", elem);
4018
4019 TYPE_VECTOR (t) = 1;
4020 TYPE_NAME (t) = "neon_d";
4021 tdep->neon_double_type = t;
4022 }
4023
4024 return tdep->neon_double_type;
4025}
4026
4027/* FIXME: The vector types are not correctly ordered on big-endian
4028 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4029 bits of d0 - regardless of what unit size is being held in d0. So
4030 the offset of the first uint8 in d0 is 7, but the offset of the
4031 first float is 4. This code works as-is for little-endian
4032 targets. */
4033
4034static struct type *
4035arm_neon_quad_type (struct gdbarch *gdbarch)
4036{
4037 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4038
4039 if (tdep->neon_quad_type == NULL)
4040 {
4041 struct type *t, *elem;
4042
4043 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4044 TYPE_CODE_UNION);
4045 elem = builtin_type (gdbarch)->builtin_uint8;
4046 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4047 elem = builtin_type (gdbarch)->builtin_uint16;
4048 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4049 elem = builtin_type (gdbarch)->builtin_uint32;
4050 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4051 elem = builtin_type (gdbarch)->builtin_uint64;
4052 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4053 elem = builtin_type (gdbarch)->builtin_float;
4054 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4055 elem = builtin_type (gdbarch)->builtin_double;
4056 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4057
4058 TYPE_VECTOR (t) = 1;
4059 TYPE_NAME (t) = "neon_q";
4060 tdep->neon_quad_type = t;
4061 }
4062
4063 return tdep->neon_quad_type;
4064}
4065
34e8f22d
RE
4066/* Return the GDB type object for the "standard" data type of data in
4067 register N. */
4068
4069static struct type *
7a5ea0d4 4070arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4071{
58d6951d
DJ
4072 int num_regs = gdbarch_num_regs (gdbarch);
4073
4074 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4075 && regnum >= num_regs && regnum < num_regs + 32)
4076 return builtin_type (gdbarch)->builtin_float;
4077
4078 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4079 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4080 return arm_neon_quad_type (gdbarch);
4081
4082 /* If the target description has register information, we are only
4083 in this function so that we can override the types of
4084 double-precision registers for NEON. */
4085 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4086 {
4087 struct type *t = tdesc_register_type (gdbarch, regnum);
4088
4089 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4090 && TYPE_CODE (t) == TYPE_CODE_FLT
4091 && gdbarch_tdep (gdbarch)->have_neon)
4092 return arm_neon_double_type (gdbarch);
4093 else
4094 return t;
4095 }
4096
34e8f22d 4097 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4098 {
4099 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4100 return builtin_type (gdbarch)->builtin_void;
4101
4102 return arm_ext_type (gdbarch);
4103 }
e4c16157 4104 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4105 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4106 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4107 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4108 else if (regnum >= ARRAY_SIZE (arm_register_names))
4109 /* These registers are only supported on targets which supply
4110 an XML description. */
df4df182 4111 return builtin_type (gdbarch)->builtin_int0;
032758dc 4112 else
df4df182 4113 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4114}
4115
ff6f572f
DJ
4116/* Map a DWARF register REGNUM onto the appropriate GDB register
4117 number. */
4118
4119static int
d3f73121 4120arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4121{
4122 /* Core integer regs. */
4123 if (reg >= 0 && reg <= 15)
4124 return reg;
4125
4126 /* Legacy FPA encoding. These were once used in a way which
4127 overlapped with VFP register numbering, so their use is
4128 discouraged, but GDB doesn't support the ARM toolchain
4129 which used them for VFP. */
4130 if (reg >= 16 && reg <= 23)
4131 return ARM_F0_REGNUM + reg - 16;
4132
4133 /* New assignments for the FPA registers. */
4134 if (reg >= 96 && reg <= 103)
4135 return ARM_F0_REGNUM + reg - 96;
4136
4137 /* WMMX register assignments. */
4138 if (reg >= 104 && reg <= 111)
4139 return ARM_WCGR0_REGNUM + reg - 104;
4140
4141 if (reg >= 112 && reg <= 127)
4142 return ARM_WR0_REGNUM + reg - 112;
4143
4144 if (reg >= 192 && reg <= 199)
4145 return ARM_WC0_REGNUM + reg - 192;
4146
58d6951d
DJ
4147 /* VFP v2 registers. A double precision value is actually
4148 in d1 rather than s2, but the ABI only defines numbering
4149 for the single precision registers. This will "just work"
4150 in GDB for little endian targets (we'll read eight bytes,
4151 starting in s0 and then progressing to s1), but will be
4152 reversed on big endian targets with VFP. This won't
4153 be a problem for the new Neon quad registers; you're supposed
4154 to use DW_OP_piece for those. */
4155 if (reg >= 64 && reg <= 95)
4156 {
4157 char name_buf[4];
4158
8c042590 4159 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4160 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4161 strlen (name_buf));
4162 }
4163
4164 /* VFP v3 / Neon registers. This range is also used for VFP v2
4165 registers, except that it now describes d0 instead of s0. */
4166 if (reg >= 256 && reg <= 287)
4167 {
4168 char name_buf[4];
4169
8c042590 4170 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4171 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4172 strlen (name_buf));
4173 }
4174
ff6f572f
DJ
4175 return -1;
4176}
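/* For example, DWARF register 0 maps straight to r0, register 64 maps to
   the user register "s0", and register 256 maps to "d0"; the FPA registers
   are reachable both through the legacy numbers 16-23 and the newer
   numbers 96-103.  */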
4177
26216b98
AC
4178/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4179static int
e7faf938 4180arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4181{
4182 int reg = regnum;
e7faf938 4183 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4184
ff6f572f
DJ
4185 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4186 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4187
4188 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4189 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4190
4191 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4192 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4193
26216b98
AC
4194 if (reg < NUM_GREGS)
4195 return SIM_ARM_R0_REGNUM + reg;
4196 reg -= NUM_GREGS;
4197
4198 if (reg < NUM_FREGS)
4199 return SIM_ARM_FP0_REGNUM + reg;
4200 reg -= NUM_FREGS;
4201
4202 if (reg < NUM_SREGS)
4203 return SIM_ARM_FPS_REGNUM + reg;
4204 reg -= NUM_SREGS;
4205
edefbb7c 4206 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4207}
34e8f22d 4208
d9311bfa
AT
4209/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4210 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4211 NULL if an error occurs. BUF is freed. */
c906108c 4212
d9311bfa
AT
4213static gdb_byte *
4214extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4215 int old_len, int new_len)
4216{
4217 gdb_byte *new_buf;
4218 int bytes_to_read = new_len - old_len;
c906108c 4219
d9311bfa
AT
4220 new_buf = (gdb_byte *) xmalloc (new_len);
4221 memcpy (new_buf + bytes_to_read, buf, old_len);
4222 xfree (buf);
198cd59d 4223 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
d9311bfa
AT
4224 {
4225 xfree (new_buf);
4226 return NULL;
c906108c 4227 }
d9311bfa 4228 return new_buf;
c906108c
SS
4229}
4230
d9311bfa
AT
4231/* An IT block is at most the 2-byte IT instruction followed by
4232 four 4-byte instructions. The furthest back we must search to
4233 find an IT block that affects the current instruction is thus
4234 2 + 3 * 4 == 14 bytes. */
4235#define MAX_IT_BLOCK_PREFIX 14
177321bd 4236
d9311bfa
AT
4237/* Use a quick scan if there are more than this many bytes of
4238 code. */
4239#define IT_SCAN_THRESHOLD 32
177321bd 4240
d9311bfa
AT
4241/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4242 A breakpoint in an IT block may not be hit, depending on the
4243 condition flags. */
ad527d2e 4244static CORE_ADDR
d9311bfa 4245arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4246{
d9311bfa
AT
4247 gdb_byte *buf;
4248 char map_type;
4249 CORE_ADDR boundary, func_start;
4250 int buf_len;
4251 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4252 int i, any, last_it, last_it_count;
177321bd 4253
d9311bfa
AT
4254 /* If we are using BKPT breakpoints, none of this is necessary. */
4255 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4256 return bpaddr;
177321bd 4257
d9311bfa
AT
4258 /* ARM mode does not have this problem. */
4259 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4260 return bpaddr;
177321bd 4261
d9311bfa
AT
4262 /* We are setting a breakpoint in Thumb code that could potentially
4263 contain an IT block. The first step is to find how much Thumb
4264 code there is; we do not need to read outside of known Thumb
4265 sequences. */
4266 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4267 if (map_type == 0)
4268 /* Thumb-2 code must have mapping symbols to have a chance. */
4269 return bpaddr;
9dca5578 4270
d9311bfa 4271 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4272
d9311bfa
AT
4273 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4274 && func_start > boundary)
4275 boundary = func_start;
9dca5578 4276
d9311bfa
AT
4277 /* Search for a candidate IT instruction. We have to do some fancy
4278 footwork to distinguish a real IT instruction from the second
4279 half of a 32-bit instruction, but there is no need for that if
4280 there's no candidate. */
325fac50 4281 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4282 if (buf_len == 0)
4283 /* No room for an IT instruction. */
4284 return bpaddr;
c906108c 4285
d9311bfa 4286 buf = (gdb_byte *) xmalloc (buf_len);
198cd59d 4287 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
d9311bfa
AT
4288 return bpaddr;
4289 any = 0;
4290 for (i = 0; i < buf_len; i += 2)
c906108c 4291 {
d9311bfa
AT
4292 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4293 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4294 {
d9311bfa
AT
4295 any = 1;
4296 break;
25b41d01 4297 }
c906108c 4298 }
d9311bfa
AT
4299
4300 if (any == 0)
c906108c 4301 {
d9311bfa
AT
4302 xfree (buf);
4303 return bpaddr;
f9d67f43
DJ
4304 }
4305
4306 /* OK, the code bytes before this instruction contain at least one
4307 halfword which resembles an IT instruction. We know that it's
4308 Thumb code, but there are still two possibilities. Either the
4309 halfword really is an IT instruction, or it is the second half of
4310 a 32-bit Thumb instruction. The only way we can tell is to
4311 scan forwards from a known instruction boundary. */
4312 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4313 {
4314 int definite;
4315
4316 /* There's a lot of code before this instruction. Start with an
4317 optimistic search; it's easy to recognize halfwords that can
4318 not be the start of a 32-bit instruction, and use that to
4319 lock on to the instruction boundaries. */
4320 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4321 if (buf == NULL)
4322 return bpaddr;
4323 buf_len = IT_SCAN_THRESHOLD;
4324
4325 definite = 0;
4326 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4327 {
4328 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4329 if (thumb_insn_size (inst1) == 2)
4330 {
4331 definite = 1;
4332 break;
4333 }
4334 }
4335
4336 /* At this point, if DEFINITE, BUF[I] is the first place we
4337 are sure that we know the instruction boundaries, and it is far
4338 enough from BPADDR that we could not miss an IT instruction
4339 affecting BPADDR. If ! DEFINITE, give up - start from a
4340 known boundary. */
4341 if (! definite)
4342 {
0963b4bd
MS
4343 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4344 bpaddr - boundary);
f9d67f43
DJ
4345 if (buf == NULL)
4346 return bpaddr;
4347 buf_len = bpaddr - boundary;
4348 i = 0;
4349 }
4350 }
4351 else
4352 {
4353 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4354 if (buf == NULL)
4355 return bpaddr;
4356 buf_len = bpaddr - boundary;
4357 i = 0;
4358 }
4359
4360 /* Scan forwards. Find the last IT instruction before BPADDR. */
4361 last_it = -1;
4362 last_it_count = 0;
4363 while (i < buf_len)
4364 {
4365 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4366 last_it_count--;
4367 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4368 {
4369 last_it = i;
4370 if (inst1 & 0x0001)
4371 last_it_count = 4;
4372 else if (inst1 & 0x0002)
4373 last_it_count = 3;
4374 else if (inst1 & 0x0004)
4375 last_it_count = 2;
4376 else
4377 last_it_count = 1;
4378 }
4379 i += thumb_insn_size (inst1);
4380 }
4381
4382 xfree (buf);
4383
4384 if (last_it == -1)
4385 /* There wasn't really an IT instruction after all. */
4386 return bpaddr;
4387
4388 if (last_it_count < 1)
4389 /* It was too far away. */
4390 return bpaddr;
4391
4392 /* This really is a trouble spot. Move the breakpoint to the IT
4393 instruction. */
4394 return bpaddr - buf_len + last_it;
4395}
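/* For example, given the (illustrative) Thumb-2 sequence

	itte	ne
	addne	r0, r0, #1
	subne	r1, r1, #1
	addeq	r2, r2, #1

   a breakpoint requested on the SUBNE is moved back onto the ITTE: whether
   the SUBNE executes depends on the condition flags, so the IT instruction
   itself is the only address where the stop is guaranteed to be reached.  */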
4396
cca44b1b 4397/* ARM displaced stepping support.
c906108c 4398
cca44b1b 4399 Generally ARM displaced stepping works as follows:
c906108c 4400
cca44b1b 4401 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4402 arm_process_displaced_insn. Depending on the type of instruction, it is
4403 then copied to a scratch location, possibly in a modified form. The
4404 copy_* set of functions performs such modification, as necessary. A
4405 breakpoint is placed after the modified instruction in the scratch space
4406 to return control to GDB. Note in particular that instructions which
4407 modify the PC will no longer do so after modification.
c5aa993b 4408
cca44b1b
JB
4409 2. The instruction is single-stepped, by setting the PC to the scratch
4410 location address, and resuming. Control returns to GDB when the
4411 breakpoint is hit.
c5aa993b 4412
cca44b1b
JB
4413 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4414 function used for the current instruction. This function's job is to
4415 put the CPU/memory state back to what it would have been if the
4416 instruction had been executed unmodified in its original location. */
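/* For example, a conditional branch such as "blne <label>" is copied to the
   scratch area as a NOP (see arm_copy_b_bl_blx below); after the single
   step, cleanup_branch tests the NE condition against the saved flags and,
   only if it is true, writes the return address to LR and the branch target
   to the PC.  */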
c5aa993b 4417
cca44b1b
JB
4418/* NOP instruction (mov r0, r0). */
4419#define ARM_NOP 0xe1a00000
34518530 4420#define THUMB_NOP 0x4600
cca44b1b
JB
4421
4422/* Helper for register reads for displaced stepping. In particular, this
4423 returns the PC as it would be seen by the instruction at its original
4424 location. */
4425
4426ULONGEST
36073a92
YQ
4427displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4428 int regno)
cca44b1b
JB
4429{
4430 ULONGEST ret;
36073a92 4431 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4432
bf9f652a 4433 if (regno == ARM_PC_REGNUM)
cca44b1b 4434 {
4db71c0b
YQ
4435 /* Compute pipeline offset:
4436 - When executing an ARM instruction, PC reads as the address of the
4437 current instruction plus 8.
4438 - When executing a Thumb instruction, PC reads as the address of the
4439 current instruction plus 4. */
4440
36073a92 4441 if (!dsc->is_thumb)
4db71c0b
YQ
4442 from += 8;
4443 else
4444 from += 4;
4445
cca44b1b
JB
4446 if (debug_displaced)
4447 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4448 (unsigned long) from);
4449 return (ULONGEST) from;
cca44b1b 4450 }
c906108c 4451 else
cca44b1b
JB
4452 {
4453 regcache_cooked_read_unsigned (regs, regno, &ret);
4454 if (debug_displaced)
4455 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4456 regno, (unsigned long) ret);
4457 return ret;
4458 }
c906108c
SS
4459}
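/* For example, when an ARM-mode instruction originally located at the
   (illustrative) address 0x1000 reads the PC through this function, it
   sees 0x1008; a Thumb instruction at the same address would see 0x1004,
   matching the pipeline offsets described above.  */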
4460
cca44b1b
JB
4461static int
4462displaced_in_arm_mode (struct regcache *regs)
4463{
4464 ULONGEST ps;
9779414d 4465 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4466
cca44b1b 4467 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4468
9779414d 4469 return (ps & t_bit) == 0;
cca44b1b 4470}
66e810cd 4471
cca44b1b 4472/* Write to the PC as from a branch instruction. */
c906108c 4473
cca44b1b 4474static void
36073a92
YQ
4475branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4476 ULONGEST val)
c906108c 4477{
36073a92 4478 if (!dsc->is_thumb)
cca44b1b
JB
4479 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4480 architecture versions < 6. */
0963b4bd
MS
4481 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4482 val & ~(ULONGEST) 0x3);
cca44b1b 4483 else
0963b4bd
MS
4484 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4485 val & ~(ULONGEST) 0x1);
cca44b1b 4486}
66e810cd 4487
cca44b1b
JB
4488/* Write to the PC as from a branch-exchange instruction. */
4489
4490static void
4491bx_write_pc (struct regcache *regs, ULONGEST val)
4492{
4493 ULONGEST ps;
9779414d 4494 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4495
4496 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4497
4498 if ((val & 1) == 1)
c906108c 4499 {
9779414d 4500 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4501 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4502 }
4503 else if ((val & 2) == 0)
4504 {
9779414d 4505 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4506 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4507 }
4508 else
4509 {
cca44b1b
JB
4510 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4511 mode, align dest to 4 bytes). */
4512 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4513 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4514 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4515 }
4516}
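/* For example, bx_write_pc with VAL = 0x8001 sets the Thumb bit in the
   CPSR and writes 0x8000 to the PC, while VAL = 0x8000 clears the Thumb
   bit and writes 0x8000; the addresses are illustrative only.  */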
ed9a39eb 4517
cca44b1b 4518/* Write to the PC as if from a load instruction. */
ed9a39eb 4519
34e8f22d 4520static void
36073a92
YQ
4521load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4522 ULONGEST val)
ed9a39eb 4523{
cca44b1b
JB
4524 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4525 bx_write_pc (regs, val);
4526 else
36073a92 4527 branch_write_pc (regs, dsc, val);
cca44b1b 4528}
be8626e0 4529
cca44b1b
JB
4530/* Write to the PC as if from an ALU instruction. */
4531
4532static void
36073a92
YQ
4533alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4534 ULONGEST val)
cca44b1b 4535{
36073a92 4536 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4537 bx_write_pc (regs, val);
4538 else
36073a92 4539 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4540}
4541
4542/* Helper for writing to registers for displaced stepping. Writing to the PC
 4543 has varying effects depending on the instruction which does the write:
4544 this is controlled by the WRITE_PC argument. */
4545
4546void
4547displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4548 int regno, ULONGEST val, enum pc_write_style write_pc)
4549{
bf9f652a 4550 if (regno == ARM_PC_REGNUM)
08216dd7 4551 {
cca44b1b
JB
4552 if (debug_displaced)
4553 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4554 (unsigned long) val);
4555 switch (write_pc)
08216dd7 4556 {
cca44b1b 4557 case BRANCH_WRITE_PC:
36073a92 4558 branch_write_pc (regs, dsc, val);
08216dd7
RE
4559 break;
4560
cca44b1b
JB
4561 case BX_WRITE_PC:
4562 bx_write_pc (regs, val);
4563 break;
4564
4565 case LOAD_WRITE_PC:
36073a92 4566 load_write_pc (regs, dsc, val);
cca44b1b
JB
4567 break;
4568
4569 case ALU_WRITE_PC:
36073a92 4570 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4571 break;
4572
4573 case CANNOT_WRITE_PC:
4574 warning (_("Instruction wrote to PC in an unexpected way when "
4575 "single-stepping"));
08216dd7
RE
4576 break;
4577
4578 default:
97b9747c
JB
4579 internal_error (__FILE__, __LINE__,
4580 _("Invalid argument to displaced_write_reg"));
08216dd7 4581 }
b508a996 4582
cca44b1b 4583 dsc->wrote_to_pc = 1;
b508a996 4584 }
ed9a39eb 4585 else
b508a996 4586 {
cca44b1b
JB
4587 if (debug_displaced)
4588 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4589 regno, (unsigned long) val);
4590 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4591 }
34e8f22d
RE
4592}
4593
cca44b1b
JB
4594/* This function is used to concisely determine if an instruction INSN
4595 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4596 corresponding fields of BITMASK set to 0b1111. The function
 4597 returns 1 if any of these fields in INSN reference the PC
4598 (also 0b1111, r15), else it returns 0. */
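/* For example, arm_copy_alu_imm below passes a BITMASK of 0x000ff000,
   which asks whether either the Rd field (bits 12-15) or the Rn field
   (bits 16-19) of an ALU immediate instruction names the PC.  */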
67255d04
RE
4599
4600static int
cca44b1b 4601insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4602{
cca44b1b 4603 uint32_t lowbit = 1;
67255d04 4604
cca44b1b
JB
4605 while (bitmask != 0)
4606 {
4607 uint32_t mask;
44e1a9eb 4608
cca44b1b
JB
4609 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4610 ;
67255d04 4611
cca44b1b
JB
4612 if (!lowbit)
4613 break;
67255d04 4614
cca44b1b 4615 mask = lowbit * 0xf;
67255d04 4616
cca44b1b
JB
4617 if ((insn & mask) == mask)
4618 return 1;
4619
4620 bitmask &= ~mask;
67255d04
RE
4621 }
4622
cca44b1b
JB
4623 return 0;
4624}
2af48f68 4625
cca44b1b
JB
4626/* The simplest copy function. Many instructions have the same effect no
4627 matter what address they are executed at: in those cases, use this. */
67255d04 4628
cca44b1b 4629static int
7ff120b4
YQ
4630arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4631 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4632{
4633 if (debug_displaced)
4634 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4635 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4636 iname);
67255d04 4637
cca44b1b 4638 dsc->modinsn[0] = insn;
67255d04 4639
cca44b1b
JB
4640 return 0;
4641}
4642
34518530
YQ
4643static int
4644thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4645 uint16_t insn2, const char *iname,
4646 struct displaced_step_closure *dsc)
4647{
4648 if (debug_displaced)
4649 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4650 "opcode/class '%s' unmodified\n", insn1, insn2,
4651 iname);
4652
4653 dsc->modinsn[0] = insn1;
4654 dsc->modinsn[1] = insn2;
4655 dsc->numinsns = 2;
4656
4657 return 0;
4658}
4659
 4660/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4661 modification. */
4662static int
615234c1 4663thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4664 const char *iname,
4665 struct displaced_step_closure *dsc)
4666{
4667 if (debug_displaced)
4668 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4669 "opcode/class '%s' unmodified\n", insn,
4670 iname);
4671
4672 dsc->modinsn[0] = insn;
4673
4674 return 0;
4675}
4676
cca44b1b
JB
4677/* Preload instructions with immediate offset. */
4678
4679static void
6e39997a 4680cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4681 struct regcache *regs, struct displaced_step_closure *dsc)
4682{
4683 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4684 if (!dsc->u.preload.immed)
4685 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4686}
4687
7ff120b4
YQ
4688static void
4689install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4690 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4691{
cca44b1b 4692 ULONGEST rn_val;
cca44b1b
JB
4693 /* Preload instructions:
4694
4695 {pli/pld} [rn, #+/-imm]
4696 ->
4697 {pli/pld} [r0, #+/-imm]. */
4698
36073a92
YQ
4699 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4700 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4701 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4702 dsc->u.preload.immed = 1;
4703
cca44b1b 4704 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4705}
4706
cca44b1b 4707static int
7ff120b4 4708arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4709 struct displaced_step_closure *dsc)
4710{
4711 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4712
7ff120b4
YQ
4713 if (!insn_references_pc (insn, 0x000f0000ul))
4714 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4715
4716 if (debug_displaced)
4717 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4718 (unsigned long) insn);
4719
7ff120b4
YQ
4720 dsc->modinsn[0] = insn & 0xfff0ffff;
4721
4722 install_preload (gdbarch, regs, dsc, rn);
4723
4724 return 0;
4725}
4726
34518530
YQ
4727static int
4728thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4729 struct regcache *regs, struct displaced_step_closure *dsc)
4730{
4731 unsigned int rn = bits (insn1, 0, 3);
4732 unsigned int u_bit = bit (insn1, 7);
4733 int imm12 = bits (insn2, 0, 11);
4734 ULONGEST pc_val;
4735
4736 if (rn != ARM_PC_REGNUM)
4737 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4738
 4739 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3 and
4740 PLD (literal) Encoding T1. */
4741 if (debug_displaced)
4742 fprintf_unfiltered (gdb_stdlog,
4743 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4744 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4745 imm12);
4746
4747 if (!u_bit)
4748 imm12 = -1 * imm12;
4749
4750 /* Rewrite instruction {pli/pld} PC imm12 into:
4751 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4752
4753 {pli/pld} [r0, r1]
4754
4755 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4756
4757 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4758 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4759
4760 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4761
4762 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4763 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4764 dsc->u.preload.immed = 0;
4765
4766 /* {pli/pld} [r0, r1] */
4767 dsc->modinsn[0] = insn1 & 0xfff0;
4768 dsc->modinsn[1] = 0xf001;
4769 dsc->numinsns = 2;
4770
4771 dsc->cleanup = &cleanup_preload;
4772 return 0;
4773}
4774
7ff120b4
YQ
4775/* Preload instructions with register offset. */
4776
4777static void
4778install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4779 struct displaced_step_closure *dsc, unsigned int rn,
4780 unsigned int rm)
4781{
4782 ULONGEST rn_val, rm_val;
4783
cca44b1b
JB
4784 /* Preload register-offset instructions:
4785
4786 {pli/pld} [rn, rm {, shift}]
4787 ->
4788 {pli/pld} [r0, r1 {, shift}]. */
4789
36073a92
YQ
4790 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4791 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4792 rn_val = displaced_read_reg (regs, dsc, rn);
4793 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4794 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4795 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4796 dsc->u.preload.immed = 0;
4797
cca44b1b 4798 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4799}
4800
4801static int
4802arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4803 struct regcache *regs,
4804 struct displaced_step_closure *dsc)
4805{
4806 unsigned int rn = bits (insn, 16, 19);
4807 unsigned int rm = bits (insn, 0, 3);
4808
4809
4810 if (!insn_references_pc (insn, 0x000f000ful))
4811 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4812
4813 if (debug_displaced)
4814 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4815 (unsigned long) insn);
4816
4817 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4818
7ff120b4 4819 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4820 return 0;
4821}
4822
4823/* Copy/cleanup coprocessor load and store instructions. */
4824
4825static void
6e39997a 4826cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4827 struct regcache *regs,
4828 struct displaced_step_closure *dsc)
4829{
36073a92 4830 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4831
4832 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4833
4834 if (dsc->u.ldst.writeback)
4835 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4836}
4837
7ff120b4
YQ
4838static void
4839install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4840 struct displaced_step_closure *dsc,
4841 int writeback, unsigned int rn)
cca44b1b 4842{
cca44b1b 4843 ULONGEST rn_val;
cca44b1b 4844
cca44b1b
JB
4845 /* Coprocessor load/store instructions:
4846
4847 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4848 ->
4849 {stc/stc2} [r0, #+/-imm].
4850
4851 ldc/ldc2 are handled identically. */
4852
36073a92
YQ
4853 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4854 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4855 /* PC should be 4-byte aligned. */
4856 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4857 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4858
7ff120b4 4859 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4860 dsc->u.ldst.rn = rn;
4861
7ff120b4
YQ
4862 dsc->cleanup = &cleanup_copro_load_store;
4863}
4864
4865static int
4866arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4867 struct regcache *regs,
4868 struct displaced_step_closure *dsc)
4869{
4870 unsigned int rn = bits (insn, 16, 19);
4871
4872 if (!insn_references_pc (insn, 0x000f0000ul))
4873 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4874
4875 if (debug_displaced)
4876 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4877 "load/store insn %.8lx\n", (unsigned long) insn);
4878
cca44b1b
JB
4879 dsc->modinsn[0] = insn & 0xfff0ffff;
4880
7ff120b4 4881 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4882
4883 return 0;
4884}
4885
34518530
YQ
4886static int
4887thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4888 uint16_t insn2, struct regcache *regs,
4889 struct displaced_step_closure *dsc)
4890{
4891 unsigned int rn = bits (insn1, 0, 3);
4892
4893 if (rn != ARM_PC_REGNUM)
4894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4895 "copro load/store", dsc);
4896
4897 if (debug_displaced)
4898 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4899 "load/store insn %.4x%.4x\n", insn1, insn2);
4900
4901 dsc->modinsn[0] = insn1 & 0xfff0;
4902 dsc->modinsn[1] = insn2;
4903 dsc->numinsns = 2;
4904
4905 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4906 doesn't support writeback, so pass 0. */
4907 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4908
4909 return 0;
4910}
4911
cca44b1b
JB
4912/* Clean up branch instructions (actually perform the branch, by setting
4913 PC). */
4914
4915static void
6e39997a 4916cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4917 struct displaced_step_closure *dsc)
4918{
36073a92 4919 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4920 int branch_taken = condition_true (dsc->u.branch.cond, status);
4921 enum pc_write_style write_pc = dsc->u.branch.exchange
4922 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4923
4924 if (!branch_taken)
4925 return;
4926
4927 if (dsc->u.branch.link)
4928 {
8c8dba6d
YQ
 4929 /* The value of LR should be the address of the insn after the current
 4930 one.  To avoid confusing logic that later handles `bx lr', if the
 4931 current insn executes in Thumb mode, bit 0 of the LR value is set to 1. */
4932 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4933
4934 if (dsc->is_thumb)
4935 next_insn_addr |= 0x1;
4936
4937 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4938 CANNOT_WRITE_PC);
cca44b1b
JB
4939 }
4940
bf9f652a 4941 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4942}
4943
4944/* Copy B/BL/BLX instructions with immediate destinations. */
4945
7ff120b4
YQ
4946static void
4947install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4948 struct displaced_step_closure *dsc,
4949 unsigned int cond, int exchange, int link, long offset)
4950{
4951 /* Implement "BL<cond> <label>" as:
4952
4953 Preparation: cond <- instruction condition
4954 Insn: mov r0, r0 (nop)
4955 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4956
4957 B<cond> similar, but don't set r14 in cleanup. */
4958
4959 dsc->u.branch.cond = cond;
4960 dsc->u.branch.link = link;
4961 dsc->u.branch.exchange = exchange;
4962
2b16b2e3
YQ
4963 dsc->u.branch.dest = dsc->insn_addr;
4964 if (link && exchange)
4965 /* For BLX, offset is computed from the Align (PC, 4). */
4966 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4967
7ff120b4 4968 if (dsc->is_thumb)
2b16b2e3 4969 dsc->u.branch.dest += 4 + offset;
7ff120b4 4970 else
2b16b2e3 4971 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
4972
4973 dsc->cleanup = &cleanup_branch;
4974}
cca44b1b 4975static int
7ff120b4
YQ
4976arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4977 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
4978{
4979 unsigned int cond = bits (insn, 28, 31);
4980 int exchange = (cond == 0xf);
4981 int link = exchange || bit (insn, 24);
cca44b1b
JB
4982 long offset;
4983
4984 if (debug_displaced)
4985 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4986 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4987 (unsigned long) insn);
cca44b1b
JB
4988 if (exchange)
4989 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4990 then arrange the switch into Thumb mode. */
4991 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4992 else
4993 offset = bits (insn, 0, 23) << 2;
4994
4995 if (bit (offset, 25))
4996 offset = offset | ~0x3ffffff;
4997
cca44b1b
JB
4998 dsc->modinsn[0] = ARM_NOP;
4999
7ff120b4 5000 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5001 return 0;
5002}
5003
34518530
YQ
5004static int
5005thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5006 uint16_t insn2, struct regcache *regs,
5007 struct displaced_step_closure *dsc)
5008{
5009 int link = bit (insn2, 14);
5010 int exchange = link && !bit (insn2, 12);
5011 int cond = INST_AL;
5012 long offset = 0;
5013 int j1 = bit (insn2, 13);
5014 int j2 = bit (insn2, 11);
5015 int s = sbits (insn1, 10, 10);
5016 int i1 = !(j1 ^ bit (insn1, 10));
5017 int i2 = !(j2 ^ bit (insn1, 10));
5018
5019 if (!link && !exchange) /* B */
5020 {
5021 offset = (bits (insn2, 0, 10) << 1);
5022 if (bit (insn2, 12)) /* Encoding T4 */
5023 {
5024 offset |= (bits (insn1, 0, 9) << 12)
5025 | (i2 << 22)
5026 | (i1 << 23)
5027 | (s << 24);
5028 cond = INST_AL;
5029 }
5030 else /* Encoding T3 */
5031 {
5032 offset |= (bits (insn1, 0, 5) << 12)
5033 | (j1 << 18)
5034 | (j2 << 19)
5035 | (s << 20);
5036 cond = bits (insn1, 6, 9);
5037 }
5038 }
5039 else
5040 {
5041 offset = (bits (insn1, 0, 9) << 12);
5042 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5043 offset |= exchange ?
5044 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5045 }
5046
5047 if (debug_displaced)
5048 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5049 "%.4x %.4x with offset %.8lx\n",
5050 link ? (exchange) ? "blx" : "bl" : "b",
5051 insn1, insn2, offset);
5052
5053 dsc->modinsn[0] = THUMB_NOP;
5054
5055 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5056 return 0;
5057}
5058
5059/* Copy B Thumb instructions. */
5060static int
615234c1 5061thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5062 struct displaced_step_closure *dsc)
5063{
5064 unsigned int cond = 0;
5065 int offset = 0;
5066 unsigned short bit_12_15 = bits (insn, 12, 15);
5067 CORE_ADDR from = dsc->insn_addr;
5068
5069 if (bit_12_15 == 0xd)
5070 {
5071 /* offset = SignExtend (imm8:0, 32) */
5072 offset = sbits ((insn << 1), 0, 8);
5073 cond = bits (insn, 8, 11);
5074 }
5075 else if (bit_12_15 == 0xe) /* Encoding T2 */
5076 {
5077 offset = sbits ((insn << 1), 0, 11);
5078 cond = INST_AL;
5079 }
5080
5081 if (debug_displaced)
5082 fprintf_unfiltered (gdb_stdlog,
5083 "displaced: copying b immediate insn %.4x "
5084 "with offset %d\n", insn, offset);
5085
5086 dsc->u.branch.cond = cond;
5087 dsc->u.branch.link = 0;
5088 dsc->u.branch.exchange = 0;
5089 dsc->u.branch.dest = from + 4 + offset;
5090
5091 dsc->modinsn[0] = THUMB_NOP;
5092
5093 dsc->cleanup = &cleanup_branch;
5094
5095 return 0;
5096}
5097
cca44b1b
JB
5098/* Copy BX/BLX with register-specified destinations. */
5099
7ff120b4
YQ
5100static void
5101install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5102 struct displaced_step_closure *dsc, int link,
5103 unsigned int cond, unsigned int rm)
cca44b1b 5104{
cca44b1b
JB
5105 /* Implement {BX,BLX}<cond> <reg> as:
5106
5107 Preparation: cond <- instruction condition
5108 Insn: mov r0, r0 (nop)
5109 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5110
5111 Don't set r14 in cleanup for BX. */
5112
36073a92 5113 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
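     /* Bit 0 of the value read from RM selects the target instruction set,
	just as for BLX immediate above; cleanup_branch uses it when it
	writes the PC to arrange any ARM/Thumb switch.  */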
cca44b1b
JB
5114
5115 dsc->u.branch.cond = cond;
5116 dsc->u.branch.link = link;
cca44b1b 5117
7ff120b4 5118 dsc->u.branch.exchange = 1;
cca44b1b
JB
5119
5120 dsc->cleanup = &cleanup_branch;
7ff120b4 5121}
cca44b1b 5122
7ff120b4
YQ
5123static int
5124arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5125 struct regcache *regs, struct displaced_step_closure *dsc)
5126{
5127 unsigned int cond = bits (insn, 28, 31);
5128 /* BX: x12xxx1x
5129 BLX: x12xxx3x. */
5130 int link = bit (insn, 5);
5131 unsigned int rm = bits (insn, 0, 3);
5132
5133 if (debug_displaced)
5134 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5135 (unsigned long) insn);
5136
5137 dsc->modinsn[0] = ARM_NOP;
5138
5139 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5140 return 0;
5141}
5142
34518530
YQ
5143static int
5144thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5145 struct regcache *regs,
5146 struct displaced_step_closure *dsc)
5147{
5148 int link = bit (insn, 7);
5149 unsigned int rm = bits (insn, 3, 6);
5150
5151 if (debug_displaced)
5152 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5153 (unsigned short) insn);
5154
5155 dsc->modinsn[0] = THUMB_NOP;
5156
5157 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5158
5159 return 0;
5160}
5161
5162
0963b4bd 5163/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5164
5165static void
6e39997a 5166cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5167 struct regcache *regs, struct displaced_step_closure *dsc)
5168{
36073a92 5169 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5170 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5171 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5172 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5173}
5174
5175static int
7ff120b4
YQ
5176arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5177 struct displaced_step_closure *dsc)
cca44b1b
JB
5178{
5179 unsigned int rn = bits (insn, 16, 19);
5180 unsigned int rd = bits (insn, 12, 15);
5181 unsigned int op = bits (insn, 21, 24);
5182 int is_mov = (op == 0xd);
5183 ULONGEST rd_val, rn_val;
cca44b1b
JB
5184
5185 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5186 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5187
5188 if (debug_displaced)
5189 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5190 "%.8lx\n", is_mov ? "move" : "ALU",
5191 (unsigned long) insn);
5192
5193 /* Instruction is of form:
5194
5195 <op><cond> rd, [rn,] #imm
5196
5197 Rewrite as:
5198
5199 Preparation: tmp1, tmp2 <- r0, r1;
5200 r0, r1 <- rd, rn
5201 Insn: <op><cond> r0, r1, #imm
5202 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5203 */
5204
36073a92
YQ
5205 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5206 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5207 rn_val = displaced_read_reg (regs, dsc, rn);
5208 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5209 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5210 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5211 dsc->rd = rd;
5212
5213 if (is_mov)
5214 dsc->modinsn[0] = insn & 0xfff00fff;
5215 else
5216 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5217
5218 dsc->cleanup = &cleanup_alu_imm;
5219
5220 return 0;
5221}
5222
34518530
YQ
5223static int
5224thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5225 uint16_t insn2, struct regcache *regs,
5226 struct displaced_step_closure *dsc)
5227{
5228 unsigned int op = bits (insn1, 5, 8);
5229 unsigned int rn, rm, rd;
5230 ULONGEST rd_val, rn_val;
5231
5232 rn = bits (insn1, 0, 3); /* Rn */
5233 rm = bits (insn2, 0, 3); /* Rm */
5234 rd = bits (insn2, 8, 11); /* Rd */
5235
5236 /* This routine is only called for the MOV instruction. */
5237 gdb_assert (op == 0x2 && rn == 0xf);
5238
5239 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5240 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5241
5242 if (debug_displaced)
5243 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5244 "ALU", insn1, insn2);
5245
5246 /* Instruction is of form:
5247
5248 <op><cond> rd, [rn,] #imm
5249
5250 Rewrite as:
5251
5252 Preparation: tmp1, tmp2 <- r0, r1;
5253 r0, r1 <- rd, rn
5254 Insn: <op><cond> r0, r1, #imm
5255 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5256 */
5257
5258 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5259 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5260 rn_val = displaced_read_reg (regs, dsc, rn);
5261 rd_val = displaced_read_reg (regs, dsc, rd);
5262 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5263 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5264 dsc->rd = rd;
5265
5266 dsc->modinsn[0] = insn1;
5267 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5268 dsc->numinsns = 2;
5269
5270 dsc->cleanup = &cleanup_alu_imm;
5271
5272 return 0;
5273}
5274
cca44b1b
JB
5275/* Copy/cleanup arithmetic/logic insns with register RHS. */
5276
5277static void
6e39997a 5278cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5279 struct regcache *regs, struct displaced_step_closure *dsc)
5280{
5281 ULONGEST rd_val;
5282 int i;
5283
36073a92 5284 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5285
5286 for (i = 0; i < 3; i++)
5287 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5288
5289 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5290}
5291
7ff120b4
YQ
5292static void
5293install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5294 struct displaced_step_closure *dsc,
5295 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5296{
cca44b1b 5297 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5298
cca44b1b
JB
5299 /* Instruction is of form:
5300
5301 <op><cond> rd, [rn,] rm [, <shift>]
5302
5303 Rewrite as:
5304
5305 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5306 r0, r1, r2 <- rd, rn, rm
ef713951 5307 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5308 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5309 */
5310
36073a92
YQ
5311 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5312 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5313 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5314 rd_val = displaced_read_reg (regs, dsc, rd);
5315 rn_val = displaced_read_reg (regs, dsc, rn);
5316 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5317 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5318 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5319 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5320 dsc->rd = rd;
5321
7ff120b4
YQ
5322 dsc->cleanup = &cleanup_alu_reg;
5323}
5324
5325static int
5326arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5327 struct displaced_step_closure *dsc)
5328{
5329 unsigned int op = bits (insn, 21, 24);
5330 int is_mov = (op == 0xd);
5331
5332 if (!insn_references_pc (insn, 0x000ff00ful))
5333 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5334
5335 if (debug_displaced)
5336 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5337 is_mov ? "move" : "ALU", (unsigned long) insn);
5338
cca44b1b
JB
5339 if (is_mov)
5340 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5341 else
5342 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5343
7ff120b4
YQ
5344 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5345 bits (insn, 0, 3));
cca44b1b
JB
5346 return 0;
5347}
5348
34518530
YQ
5349static int
5350thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5351 struct regcache *regs,
5352 struct displaced_step_closure *dsc)
5353{
ef713951 5354 unsigned rm, rd;
34518530 5355
ef713951
YQ
5356 rm = bits (insn, 3, 6);
5357 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5358
ef713951 5359 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5360 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5361
5362 if (debug_displaced)
ef713951
YQ
5363 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5364 (unsigned short) insn);
34518530 5365
ef713951 5366 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5367
ef713951 5368 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5369
5370 return 0;
5371}
5372
cca44b1b
JB
5373/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5374
5375static void
6e39997a 5376cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5377 struct regcache *regs,
5378 struct displaced_step_closure *dsc)
5379{
36073a92 5380 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5381 int i;
5382
5383 for (i = 0; i < 4; i++)
5384 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5385
5386 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5387}
5388
7ff120b4
YQ
5389static void
5390install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5391 struct displaced_step_closure *dsc,
5392 unsigned int rd, unsigned int rn, unsigned int rm,
5393 unsigned rs)
cca44b1b 5394{
7ff120b4 5395 int i;
cca44b1b 5396 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5397
cca44b1b
JB
5398 /* Instruction is of form:
5399
5400 <op><cond> rd, [rn,] rm, <shift> rs
5401
5402 Rewrite as:
5403
5404 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5405 r0, r1, r2, r3 <- rd, rn, rm, rs
5406 Insn: <op><cond> r0, r1, r2, <shift> r3
5407 Cleanup: tmp5 <- r0
5408 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5409 rd <- tmp5
5410 */
5411
5412 for (i = 0; i < 4; i++)
36073a92 5413 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5414
36073a92
YQ
5415 rd_val = displaced_read_reg (regs, dsc, rd);
5416 rn_val = displaced_read_reg (regs, dsc, rn);
5417 rm_val = displaced_read_reg (regs, dsc, rm);
5418 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5419 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5420 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5421 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5422 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5423 dsc->rd = rd;
7ff120b4
YQ
5424 dsc->cleanup = &cleanup_alu_shifted_reg;
5425}
5426
5427static int
5428arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5429 struct regcache *regs,
5430 struct displaced_step_closure *dsc)
5431{
5432 unsigned int op = bits (insn, 21, 24);
5433 int is_mov = (op == 0xd);
5434 unsigned int rd, rn, rm, rs;
5435
5436 if (!insn_references_pc (insn, 0x000fff0ful))
5437 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5438
5439 if (debug_displaced)
5440 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5441 "%.8lx\n", is_mov ? "move" : "ALU",
5442 (unsigned long) insn);
5443
5444 rn = bits (insn, 16, 19);
5445 rm = bits (insn, 0, 3);
5446 rs = bits (insn, 8, 11);
5447 rd = bits (insn, 12, 15);
cca44b1b
JB
5448
5449 if (is_mov)
5450 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5451 else
5452 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5453
7ff120b4 5454 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5455
5456 return 0;
5457}
5458
5459/* Clean up load instructions. */
5460
5461static void
6e39997a 5462cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5463 struct displaced_step_closure *dsc)
5464{
5465 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5466
36073a92 5467 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5468 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5469 rt_val2 = displaced_read_reg (regs, dsc, 1);
5470 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5471
5472 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5473 if (dsc->u.ldst.xfersize > 4)
5474 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5475 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5476 if (!dsc->u.ldst.immed)
5477 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5478
5479 /* Handle register writeback. */
5480 if (dsc->u.ldst.writeback)
5481 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5482 /* Put result in right place. */
5483 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5484 if (dsc->u.ldst.xfersize == 8)
5485 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5486}
5487
5488/* Clean up store instructions. */
5489
5490static void
6e39997a 5491cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5492 struct displaced_step_closure *dsc)
5493{
36073a92 5494 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5495
5496 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5497 if (dsc->u.ldst.xfersize > 4)
5498 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5499 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5500 if (!dsc->u.ldst.immed)
5501 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5502 if (!dsc->u.ldst.restore_r4)
5503 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5504
5505 /* Writeback. */
5506 if (dsc->u.ldst.writeback)
5507 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5508}
5509
5510/* Copy "extra" load/store instructions. These are halfword/doubleword
5511 transfers, which have a different encoding to byte/word transfers. */
5512
5513static int
550dc4e2 5514arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5515 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5516{
5517 unsigned int op1 = bits (insn, 20, 24);
5518 unsigned int op2 = bits (insn, 5, 6);
5519 unsigned int rt = bits (insn, 12, 15);
5520 unsigned int rn = bits (insn, 16, 19);
5521 unsigned int rm = bits (insn, 0, 3);
5522 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5523 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
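     /* The two tables above are indexed by the opcode computed below:
	load[] says whether that form is a load, and bytesize[] gives its
	transfer size in bytes (1, 2 or 8).  */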
5524 int immed = (op1 & 0x4) != 0;
5525 int opcode;
5526 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5527
5528 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5529 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5530
5531 if (debug_displaced)
5532 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5533 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5534 (unsigned long) insn);
5535
5536 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5537
5538 if (opcode < 0)
5539 internal_error (__FILE__, __LINE__,
5540 _("copy_extra_ld_st: instruction decode error"));
5541
36073a92
YQ
5542 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5543 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5544 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5545 if (!immed)
36073a92 5546 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5547
36073a92 5548 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5549 if (bytesize[opcode] == 8)
36073a92
YQ
5550 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5551 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5552 if (!immed)
36073a92 5553 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5554
5555 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5556 if (bytesize[opcode] == 8)
5557 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5558 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5559 if (!immed)
5560 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5561
5562 dsc->rd = rt;
5563 dsc->u.ldst.xfersize = bytesize[opcode];
5564 dsc->u.ldst.rn = rn;
5565 dsc->u.ldst.immed = immed;
5566 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5567 dsc->u.ldst.restore_r4 = 0;
5568
5569 if (immed)
5570 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5571 ->
5572 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5573 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5574 else
5575 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5576 ->
5577 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5578 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5579
5580 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5581
5582 return 0;
5583}
5584
0f6f04ba 5585/* Copy byte/halfword/word loads and stores. */
cca44b1b 5586
7ff120b4 5587static void
0f6f04ba
YQ
5588install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5589 struct displaced_step_closure *dsc, int load,
5590 int immed, int writeback, int size, int usermode,
5591 int rt, int rm, int rn)
cca44b1b 5592{
cca44b1b 5593 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5594
36073a92
YQ
5595 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5596 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5597 if (!immed)
36073a92 5598 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5599 if (!load)
36073a92 5600 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5601
36073a92
YQ
5602 rt_val = displaced_read_reg (regs, dsc, rt);
5603 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5604 if (!immed)
36073a92 5605 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5606
5607 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5608 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5609 if (!immed)
5610 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5611 dsc->rd = rt;
0f6f04ba 5612 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5613 dsc->u.ldst.rn = rn;
5614 dsc->u.ldst.immed = immed;
7ff120b4 5615 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5616
5617 /* To write PC we can do:
5618
494e194e
YQ
5619 Before this sequence of instructions:
 5620 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
 5621 r2 is the Rn value obtained from displaced_read_reg.
5622
5623 Insn1: push {pc} Write address of STR instruction + offset on stack
5624 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5625 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5626 = addr(Insn1) + offset - addr(Insn3) - 8
5627 = offset - 16
5628 Insn4: add r4, r4, #8 r4 = offset - 8
5629 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5630 = from + offset
5631 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5632
5633 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5634 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5635 of this can be found in Section "Saving from r15" in
5636 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
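     /* That six-instruction sequence is emitted by the caller,
	arm_copy_ldr_str_ldrb_strb, for stores whose Rt is the PC; this
	function only records the bookkeeping the cleanup routines need.  */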
cca44b1b 5637
7ff120b4
YQ
5638 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5639}
5640
34518530
YQ
5641
5642static int
5643thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5644 uint16_t insn2, struct regcache *regs,
5645 struct displaced_step_closure *dsc, int size)
5646{
5647 unsigned int u_bit = bit (insn1, 7);
5648 unsigned int rt = bits (insn2, 12, 15);
5649 int imm12 = bits (insn2, 0, 11);
5650 ULONGEST pc_val;
5651
5652 if (debug_displaced)
5653 fprintf_unfiltered (gdb_stdlog,
5654 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5655 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5656 imm12);
5657
5658 if (!u_bit)
5659 imm12 = -1 * imm12;
5660
 5661 /* Rewrite the instruction "LDR Rt, [PC, #+/-imm12]" as:
5662
5663 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5664
 5665 LDR R0, [R2, R3]
5666
5667 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5668
5669
5670 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5671 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5672 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5673
5674 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5675
5676 pc_val = pc_val & 0xfffffffc;
5677
5678 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5679 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5680
5681 dsc->rd = rt;
5682
5683 dsc->u.ldst.xfersize = size;
5684 dsc->u.ldst.immed = 0;
5685 dsc->u.ldst.writeback = 0;
5686 dsc->u.ldst.restore_r4 = 0;
5687
 5688 /* LDR R0, [R2, R3] */
5689 dsc->modinsn[0] = 0xf852;
5690 dsc->modinsn[1] = 0x3;
5691 dsc->numinsns = 2;
5692
5693 dsc->cleanup = &cleanup_load;
5694
5695 return 0;
5696}
5697
5698static int
5699thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5700 uint16_t insn2, struct regcache *regs,
5701 struct displaced_step_closure *dsc,
5702 int writeback, int immed)
5703{
5704 unsigned int rt = bits (insn2, 12, 15);
5705 unsigned int rn = bits (insn1, 0, 3);
5706 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5707 /* In LDR (register), there is also a register Rm, which is not allowed to
5708 be PC, so we don't have to check it. */
5709
5710 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5711 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5712 dsc);
5713
5714 if (debug_displaced)
5715 fprintf_unfiltered (gdb_stdlog,
5716 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5717 rt, rn, insn1, insn2);
5718
5719 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5720 0, rt, rm, rn);
5721
5722 dsc->u.ldst.restore_r4 = 0;
5723
5724 if (immed)
5725 /* ldr[b]<cond> rt, [rn, #imm], etc.
5726 ->
5727 ldr[b]<cond> r0, [r2, #imm]. */
5728 {
5729 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5730 dsc->modinsn[1] = insn2 & 0x0fff;
5731 }
5732 else
5733 /* ldr[b]<cond> rt, [rn, rm], etc.
5734 ->
5735 ldr[b]<cond> r0, [r2, r3]. */
5736 {
5737 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5738 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5739 }
5740
5741 dsc->numinsns = 2;
5742
5743 return 0;
5744}
5745
5746
7ff120b4
YQ
5747static int
5748arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5749 struct regcache *regs,
5750 struct displaced_step_closure *dsc,
0f6f04ba 5751 int load, int size, int usermode)
7ff120b4
YQ
5752{
5753 int immed = !bit (insn, 25);
5754 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5755 unsigned int rt = bits (insn, 12, 15);
5756 unsigned int rn = bits (insn, 16, 19);
5757 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5758
5759 if (!insn_references_pc (insn, 0x000ff00ful))
5760 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5761
5762 if (debug_displaced)
5763 fprintf_unfiltered (gdb_stdlog,
5764 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5765 load ? (size == 1 ? "ldrb" : "ldr")
5766 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5767 rt, rn,
5768 (unsigned long) insn);
5769
0f6f04ba
YQ
5770 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5771 usermode, rt, rm, rn);
7ff120b4 5772
bf9f652a 5773 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5774 {
5775 dsc->u.ldst.restore_r4 = 0;
5776
5777 if (immed)
5778 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5779 ->
5780 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5781 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5782 else
5783 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5784 ->
5785 {ldr,str}[b]<cond> r0, [r2, r3]. */
5786 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5787 }
5788 else
5789 {
5790 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5791 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5792 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5793 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5794 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5795 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5796 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5797
5798 /* As above. */
5799 if (immed)
5800 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5801 else
5802 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5803
cca44b1b
JB
5804 dsc->numinsns = 6;
5805 }
5806
5807 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5808
5809 return 0;
5810}
5811
5812/* Cleanup LDM instructions with fully-populated register list. This is an
5813 unfortunate corner case: it's impossible to implement correctly by modifying
5814 the instruction. The issue is as follows: we have an instruction,
5815
5816 ldm rN, {r0-r15}
5817
5818 which we must rewrite to avoid loading PC. A possible solution would be to
5819 do the load in two halves, something like (with suitable cleanup
5820 afterwards):
5821
5822 mov r8, rN
5823 ldm[id][ab] r8!, {r0-r7}
5824 str r7, <temp>
5825 ldm[id][ab] r8, {r7-r14}
5826 <bkpt>
5827
5828 but at present there's no suitable place for <temp>, since the scratch space
5829 is overwritten before the cleanup routine is called. For now, we simply
5830 emulate the instruction. */
5831
5832static void
5833cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5834 struct displaced_step_closure *dsc)
5835{
cca44b1b
JB
5836 int inc = dsc->u.block.increment;
5837 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5838 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5839 uint32_t regmask = dsc->u.block.regmask;
5840 int regno = inc ? 0 : 15;
5841 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5842 int exception_return = dsc->u.block.load && dsc->u.block.user
5843 && (regmask & 0x8000) != 0;
36073a92 5844 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5845 int do_transfer = condition_true (dsc->u.block.cond, status);
5846 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5847
5848 if (!do_transfer)
5849 return;
5850
5851 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5852 sensible we can do here. Complain loudly. */
5853 if (exception_return)
5854 error (_("Cannot single-step exception return"));
5855
5856 /* We don't handle any stores here for now. */
5857 gdb_assert (dsc->u.block.load != 0);
5858
5859 if (debug_displaced)
5860 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5861 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5862 dsc->u.block.increment ? "inc" : "dec",
5863 dsc->u.block.before ? "before" : "after");
5864
5865 while (regmask)
5866 {
5867 uint32_t memword;
5868
5869 if (inc)
bf9f652a 5870 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5871 regno++;
5872 else
5873 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5874 regno--;
5875
5876 xfer_addr += bump_before;
5877
5878 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5879 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5880
5881 xfer_addr += bump_after;
5882
5883 regmask &= ~(1 << regno);
5884 }
5885
5886 if (dsc->u.block.writeback)
5887 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5888 CANNOT_WRITE_PC);
5889}
5890
5891/* Clean up an STM which included the PC in the register list. */
5892
5893static void
5894cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5895 struct displaced_step_closure *dsc)
5896{
36073a92 5897 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5898 int store_executed = condition_true (dsc->u.block.cond, status);
5899 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5900 CORE_ADDR stm_insn_addr;
5901 uint32_t pc_val;
5902 long offset;
5903 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5904
5905 /* If condition code fails, there's nothing else to do. */
5906 if (!store_executed)
5907 return;
5908
5909 if (dsc->u.block.increment)
5910 {
5911 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5912
5913 if (dsc->u.block.before)
5914 pc_stored_at += 4;
5915 }
5916 else
5917 {
5918 pc_stored_at = dsc->u.block.xfer_addr;
5919
5920 if (dsc->u.block.before)
5921 pc_stored_at -= 4;
5922 }
5923
5924 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5925 stm_insn_addr = dsc->scratch_base;
5926 offset = pc_val - stm_insn_addr;
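     /* For example, if the copied STM stored scratch_base + 8, OFFSET is 8
	and the write below replaces the stored word with insn_addr + 8,
	which is what the original STM would have stored.  */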
5927
5928 if (debug_displaced)
5929 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5930 "STM instruction\n", offset);
5931
5932 /* Rewrite the stored PC to the proper value for the non-displaced original
5933 instruction. */
5934 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5935 dsc->insn_addr + offset);
5936}
5937
5938/* Clean up an LDM which includes the PC in the register list. We clumped all
5939 the registers in the transferred list into a contiguous range r0...rX (to
5940 avoid loading PC directly and losing control of the debugged program), so we
5941 must undo that here. */
5942
5943static void
6e39997a 5944cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5945 struct regcache *regs,
5946 struct displaced_step_closure *dsc)
5947{
36073a92 5948 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5949 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5950 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5951 unsigned int regs_loaded = bitcount (mask);
5952 unsigned int num_to_shuffle = regs_loaded, clobbered;
5953
5954 /* The method employed here will fail if the register list is fully populated
5955 (we need to avoid loading PC directly). */
5956 gdb_assert (num_to_shuffle < 16);
5957
5958 if (!load_executed)
5959 return;
5960
5961 clobbered = (1 << num_to_shuffle) - 1;
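     /* Illustration: for "ldm r3, {r2, r9, pc}" the modified instruction
	loaded into r0-r2.  The loop below moves r2 -> PC, r1 -> r9 and
	r0 -> r2; r0 and r1 remain marked in CLOBBERED and are restored
	from dsc->tmp[] afterwards.  */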
5962
5963 while (num_to_shuffle > 0)
5964 {
5965 if ((mask & (1 << write_reg)) != 0)
5966 {
5967 unsigned int read_reg = num_to_shuffle - 1;
5968
5969 if (read_reg != write_reg)
5970 {
36073a92 5971 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
5972 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5973 if (debug_displaced)
5974 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5975 "loaded register r%d to r%d\n"), read_reg,
5976 write_reg);
5977 }
5978 else if (debug_displaced)
5979 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5980 "r%d already in the right place\n"),
5981 write_reg);
5982
5983 clobbered &= ~(1 << write_reg);
5984
5985 num_to_shuffle--;
5986 }
5987
5988 write_reg--;
5989 }
5990
5991 /* Restore any registers we scribbled over. */
5992 for (write_reg = 0; clobbered != 0; write_reg++)
5993 {
5994 if ((clobbered & (1 << write_reg)) != 0)
5995 {
5996 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5997 CANNOT_WRITE_PC);
5998 if (debug_displaced)
5999 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6000 "clobbered register r%d\n"), write_reg);
6001 clobbered &= ~(1 << write_reg);
6002 }
6003 }
6004
6005 /* Perform register writeback manually. */
6006 if (dsc->u.block.writeback)
6007 {
6008 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6009
6010 if (dsc->u.block.increment)
6011 new_rn_val += regs_loaded * 4;
6012 else
6013 new_rn_val -= regs_loaded * 4;
6014
6015 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6016 CANNOT_WRITE_PC);
6017 }
6018}
6019
6020/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6021 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6022
6023static int
7ff120b4
YQ
6024arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6025 struct regcache *regs,
6026 struct displaced_step_closure *dsc)
cca44b1b
JB
6027{
6028 int load = bit (insn, 20);
6029 int user = bit (insn, 22);
6030 int increment = bit (insn, 23);
6031 int before = bit (insn, 24);
6032 int writeback = bit (insn, 21);
6033 int rn = bits (insn, 16, 19);
cca44b1b 6034
0963b4bd
MS
6035 /* Block transfers which don't mention PC can be run directly
6036 out-of-line. */
bf9f652a 6037 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6038 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6039
bf9f652a 6040 if (rn == ARM_PC_REGNUM)
cca44b1b 6041 {
0963b4bd
MS
6042 warning (_("displaced: Unpredictable LDM or STM with "
6043 "base register r15"));
7ff120b4 6044 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6045 }
6046
6047 if (debug_displaced)
6048 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6049 "%.8lx\n", (unsigned long) insn);
6050
36073a92 6051 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6052 dsc->u.block.rn = rn;
6053
6054 dsc->u.block.load = load;
6055 dsc->u.block.user = user;
6056 dsc->u.block.increment = increment;
6057 dsc->u.block.before = before;
6058 dsc->u.block.writeback = writeback;
6059 dsc->u.block.cond = bits (insn, 28, 31);
6060
6061 dsc->u.block.regmask = insn & 0xffff;
6062
6063 if (load)
6064 {
6065 if ((insn & 0xffff) == 0xffff)
6066 {
6067 /* LDM with a fully-populated register list. This case is
6068 particularly tricky. Implement for now by fully emulating the
6069 instruction (which might not behave perfectly in all cases, but
6070 these instructions should be rare enough for that not to matter
6071 too much). */
6072 dsc->modinsn[0] = ARM_NOP;
6073
6074 dsc->cleanup = &cleanup_block_load_all;
6075 }
6076 else
6077 {
6078 /* LDM of a list of registers which includes PC. Implement by
6079 rewriting the list of registers to be transferred into a
6080 contiguous chunk r0...rX before doing the transfer, then shuffling
6081 registers into the correct places in the cleanup routine. */
6082 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6083 unsigned int num_in_list = bitcount (regmask), new_regmask;
6084 unsigned int i;
cca44b1b
JB
6085
6086 for (i = 0; i < num_in_list; i++)
36073a92 6087 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6088
6089 /* Writeback makes things complicated. We need to avoid clobbering
6090 the base register with one of the registers in our modified
6091 register list, but just using a different register can't work in
6092 all cases, e.g.:
6093
6094 ldm r14!, {r0-r13,pc}
6095
6096 which would need to be rewritten as:
6097
6098 ldm rN!, {r0-r14}
6099
6100 but that can't work, because there's no free register for N.
6101
6102 Solve this by turning off the writeback bit, and emulating
6103 writeback manually in the cleanup routine. */
6104
6105 if (writeback)
6106 insn &= ~(1 << 21);
6107
6108 new_regmask = (1 << num_in_list) - 1;
6109
6110 if (debug_displaced)
6111 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6112 "{..., pc}: original reg list %.4x, modified "
6113 "list %.4x\n"), rn, writeback ? "!" : "",
6114 (int) insn & 0xffff, new_regmask);
6115
6116 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6117
6118 dsc->cleanup = &cleanup_block_load_pc;
6119 }
6120 }
6121 else
6122 {
6123 /* STM of a list of registers which includes PC. Run the instruction
6124 as-is, but out of line: this will store the wrong value for the PC,
6125 so we must manually fix up the memory in the cleanup routine.
6126 Doing things this way has the advantage that we can auto-detect
6127 the offset of the PC write (which is architecture-dependent) in
6128 the cleanup routine. */
6129 dsc->modinsn[0] = insn;
6130
6131 dsc->cleanup = &cleanup_block_store_pc;
6132 }
6133
6134 return 0;
6135}
6136
34518530
YQ
6137static int
6138thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6139 struct regcache *regs,
6140 struct displaced_step_closure *dsc)
cca44b1b 6141{
34518530
YQ
6142 int rn = bits (insn1, 0, 3);
6143 int load = bit (insn1, 4);
6144 int writeback = bit (insn1, 5);
cca44b1b 6145
34518530
YQ
6146 /* Block transfers which don't mention PC can be run directly
6147 out-of-line. */
6148 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6149 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6150
34518530
YQ
6151 if (rn == ARM_PC_REGNUM)
6152 {
6153 warning (_("displaced: Unpredictable LDM or STM with "
6154 "base register r15"));
6155 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6156 "unpredictable ldm/stm", dsc);
6157 }
cca44b1b
JB
6158
6159 if (debug_displaced)
34518530
YQ
6160 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6161 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6162
34518530
YQ
6163 /* Clear bit 13, since it should always be zero. */
6164 dsc->u.block.regmask = (insn2 & 0xdfff);
6165 dsc->u.block.rn = rn;
cca44b1b 6166
34518530
YQ
6167 dsc->u.block.load = load;
6168 dsc->u.block.user = 0;
6169 dsc->u.block.increment = bit (insn1, 7);
6170 dsc->u.block.before = bit (insn1, 8);
6171 dsc->u.block.writeback = writeback;
6172 dsc->u.block.cond = INST_AL;
6173 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6174
34518530
YQ
6175 if (load)
6176 {
6177 if (dsc->u.block.regmask == 0xffff)
6178 {
6179 /* This cannot happen: bit 13 was cleared above, so the regmask is never 0xffff. */
6180 gdb_assert (0);
6181 }
6182 else
6183 {
6184 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6185 unsigned int num_in_list = bitcount (regmask), new_regmask;
6186 unsigned int i;
34518530
YQ
6187
6188 for (i = 0; i < num_in_list; i++)
6189 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6190
6191 if (writeback)
6192 insn1 &= ~(1 << 5);
6193
6194 new_regmask = (1 << num_in_list) - 1;
6195
6196 if (debug_displaced)
6197 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6198 "{..., pc}: original reg list %.4x, modified "
6199 "list %.4x\n"), rn, writeback ? "!" : "",
6200 (int) dsc->u.block.regmask, new_regmask);
6201
6202 dsc->modinsn[0] = insn1;
6203 dsc->modinsn[1] = (new_regmask & 0xffff);
6204 dsc->numinsns = 2;
6205
6206 dsc->cleanup = &cleanup_block_load_pc;
6207 }
6208 }
6209 else
6210 {
6211 dsc->modinsn[0] = insn1;
6212 dsc->modinsn[1] = insn2;
6213 dsc->numinsns = 2;
6214 dsc->cleanup = &cleanup_block_store_pc;
6215 }
6216 return 0;
6217}
6218
d9311bfa
AT
6219/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6220 This is used to avoid a dependency on BFD's bfd_endian enum. */
6221
6222ULONGEST
6223arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6224 int byte_order)
6225{
5f2dfcfd
AT
6226 return read_memory_unsigned_integer (memaddr, len,
6227 (enum bfd_endian) byte_order);
d9311bfa
AT
6228}
6229
6230/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6231
6232CORE_ADDR
6233arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6234 CORE_ADDR val)
6235{
6236 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6237}
6238
6239/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6240
e7cf25a8 6241static CORE_ADDR
553cb527 6242arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6243{
d9311bfa
AT
6244 return 0;
6245}
6246
6247/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6248
6249int
6250arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6251{
6252 return arm_is_thumb (self->regcache);
6253}
6254
6255/* single_step() is called just before we want to resume the inferior,
6256 if we want to single-step it but there is no hardware or kernel
6257 single-step support. We find the targets of the coming instructions
6258 and set breakpoints on them. */
6259
a0ff9e1a 6260std::vector<CORE_ADDR>
f5ea389a 6261arm_software_single_step (struct regcache *regcache)
d9311bfa 6262{
d9311bfa 6263 struct gdbarch *gdbarch = get_regcache_arch (regcache);
d9311bfa 6264 struct arm_get_next_pcs next_pcs_ctx;
d9311bfa
AT
6265
6266 arm_get_next_pcs_ctor (&next_pcs_ctx,
6267 &arm_get_next_pcs_ops,
6268 gdbarch_byte_order (gdbarch),
6269 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6270 0,
d9311bfa
AT
6271 regcache);
6272
a0ff9e1a 6273 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa 6274
a0ff9e1a
SM
6275 for (CORE_ADDR &pc_ref : next_pcs)
6276 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
d9311bfa 6277
93f9a11f 6278 return next_pcs;
d9311bfa
AT
6279}
6280
34518530
YQ
6281/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6282 for Linux, where some SVC instructions must be treated specially. */
6283
6284static void
6285cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6286 struct displaced_step_closure *dsc)
6287{
6288 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6289
6290 if (debug_displaced)
6291 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6292 "%.8lx\n", (unsigned long) resume_addr);
6293
6294 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6295}
6296
6297
6298/* Common copy routine for the svc instruction. */
6299
6300static int
6301install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6302 struct displaced_step_closure *dsc)
6303{
6304 /* Preparation: none.
6305 Insn: unmodified svc.
6306 Cleanup: pc <- insn_addr + insn_size. */
6307
6308 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6309 instruction. */
6310 dsc->wrote_to_pc = 1;
6311
6312 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6313 if (dsc->u.svc.copy_svc_os)
6314 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6315 else
6316 {
6317 dsc->cleanup = &cleanup_svc;
6318 return 0;
6319 }
34518530
YQ
6320}
6321
6322static int
6323arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6324 struct regcache *regs, struct displaced_step_closure *dsc)
6325{
6326
6327 if (debug_displaced)
6328 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6329 (unsigned long) insn);
6330
6331 dsc->modinsn[0] = insn;
6332
6333 return install_svc (gdbarch, regs, dsc);
6334}
6335
6336static int
6337thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6338 struct regcache *regs, struct displaced_step_closure *dsc)
6339{
6340
6341 if (debug_displaced)
6342 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6343 insn);
bd18283a 6344
34518530
YQ
6345 dsc->modinsn[0] = insn;
6346
6347 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6348}
6349
6350/* Copy undefined instructions. */
6351
6352static int
7ff120b4
YQ
6353arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6354 struct displaced_step_closure *dsc)
cca44b1b
JB
6355{
6356 if (debug_displaced)
0963b4bd
MS
6357 fprintf_unfiltered (gdb_stdlog,
6358 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6359 (unsigned long) insn);
6360
6361 dsc->modinsn[0] = insn;
6362
6363 return 0;
6364}
6365
34518530
YQ
6366static int
6367thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6368 struct displaced_step_closure *dsc)
6369{
6370
6371 if (debug_displaced)
6372 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6373 "%.4x %.4x\n", (unsigned short) insn1,
6374 (unsigned short) insn2);
6375
6376 dsc->modinsn[0] = insn1;
6377 dsc->modinsn[1] = insn2;
6378 dsc->numinsns = 2;
6379
6380 return 0;
6381}
6382
cca44b1b
JB
6383/* Copy unpredictable instructions. */
6384
6385static int
7ff120b4
YQ
6386arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6387 struct displaced_step_closure *dsc)
cca44b1b
JB
6388{
6389 if (debug_displaced)
6390 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6391 "%.8lx\n", (unsigned long) insn);
6392
6393 dsc->modinsn[0] = insn;
6394
6395 return 0;
6396}
6397
6398/* The decode_* functions are instruction decoding helpers. They mostly follow
6399 the presentation in the ARM ARM. */
6400
6401static int
7ff120b4
YQ
6402arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6403 struct regcache *regs,
6404 struct displaced_step_closure *dsc)
cca44b1b
JB
6405{
6406 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6407 unsigned int rn = bits (insn, 16, 19);
6408
6409 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6410 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6411 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6412 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6413 else if ((op1 & 0x60) == 0x20)
7ff120b4 6414 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6415 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6416 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6417 dsc);
cca44b1b 6418 else if ((op1 & 0x77) == 0x41)
7ff120b4 6419 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6420 else if ((op1 & 0x77) == 0x45)
7ff120b4 6421 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6422 else if ((op1 & 0x77) == 0x51)
6423 {
6424 if (rn != 0xf)
7ff120b4 6425 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6426 else
7ff120b4 6427 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6428 }
6429 else if ((op1 & 0x77) == 0x55)
7ff120b4 6430 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6431 else if (op1 == 0x57)
6432 switch (op2)
6433 {
7ff120b4
YQ
6434 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6435 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6436 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6437 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6438 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6439 }
6440 else if ((op1 & 0x63) == 0x43)
7ff120b4 6441 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6442 else if ((op2 & 0x1) == 0x0)
6443 switch (op1 & ~0x80)
6444 {
6445 case 0x61:
7ff120b4 6446 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6447 case 0x65:
7ff120b4 6448 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6449 case 0x71: case 0x75:
6450 /* pld/pldw reg. */
7ff120b4 6451 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6452 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6453 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6454 default:
7ff120b4 6455 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6456 }
6457 else
7ff120b4 6458 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6459}
6460
6461static int
7ff120b4
YQ
6462arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6463 struct regcache *regs,
6464 struct displaced_step_closure *dsc)
cca44b1b
JB
6465{
6466 if (bit (insn, 27) == 0)
7ff120b4 6467 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6468 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6469 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6470 {
6471 case 0x0: case 0x2:
7ff120b4 6472 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6473
6474 case 0x1: case 0x3:
7ff120b4 6475 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6476
6477 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6478 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6479
6480 case 0x8:
6481 switch ((insn & 0xe00000) >> 21)
6482 {
6483 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6484 /* stc/stc2. */
7ff120b4 6485 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6486
6487 case 0x2:
7ff120b4 6488 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6489
6490 default:
7ff120b4 6491 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6492 }
6493
6494 case 0x9:
6495 {
6496 int rn_f = (bits (insn, 16, 19) == 0xf);
6497 switch ((insn & 0xe00000) >> 21)
6498 {
6499 case 0x1: case 0x3:
6500 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6501 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6502 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6503
6504 case 0x2:
7ff120b4 6505 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6506
6507 case 0x4: case 0x5: case 0x6: case 0x7:
6508 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6509 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6510 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6511
6512 default:
7ff120b4 6513 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6514 }
6515 }
6516
6517 case 0xa:
7ff120b4 6518 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6519
6520 case 0xb:
6521 if (bits (insn, 16, 19) == 0xf)
6522 /* ldc/ldc2 lit. */
7ff120b4 6523 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6524 else
7ff120b4 6525 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6526
6527 case 0xc:
6528 if (bit (insn, 4))
7ff120b4 6529 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6530 else
7ff120b4 6531 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6532
6533 case 0xd:
6534 if (bit (insn, 4))
7ff120b4 6535 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6536 else
7ff120b4 6537 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6538
6539 default:
7ff120b4 6540 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6541 }
6542}
6543
6544/* Decode miscellaneous instructions in dp/misc encoding space. */
6545
6546static int
7ff120b4
YQ
6547arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6548 struct regcache *regs,
6549 struct displaced_step_closure *dsc)
cca44b1b
JB
6550{
6551 unsigned int op2 = bits (insn, 4, 6);
6552 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6553
6554 switch (op2)
6555 {
6556 case 0x0:
7ff120b4 6557 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6558
6559 case 0x1:
6560 if (op == 0x1) /* bx. */
7ff120b4 6561 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6562 else if (op == 0x3)
7ff120b4 6563 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6564 else
7ff120b4 6565 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6566
6567 case 0x2:
6568 if (op == 0x1)
6569 /* Not really supported. */
7ff120b4 6570 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6571 else
7ff120b4 6572 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6573
6574 case 0x3:
6575 if (op == 0x1)
7ff120b4 6576 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6577 regs, dsc); /* blx register. */
cca44b1b 6578 else
7ff120b4 6579 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6580
6581 case 0x5:
7ff120b4 6582 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6583
6584 case 0x7:
6585 if (op == 0x1)
7ff120b4 6586 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6587 else if (op == 0x3)
6588 /* Not really supported. */
7ff120b4 6589 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6590
6591 default:
7ff120b4 6592 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6593 }
6594}
6595
6596static int
7ff120b4
YQ
6597arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6598 struct regcache *regs,
6599 struct displaced_step_closure *dsc)
cca44b1b
JB
6600{
6601 if (bit (insn, 25))
6602 switch (bits (insn, 20, 24))
6603 {
6604 case 0x10:
7ff120b4 6605 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6606
6607 case 0x14:
7ff120b4 6608 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6609
6610 case 0x12: case 0x16:
7ff120b4 6611 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6612
6613 default:
7ff120b4 6614 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6615 }
6616 else
6617 {
6618 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6619
6620 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6621 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6622 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6623 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6624 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6625 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6626 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6627 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6628 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6629 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6630 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6631 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6632 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6633 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6634 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6635 dsc);
cca44b1b
JB
6636 }
6637
6638 /* Should be unreachable. */
6639 return 1;
6640}
6641
6642static int
7ff120b4
YQ
6643arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6644 struct regcache *regs,
6645 struct displaced_step_closure *dsc)
cca44b1b
JB
6646{
6647 int a = bit (insn, 25), b = bit (insn, 4);
6648 uint32_t op1 = bits (insn, 20, 24);
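     /* This follows the ARM ARM "load/store word and unsigned byte" table:
	A (bit 25) selects the register-offset form, and the OP1 patterns
	below pick str/ldr/strb/ldrb; the (op1 & 0x17) == 0x02/0x03/0x06/0x07
	rows are the unprivileged T variants, passed as the final "usermode"
	argument.  */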
cca44b1b
JB
6649
6650 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6651 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6652 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6653 else if ((!a && (op1 & 0x17) == 0x02)
6654 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6655 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6656 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6657 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6658 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6659 else if ((!a && (op1 & 0x17) == 0x03)
6660 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6661 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6662 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6663 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6664 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6665 else if ((!a && (op1 & 0x17) == 0x06)
6666 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6667 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6668 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6669 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6670 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6671 else if ((!a && (op1 & 0x17) == 0x07)
6672 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6673 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6674
6675 /* Should be unreachable. */
6676 return 1;
6677}
6678
6679static int
7ff120b4
YQ
6680arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6681 struct displaced_step_closure *dsc)
cca44b1b
JB
6682{
6683 switch (bits (insn, 20, 24))
6684 {
6685 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6686 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6687
6688 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6689 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6690
6691 case 0x08: case 0x09: case 0x0a: case 0x0b:
6692 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6693 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6694 "decode/pack/unpack/saturate/reverse", dsc);
6695
6696 case 0x18:
6697 if (bits (insn, 5, 7) == 0) /* op2. */
6698 {
6699 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6700 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6701 else
7ff120b4 6702 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6703 }
6704 else
7ff120b4 6705 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6706
6707 case 0x1a: case 0x1b:
6708 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6709 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6710 else
7ff120b4 6711 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6712
6713 case 0x1c: case 0x1d:
6714 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6715 {
6716 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6717 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6718 else
7ff120b4 6719 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6720 }
6721 else
7ff120b4 6722 return arm_copy_undef (gdbarch, insn, dsc);
6723
6724 case 0x1e: case 0x1f:
6725 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6726 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6727 else
7ff120b4 6728 return arm_copy_undef (gdbarch, insn, dsc);
6729 }
6730
6731 /* Should be unreachable. */
6732 return 1;
6733}
6734
6735static int
615234c1 6736arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6737 struct regcache *regs,
6738 struct displaced_step_closure *dsc)
6739{
6740 if (bit (insn, 25))
7ff120b4 6741 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6742 else
7ff120b4 6743 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6744}
6745
6746static int
6747arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6748 struct regcache *regs,
6749 struct displaced_step_closure *dsc)
6750{
6751 unsigned int opcode = bits (insn, 20, 24);
6752
6753 switch (opcode)
6754 {
6755 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6756 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6757
6758 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6759 case 0x12: case 0x16:
7ff120b4 6760 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6761
6762 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6763 case 0x13: case 0x17:
7ff120b4 6764 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6765
6766 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6767 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6768 /* Note: no writeback for these instructions. Bit 25 will always be
6769 zero though (via caller), so the following works OK. */
7ff120b4 6770 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6771 }
6772
6773 /* Should be unreachable. */
6774 return 1;
6775}
6776
6777/* Decode shifted register instructions. */
6778
6779static int
6780thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6781 uint16_t insn2, struct regcache *regs,
6782 struct displaced_step_closure *dsc)
6783{
6784 /* PC is only allowed to be used in instruction MOV. */
6785
6786 unsigned int op = bits (insn1, 5, 8);
6787 unsigned int rn = bits (insn1, 0, 3);
6788
6789 if (op == 0x2 && rn == 0xf) /* MOV */
6790 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6791 else
6792 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6793 "dp (shift reg)", dsc);
6794}
6795
6796
6797/* Decode extension register load/store. Exactly the same as
6798 arm_decode_ext_reg_ld_st. */
6799
6800static int
6801thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6802 uint16_t insn2, struct regcache *regs,
6803 struct displaced_step_closure *dsc)
6804{
6805 unsigned int opcode = bits (insn1, 4, 8);
6806
6807 switch (opcode)
6808 {
6809 case 0x04: case 0x05:
6810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6811 "vfp/neon vmov", dsc);
6812
6813 case 0x08: case 0x0c: /* 01x00 */
6814 case 0x0a: case 0x0e: /* 01x10 */
6815 case 0x12: case 0x16: /* 10x10 */
6816 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6817 "vfp/neon vstm/vpush", dsc);
6818
6819 case 0x09: case 0x0d: /* 01x01 */
6820 case 0x0b: case 0x0f: /* 01x11 */
6821 case 0x13: case 0x17: /* 10x11 */
6822 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6823 "vfp/neon vldm/vpop", dsc);
6824
6825 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6827 "vstr", dsc);
6828 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6829 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6830 }
6831
6832 /* Should be unreachable. */
6833 return 1;
6834}
6835
cca44b1b 6836static int
12545665 6837arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6838 struct regcache *regs, struct displaced_step_closure *dsc)
6839{
6840 unsigned int op1 = bits (insn, 20, 25);
6841 int op = bit (insn, 4);
6842 unsigned int coproc = bits (insn, 8, 11);
6843
6844 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6845 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6846 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6847 && (coproc & 0xe) != 0xa)
6848 /* stc/stc2. */
7ff120b4 6849 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6850 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6851 && (coproc & 0xe) != 0xa)
6852 /* ldc/ldc2 imm/lit. */
7ff120b4 6853 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6854 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6855 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6856 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6857 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6858 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6859 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6860 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6861 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6862 else if ((op1 & 0x30) == 0x20 && !op)
6863 {
6864 if ((coproc & 0xe) == 0xa)
7ff120b4 6865 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6866 else
7ff120b4 6867 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6868 }
6869 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6870 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6871 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6872 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6873 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6874 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6875 else if ((op1 & 0x30) == 0x30)
7ff120b4 6876 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6877 else
7ff120b4 6878 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6879}
6880
6881static int
6882thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6883 uint16_t insn2, struct regcache *regs,
6884 struct displaced_step_closure *dsc)
6885{
6886 unsigned int coproc = bits (insn2, 8, 11);
6887 unsigned int bit_5_8 = bits (insn1, 5, 8);
6888 unsigned int bit_9 = bit (insn1, 9);
6889 unsigned int bit_4 = bit (insn1, 4);
6890
6891 if (bit_9 == 0)
6892 {
6893 if (bit_5_8 == 2)
6894 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6895 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6896 dsc);
6897 else if (bit_5_8 == 0) /* UNDEFINED. */
6898 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6899 else
6900 {
 6901	      /* coproc is 101x.  SIMD/VFP ext registers load/store.  */
6902 if ((coproc & 0xe) == 0xa)
6903 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6904 dsc);
6905 else /* coproc is not 101x. */
6906 {
6907 if (bit_4 == 0) /* STC/STC2. */
6908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6909 "stc/stc2", dsc);
 6910	      else /* LDC/LDC2 {literal, immediate}.  */
6911 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6912 regs, dsc);
6913 }
6914 }
6915 }
6916 else
6917 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6918
6919 return 0;
6920}
6921
6922static void
6923install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6924 struct displaced_step_closure *dsc, int rd)
6925{
6926 /* ADR Rd, #imm
6927
6928 Rewrite as:
6929
6930 Preparation: Rd <- PC
6931 Insn: ADD Rd, #imm
6932 Cleanup: Null.
6933 */
6934
6935 /* Rd <- PC */
6936 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6937 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6938}
6939
6940static int
6941thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6942 struct displaced_step_closure *dsc,
6943 int rd, unsigned int imm)
6944{
6945
6946 /* Encoding T2: ADDS Rd, #imm */
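  /* For example, with RD == 2 and IMM == 8, the opcode below works out to
     0x3208, i.e. "adds r2, #8" (illustrative values only).  */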
6947 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6948
6949 install_pc_relative (gdbarch, regs, dsc, rd);
6950
6951 return 0;
6952}
6953
6954static int
6955thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6956 struct regcache *regs,
6957 struct displaced_step_closure *dsc)
6958{
6959 unsigned int rd = bits (insn, 8, 10);
6960 unsigned int imm8 = bits (insn, 0, 7);
6961
6962 if (debug_displaced)
6963 fprintf_unfiltered (gdb_stdlog,
6964 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6965 rd, imm8, insn);
6966
6967 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6968}
6969
6970static int
6971thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6972 uint16_t insn2, struct regcache *regs,
6973 struct displaced_step_closure *dsc)
6974{
6975 unsigned int rd = bits (insn2, 8, 11);
 6976  /* The immediate is encoded identically in ADR, ADD and SUB, so simply
 6977     extract the raw immediate encoding rather than computing the immediate
 6978     value.  When generating the ADD or SUB instruction, the immediate can
 6979     then be ORed straight into the encoding.  */
6980 unsigned int imm_3_8 = insn2 & 0x70ff;
6981 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6982
6983 if (debug_displaced)
6984 fprintf_unfiltered (gdb_stdlog,
6985 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6986 rd, imm_i, imm_3_8, insn1, insn2);
6987
 6988  if (bit (insn1, 7)) /* Encoding T2 of ADR (the SUB form).  */
6989 {
6990 /* Encoding T3: SUB Rd, Rd, #imm */
6991 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6992 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6993 }
 6994  else /* Encoding T3 of ADR (the ADD form).  */
6995 {
6996 /* Encoding T3: ADD Rd, Rd, #imm */
6997 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6998 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6999 }
7000 dsc->numinsns = 2;
7001
7002 install_pc_relative (gdbarch, regs, dsc, rd);
7003
7004 return 0;
7005}
7006
7007static int
615234c1 7008thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7009 struct regcache *regs,
7010 struct displaced_step_closure *dsc)
7011{
7012 unsigned int rt = bits (insn1, 8, 10);
7013 unsigned int pc;
7014 int imm8 = (bits (insn1, 0, 7) << 2);
7015
7016 /* LDR Rd, #imm8
7017
 7018     Rewrite as:
7019
7020 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7021
7022 Insn: LDR R0, [R2, R3];
7023 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7024
7025 if (debug_displaced)
7026 fprintf_unfiltered (gdb_stdlog,
7027 "displaced: copying thumb ldr r%d [pc #%d]\n"
7028 , rt, imm8);
7029
7030 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7031 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7032 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7033 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7034 /* The assembler calculates the required value of the offset from the
7035 Align(PC,4) value of this instruction to the label. */
7036 pc = pc & 0xfffffffc;
7037
7038 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7039 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7040
7041 dsc->rd = rt;
7042 dsc->u.ldst.xfersize = 4;
7043 dsc->u.ldst.rn = 0;
7044 dsc->u.ldst.immed = 0;
7045 dsc->u.ldst.writeback = 0;
7046 dsc->u.ldst.restore_r4 = 0;
7047
7048 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
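  /* 0x58d0 is the Thumb LDR (register) encoding with Rt == r0, Rn == r2 and
     Rm == r3, so together with the preparation above it loads the word at
     Align(PC,4) + imm8 into r0.  */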
7049
7050 dsc->cleanup = &cleanup_load;
7051
7052 return 0;
7053}
7054
 7055/* Copy Thumb cbnz/cbz instruction.  */
7056
7057static int
7058thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7059 struct regcache *regs,
7060 struct displaced_step_closure *dsc)
7061{
7062 int non_zero = bit (insn1, 11);
7063 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
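  /* Despite its name, IMM5 above is the complete branch offset i:imm5:'0'
     (an even value in the range 0-126), already shifted into place.  */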
7064 CORE_ADDR from = dsc->insn_addr;
7065 int rn = bits (insn1, 0, 2);
7066 int rn_val = displaced_read_reg (regs, dsc, rn);
7067
7068 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
 7069  /* CBNZ and CBZ do not affect the condition flags.  If the condition is
 7070     true, set it to INST_AL so cleanup_branch knows the branch is taken;
 7071     otherwise leave it alone and cleanup_branch will do nothing.  */
7072 if (dsc->u.branch.cond)
7073 {
7074 dsc->u.branch.cond = INST_AL;
7075 dsc->u.branch.dest = from + 4 + imm5;
7076 }
7077 else
7078 dsc->u.branch.dest = from + 2;
7079
7080 dsc->u.branch.link = 0;
7081 dsc->u.branch.exchange = 0;
7082
7083 if (debug_displaced)
7084 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7085 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7086 rn, rn_val, insn1, dsc->u.branch.dest);
7087
7088 dsc->modinsn[0] = THUMB_NOP;
7089
7090 dsc->cleanup = &cleanup_branch;
7091 return 0;
7092}
7093
7094/* Copy Table Branch Byte/Halfword */
7095static int
7096thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7097 uint16_t insn2, struct regcache *regs,
7098 struct displaced_step_closure *dsc)
7099{
7100 ULONGEST rn_val, rm_val;
7101 int is_tbh = bit (insn2, 4);
7102 CORE_ADDR halfwords = 0;
7103 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7104
7105 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7106 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7107
7108 if (is_tbh)
7109 {
7110 gdb_byte buf[2];
7111
7112 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7113 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7114 }
7115 else
7116 {
7117 gdb_byte buf[1];
7118
7119 target_read_memory (rn_val + rm_val, buf, 1);
7120 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7121 }
7122
7123 if (debug_displaced)
7124 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7125 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7126 (unsigned int) rn_val, (unsigned int) rm_val,
7127 (unsigned int) halfwords);
7128
7129 dsc->u.branch.cond = INST_AL;
7130 dsc->u.branch.link = 0;
7131 dsc->u.branch.exchange = 0;
7132 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7133
7134 dsc->cleanup = &cleanup_branch;
7135
7136 return 0;
7137}
7138
7139static void
7140cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7141 struct displaced_step_closure *dsc)
7142{
7143 /* PC <- r7 */
7144 int val = displaced_read_reg (regs, dsc, 7);
7145 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7146
7147 /* r7 <- r8 */
7148 val = displaced_read_reg (regs, dsc, 8);
7149 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7150
7151 /* r8 <- tmp[0] */
7152 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7153
7154}
7155
7156static int
615234c1 7157thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7158 struct regcache *regs,
7159 struct displaced_step_closure *dsc)
7160{
7161 dsc->u.block.regmask = insn1 & 0x00ff;
7162
7163 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7164 to :
7165
7166 (1) register list is full, that is, r0-r7 are used.
7167 Prepare: tmp[0] <- r8
7168
7169 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7170 MOV r8, r7; Move value of r7 to r8;
7171 POP {r7}; Store PC value into r7.
7172
7173 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7174
7175 (2) register list is not full, supposing there are N registers in
7176 register list (except PC, 0 <= N <= 7).
7177 Prepare: for each i, 0 - N, tmp[i] <- ri.
7178
7179 POP {r0, r1, ...., rN};
7180
7181 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7182 from tmp[] properly.
7183 */
7184 if (debug_displaced)
7185 fprintf_unfiltered (gdb_stdlog,
7186 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7187 dsc->u.block.regmask, insn1);
7188
7189 if (dsc->u.block.regmask == 0xff)
7190 {
7191 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7192
7193 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7194 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7195 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
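      /* Clearing bit 8 of the original opcode (insn1 & 0xfeff) drops PC from
	 the register list; 0x46b8 is "mov r8, r7" and 0xbc80 is "pop {r7}",
	 so the popped PC value ends up in r7 for the cleanup routine above.  */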
7196
7197 dsc->numinsns = 3;
7198 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7199 }
7200 else
7201 {
7202 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7203 unsigned int i;
7204 unsigned int new_regmask;
7205
7206 for (i = 0; i < num_in_list + 1; i++)
7207 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7208
7209 new_regmask = (1 << (num_in_list + 1)) - 1;
7210
7211 if (debug_displaced)
7212 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7213 "{..., pc}: original reg list %.4x,"
7214 " modified list %.4x\n"),
7215 (int) dsc->u.block.regmask, new_regmask);
7216
7217 dsc->u.block.regmask |= 0x8000;
7218 dsc->u.block.writeback = 0;
7219 dsc->u.block.cond = INST_AL;
7220
7221 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7222
7223 dsc->cleanup = &cleanup_block_load_pc;
7224 }
7225
7226 return 0;
7227}
7228
7229static void
7230thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7231 struct regcache *regs,
7232 struct displaced_step_closure *dsc)
7233{
7234 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7235 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7236 int err = 0;
7237
7238 /* 16-bit thumb instructions. */
7239 switch (op_bit_12_15)
7240 {
 7241    /* Shift (immediate), add, subtract, move and compare.  */
7242 case 0: case 1: case 2: case 3:
7243 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7244 "shift/add/sub/mov/cmp",
7245 dsc);
7246 break;
7247 case 4:
7248 switch (op_bit_10_11)
7249 {
7250 case 0: /* Data-processing */
7251 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7252 "data-processing",
7253 dsc);
7254 break;
7255 case 1: /* Special data instructions and branch and exchange. */
7256 {
7257 unsigned short op = bits (insn1, 7, 9);
7258 if (op == 6 || op == 7) /* BX or BLX */
7259 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7260 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7261 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7262 else
7263 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7264 dsc);
7265 }
7266 break;
7267 default: /* LDR (literal) */
7268 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7269 }
7270 break;
7271 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7272 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7273 break;
7274 case 10:
7275 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7276 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7277 else /* Generate SP-relative address */
7278 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7279 break;
7280 case 11: /* Misc 16-bit instructions */
7281 {
7282 switch (bits (insn1, 8, 11))
7283 {
7284 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7285 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7286 break;
7287 case 12: case 13: /* POP */
7288 if (bit (insn1, 8)) /* PC is in register list. */
7289 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7290 else
7291 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7292 break;
7293 case 15: /* If-Then, and hints */
7294 if (bits (insn1, 0, 3))
 7295	    /* If-Then makes up to four following instructions conditional.
 7296	       The IT instruction itself is not conditional, so handle it as an
 7297	       ordinary unmodified instruction.  */
7298 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7299 dsc);
7300 else
7301 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7302 break;
7303 default:
7304 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7305 }
7306 }
7307 break;
7308 case 12:
7309 if (op_bit_10_11 < 2) /* Store multiple registers */
7310 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7311 else /* Load multiple registers */
7312 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7313 break;
7314 case 13: /* Conditional branch and supervisor call */
7315 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7316 err = thumb_copy_b (gdbarch, insn1, dsc);
7317 else
7318 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7319 break;
7320 case 14: /* Unconditional branch */
7321 err = thumb_copy_b (gdbarch, insn1, dsc);
7322 break;
7323 default:
7324 err = 1;
7325 }
7326
7327 if (err)
7328 internal_error (__FILE__, __LINE__,
7329 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7330}
7331
7332static int
7333decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7334 uint16_t insn1, uint16_t insn2,
7335 struct regcache *regs,
7336 struct displaced_step_closure *dsc)
7337{
7338 int rt = bits (insn2, 12, 15);
7339 int rn = bits (insn1, 0, 3);
7340 int op1 = bits (insn1, 7, 8);
7341
7342 switch (bits (insn1, 5, 6))
7343 {
7344 case 0: /* Load byte and memory hints */
7345 if (rt == 0xf) /* PLD/PLI */
7346 {
7347 if (rn == 0xf)
7348 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7349 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7350 else
7351 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7352 "pli/pld", dsc);
7353 }
7354 else
7355 {
7356 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7357 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7358 1);
7359 else
7360 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7361 "ldrb{reg, immediate}/ldrbt",
7362 dsc);
7363 }
7364
7365 break;
7366 case 1: /* Load halfword and memory hints. */
7367 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7368 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7369 "pld/unalloc memhint", dsc);
7370 else
7371 {
7372 if (rn == 0xf)
7373 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7374 2);
7375 else
7376 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7377 "ldrh/ldrht", dsc);
7378 }
7379 break;
7380 case 2: /* Load word */
7381 {
7382 int insn2_bit_8_11 = bits (insn2, 8, 11);
7383
7384 if (rn == 0xf)
7385 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7386 else if (op1 == 0x1) /* Encoding T3 */
7387 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7388 0, 1);
7389 else /* op1 == 0x0 */
7390 {
7391 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7392 /* LDR (immediate) */
7393 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7394 dsc, bit (insn2, 8), 1);
7395 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7396 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7397 "ldrt", dsc);
7398 else
7399 /* LDR (register) */
7400 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7401 dsc, 0, 0);
7402 }
7403 break;
7404 }
7405 default:
7406 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7407 break;
7408 }
7409 return 0;
7410}
7411
7412static void
7413thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7414 uint16_t insn2, struct regcache *regs,
7415 struct displaced_step_closure *dsc)
7416{
7417 int err = 0;
7418 unsigned short op = bit (insn2, 15);
7419 unsigned int op1 = bits (insn1, 11, 12);
7420
7421 switch (op1)
7422 {
7423 case 1:
7424 {
7425 switch (bits (insn1, 9, 10))
7426 {
7427 case 0:
7428 if (bit (insn1, 6))
7429 {
 7430	      /* Load/store {dual, exclusive}, table branch.  */
7431 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7432 && bits (insn2, 5, 7) == 0)
7433 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7434 dsc);
7435 else
 7436		/* PC is not allowed to be used in load/store {dual, exclusive}
 7437		   instructions.  */
7438 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7439 "load/store dual/ex", dsc);
7440 }
7441 else /* load/store multiple */
7442 {
7443 switch (bits (insn1, 7, 8))
7444 {
7445 case 0: case 3: /* SRS, RFE */
7446 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7447 "srs/rfe", dsc);
7448 break;
7449 case 1: case 2: /* LDM/STM/PUSH/POP */
7450 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7451 break;
7452 }
7453 }
7454 break;
7455
7456 case 1:
7457 /* Data-processing (shift register). */
7458 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7459 dsc);
7460 break;
7461 default: /* Coprocessor instructions. */
7462 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7463 break;
7464 }
7465 break;
7466 }
7467 case 2: /* op1 = 2 */
7468 if (op) /* Branch and misc control. */
7469 {
7470 if (bit (insn2, 14) /* BLX/BL */
7471 || bit (insn2, 12) /* Unconditional branch */
7472 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7473 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7474 else
7475 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7476 "misc ctrl", dsc);
7477 }
7478 else
7479 {
7480 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7481 {
7482 int op = bits (insn1, 4, 8);
7483 int rn = bits (insn1, 0, 3);
7484 if ((op == 0 || op == 0xa) && rn == 0xf)
7485 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7486 regs, dsc);
7487 else
7488 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7489 "dp/pb", dsc);
7490 }
 7491	  else /* Data processing (modified immediate) */
7492 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7493 "dp/mi", dsc);
7494 }
7495 break;
7496 case 3: /* op1 = 3 */
7497 switch (bits (insn1, 9, 10))
7498 {
7499 case 0:
7500 if (bit (insn1, 4))
7501 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7502 regs, dsc);
7503 else /* NEON Load/Store and Store single data item */
7504 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7505 "neon elt/struct load/store",
7506 dsc);
7507 break;
7508 case 1: /* op1 = 3, bits (9, 10) == 1 */
7509 switch (bits (insn1, 7, 8))
7510 {
7511 case 0: case 1: /* Data processing (register) */
7512 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7513 "dp(reg)", dsc);
7514 break;
7515 case 2: /* Multiply and absolute difference */
7516 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7517 "mul/mua/diff", dsc);
7518 break;
7519 case 3: /* Long multiply and divide */
7520 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7521 "lmul/lmua", dsc);
7522 break;
7523 }
7524 break;
7525 default: /* Coprocessor instructions */
7526 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7527 break;
7528 }
7529 break;
7530 default:
7531 err = 1;
7532 }
7533
7534 if (err)
7535 internal_error (__FILE__, __LINE__,
7536 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7537
7538}
7539
7540static void
7541thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7542 struct regcache *regs,
7543 struct displaced_step_closure *dsc)
7544{
7545 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7546 uint16_t insn1
7547 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7548
7549 if (debug_displaced)
7550 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7551 "at %.8lx\n", insn1, (unsigned long) from);
7552
7553 dsc->is_thumb = 1;
7554 dsc->insn_size = thumb_insn_size (insn1);
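  /* A first halfword whose top five bits are 0b11101, 0b11110 or 0b11111
     starts a 32-bit Thumb-2 encoding; anything else is a 16-bit
     instruction.  This is the distinction thumb_insn_size makes.  */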
7555 if (thumb_insn_size (insn1) == 4)
7556 {
7557 uint16_t insn2
7558 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7559 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7560 }
7561 else
7562 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7563}
7564
cca44b1b 7565void
7566arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7567 CORE_ADDR to, struct regcache *regs,
7568 struct displaced_step_closure *dsc)
7569{
7570 int err = 0;
7571 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7572 uint32_t insn;
7573
7574 /* Most displaced instructions use a 1-instruction scratch space, so set this
7575 here and override below if/when necessary. */
7576 dsc->numinsns = 1;
7577 dsc->insn_addr = from;
7578 dsc->scratch_base = to;
7579 dsc->cleanup = NULL;
7580 dsc->wrote_to_pc = 0;
7581
b434a28f 7582 if (!displaced_in_arm_mode (regs))
12545665 7583 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7584
7585 dsc->is_thumb = 0;
7586 dsc->insn_size = 4;
7587 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7588 if (debug_displaced)
7589 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7590 "at %.8lx\n", (unsigned long) insn,
7591 (unsigned long) from);
7592
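  /* NV-space (unconditional) instructions are handled first; the switch
     below then dispatches on bits 27:25 of the instruction combined with
     bit 4, which select the major instruction class (data processing,
     load/store, media, branch/block transfer, coprocessor/supervisor).  */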
cca44b1b 7593 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7594 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7595 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7596 {
7597 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7598 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7599 break;
7600
7601 case 0x4: case 0x5: case 0x6:
7ff120b4 7602 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7603 break;
7604
7605 case 0x7:
7ff120b4 7606 err = arm_decode_media (gdbarch, insn, dsc);
7607 break;
7608
7609 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7610 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7611 break;
7612
7613 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7614 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7615 break;
7616 }
7617
7618 if (err)
7619 internal_error (__FILE__, __LINE__,
7620 _("arm_process_displaced_insn: Instruction decode error"));
7621}
7622
7623/* Actually set up the scratch space for a displaced instruction. */
7624
7625void
7626arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7627 CORE_ADDR to, struct displaced_step_closure *dsc)
7628{
7629 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7630 unsigned int i, len, offset;
cca44b1b 7631 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7632 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7633 const gdb_byte *bkp_insn;
cca44b1b 7634
4db71c0b 7635 offset = 0;
7636 /* Poke modified instruction(s). */
7637 for (i = 0; i < dsc->numinsns; i++)
7638 {
7639 if (debug_displaced)
7640 {
7641 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7642 if (size == 4)
7643 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7644 dsc->modinsn[i]);
7645 else if (size == 2)
7646 fprintf_unfiltered (gdb_stdlog, "%.4x",
7647 (unsigned short)dsc->modinsn[i]);
7648
7649 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7650 (unsigned long) to + offset);
7651
7652 }
7653 write_memory_unsigned_integer (to + offset, size,
7654 byte_order_for_code,
cca44b1b 7655 dsc->modinsn[i]);
7656 offset += size;
7657 }
7658
7659 /* Choose the correct breakpoint instruction. */
7660 if (dsc->is_thumb)
7661 {
7662 bkp_insn = tdep->thumb_breakpoint;
7663 len = tdep->thumb_breakpoint_size;
7664 }
7665 else
7666 {
7667 bkp_insn = tdep->arm_breakpoint;
7668 len = tdep->arm_breakpoint_size;
7669 }
7670
7671 /* Put breakpoint afterwards. */
4db71c0b 7672 write_memory (to + offset, bkp_insn, len);
7673
7674 if (debug_displaced)
7675 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7676 paddress (gdbarch, from), paddress (gdbarch, to));
7677}
7678
7679/* Entry point for cleaning things up after a displaced instruction has been
7680 single-stepped. */
7681
7682void
7683arm_displaced_step_fixup (struct gdbarch *gdbarch,
7684 struct displaced_step_closure *dsc,
7685 CORE_ADDR from, CORE_ADDR to,
7686 struct regcache *regs)
7687{
7688 if (dsc->cleanup)
7689 dsc->cleanup (gdbarch, regs, dsc);
7690
7691 if (!dsc->wrote_to_pc)
7692 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7693 dsc->insn_addr + dsc->insn_size);
7694
7695}
7696
7697#include "bfd-in2.h"
7698#include "libcoff.h"
7699
7700static int
7701gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7702{
7703 gdb_disassembler *di
7704 = static_cast<gdb_disassembler *>(info->application_data);
7705 struct gdbarch *gdbarch = di->arch ();
7706
7707 if (arm_pc_is_thumb (gdbarch, memaddr))
7708 {
7709 static asymbol *asym;
7710 static combined_entry_type ce;
7711 static struct coff_symbol_struct csym;
7712 static struct bfd fake_bfd;
7713 static bfd_target fake_target;
7714
7715 if (csym.native == NULL)
7716 {
7717 /* Create a fake symbol vector containing a Thumb symbol.
7718 This is solely so that the code in print_insn_little_arm()
7719 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7720 the presence of a Thumb symbol and switch to decoding
7721 Thumb instructions. */
7722
7723 fake_target.flavour = bfd_target_coff_flavour;
7724 fake_bfd.xvec = &fake_target;
7725 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7726 csym.native = &ce;
7727 csym.symbol.the_bfd = &fake_bfd;
7728 csym.symbol.name = "fake";
7729 asym = (asymbol *) & csym;
7730 }
7731
7732 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7733 info->symbols = &asym;
7734 }
7735 else
7736 info->symbols = NULL;
7737
 7738  /* GDB can obtain bfd_mach from exec_bfd, so info->mach is accurate;
 7739     mark the USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise
 7740     opcodes/arm-dis.c:print_insn would reset info->mach, which would
 7741     trigger the assertion on the mismatch between info->mach and
 7742     bfd_get_mach (exec_bfd) in default_print_insn.  */
7743 if (exec_bfd != NULL)
7744 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7745
6394c606 7746 return default_print_insn (memaddr, info);
7747}
7748
7749/* The following define instruction sequences that will cause ARM
7750 cpu's to take an undefined instruction trap. These are used to
7751 signal a breakpoint to GDB.
7752
7753 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7754 modes. A different instruction is required for each mode. The ARM
7755 cpu's can also be big or little endian. Thus four different
7756 instructions are needed to support all cases.
7757
7758 Note: ARMv4 defines several new instructions that will take the
7759 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7760 not in fact add the new instructions. The new undefined
7761 instructions in ARMv4 are all instructions that had no defined
7762 behaviour in earlier chips. There is no guarantee that they will
 7763   raise an exception, but they may be treated as NOPs.  In practice, it
 7764   may only be safe to rely on instructions matching:
7765
7766 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7767 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7768 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7769
 7770   Even this may only be true if the condition predicate is true.  The
7771 following use a condition predicate of ALWAYS so it is always TRUE.
7772
7773 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7774 and NetBSD all use a software interrupt rather than an undefined
 7775   instruction to force a trap.  This can be handled by the
 7776   ABI-specific code during establishment of the gdbarch vector.  */
7777
7778#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7779#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7780#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7781#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
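/* Read as a little-endian word, ARM_LE_BREAKPOINT above is 0xe7ffdefe,
   which matches the always-condition undefined pattern described in the
   preceding comment.  */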
7782
7783static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7784static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7785static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7786static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7787
7788/* Implement the breakpoint_kind_from_pc gdbarch method. */
7789
7790static int
7791arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7792{
7793 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7794 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7795
9779414d 7796 if (arm_pc_is_thumb (gdbarch, *pcptr))
7797 {
7798 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7799
7800 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7801 check whether we are replacing a 32-bit instruction. */
7802 if (tdep->thumb2_breakpoint != NULL)
7803 {
7804 gdb_byte buf[2];
d19280ad 7805
7806 if (target_read_memory (*pcptr, buf, 2) == 0)
7807 {
7808 unsigned short inst1;
d19280ad 7809
177321bd 7810 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7811 if (thumb_insn_size (inst1) == 4)
d19280ad 7812 return ARM_BP_KIND_THUMB2;
7813 }
7814 }
7815
d19280ad 7816 return ARM_BP_KIND_THUMB;
7817 }
7818 else
7819 return ARM_BP_KIND_ARM;
7820
7821}
7822
7823/* Implement the sw_breakpoint_from_kind gdbarch method. */
7824
7825static const gdb_byte *
7826arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7827{
7828 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7829
7830 switch (kind)
cca44b1b 7831 {
7832 case ARM_BP_KIND_ARM:
7833 *size = tdep->arm_breakpoint_size;
cca44b1b 7834 return tdep->arm_breakpoint;
7835 case ARM_BP_KIND_THUMB:
7836 *size = tdep->thumb_breakpoint_size;
7837 return tdep->thumb_breakpoint;
7838 case ARM_BP_KIND_THUMB2:
7839 *size = tdep->thumb2_breakpoint_size;
7840 return tdep->thumb2_breakpoint;
7841 default:
7842 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7843 }
7844}
7845
7846/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7847
7848static int
7849arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7850 struct regcache *regcache,
7851 CORE_ADDR *pcptr)
7852{
7853 gdb_byte buf[4];
7854
 7855  /* Check that the memory pointed to by PC is readable.  */
7856 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7857 {
7858 struct arm_get_next_pcs next_pcs_ctx;
7859
7860 arm_get_next_pcs_ctor (&next_pcs_ctx,
7861 &arm_get_next_pcs_ops,
7862 gdbarch_byte_order (gdbarch),
7863 gdbarch_byte_order_for_code (gdbarch),
7864 0,
7865 regcache);
7866
a0ff9e1a 7867 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7868
 7869      /* If *PCPTR matches one of the possible next PCs computed by the
 7870	  software single-step machinery, determine the Thumb mode from
 7871	  that destination address.  */
a0ff9e1a 7872 for (CORE_ADDR pc : next_pcs)
7873 {
7874 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7875 {
7876 if (IS_THUMB_ADDR (pc))
7877 {
7878 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7879 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7880 }
7881 else
7882 return ARM_BP_KIND_ARM;
7883 }
7884 }
7885 }
7886
7887 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7888}
7889
7890/* Extract from an array REGBUF containing the (raw) register state a
7891 function return value of type TYPE, and copy that, in virtual
7892 format, into VALBUF. */
7893
7894static void
7895arm_extract_return_value (struct type *type, struct regcache *regs,
7896 gdb_byte *valbuf)
7897{
7898 struct gdbarch *gdbarch = get_regcache_arch (regs);
7899 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7900
7901 if (TYPE_CODE_FLT == TYPE_CODE (type))
7902 {
7903 switch (gdbarch_tdep (gdbarch)->fp_model)
7904 {
7905 case ARM_FLOAT_FPA:
7906 {
7907 /* The value is in register F0 in internal format. We need to
7908 extract the raw value and then convert it to the desired
7909 internal type. */
7910 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7911
7912 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7913 convert_typed_floating (tmpbuf, arm_ext_type (gdbarch),
7914 valbuf, type);
7915 }
7916 break;
7917
7918 case ARM_FLOAT_SOFT_FPA:
7919 case ARM_FLOAT_SOFT_VFP:
7920 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7921 not using the VFP ABI code. */
7922 case ARM_FLOAT_VFP:
7923 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7924 if (TYPE_LENGTH (type) > 4)
7925 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7926 valbuf + INT_REGISTER_SIZE);
7927 break;
7928
7929 default:
7930 internal_error (__FILE__, __LINE__,
7931 _("arm_extract_return_value: "
7932 "Floating point model not supported"));
7933 break;
7934 }
7935 }
7936 else if (TYPE_CODE (type) == TYPE_CODE_INT
7937 || TYPE_CODE (type) == TYPE_CODE_CHAR
7938 || TYPE_CODE (type) == TYPE_CODE_BOOL
7939 || TYPE_CODE (type) == TYPE_CODE_PTR
aa006118 7940 || TYPE_IS_REFERENCE (type)
7941 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7942 {
7943 /* If the type is a plain integer, then the access is
7944 straight-forward. Otherwise we have to play around a bit
7945 more. */
7946 int len = TYPE_LENGTH (type);
7947 int regno = ARM_A1_REGNUM;
7948 ULONGEST tmp;
7949
7950 while (len > 0)
7951 {
7952 /* By using store_unsigned_integer we avoid having to do
7953 anything special for small big-endian values. */
7954 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7955 store_unsigned_integer (valbuf,
7956 (len > INT_REGISTER_SIZE
7957 ? INT_REGISTER_SIZE : len),
7958 byte_order, tmp);
7959 len -= INT_REGISTER_SIZE;
7960 valbuf += INT_REGISTER_SIZE;
7961 }
7962 }
7963 else
7964 {
7965 /* For a structure or union the behaviour is as if the value had
7966 been stored to word-aligned memory and then loaded into
7967 registers with 32-bit load instruction(s). */
7968 int len = TYPE_LENGTH (type);
7969 int regno = ARM_A1_REGNUM;
7970 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7971
7972 while (len > 0)
7973 {
7974 regcache_cooked_read (regs, regno++, tmpbuf);
7975 memcpy (valbuf, tmpbuf,
7976 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7977 len -= INT_REGISTER_SIZE;
7978 valbuf += INT_REGISTER_SIZE;
7979 }
7980 }
7981}
7982
7983
7984/* Will a function return an aggregate type in memory or in a
7985 register? Return 0 if an aggregate type can be returned in a
7986 register, 1 if it must be returned in memory. */
7987
7988static int
7989arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7990{
7991 enum type_code code;
7992
f168693b 7993 type = check_typedef (type);
cca44b1b 7994
b13c8ab2
YQ
7995 /* Simple, non-aggregate types (ie not including vectors and
7996 complex) are always returned in a register (or registers). */
7997 code = TYPE_CODE (type);
7998 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7999 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8000 return 0;
cca44b1b 8001
8002 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8003 {
8004 /* Vector values should be returned using ARM registers if they
8005 are not over 16 bytes. */
8006 return (TYPE_LENGTH (type) > 16);
8007 }
8008
b13c8ab2 8009 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 8010 {
8011 /* The AAPCS says all aggregates not larger than a word are returned
8012 in a register. */
8013 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8014 return 0;
8015
8016 return 1;
8017 }
8018 else
8019 {
8020 int nRc;
cca44b1b 8021
8022 /* All aggregate types that won't fit in a register must be returned
8023 in memory. */
8024 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8025 return 1;
cca44b1b 8026
8027 /* In the ARM ABI, "integer" like aggregate types are returned in
8028 registers. For an aggregate type to be integer like, its size
8029 must be less than or equal to INT_REGISTER_SIZE and the
8030 offset of each addressable subfield must be zero. Note that bit
8031 fields are not addressable, and all addressable subfields of
8032 unions always start at offset zero.
cca44b1b 8033
8034 This function is based on the behaviour of GCC 2.95.1.
8035 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8036
8037 Note: All versions of GCC before GCC 2.95.2 do not set up the
8038 parameters correctly for a function returning the following
8039 structure: struct { float f;}; This should be returned in memory,
8040 not a register. Richard Earnshaw sent me a patch, but I do not
8041 know of any way to detect if a function like the above has been
8042 compiled with the correct calling convention. */
8043
8044 /* Assume all other aggregate types can be returned in a register.
8045 Run a check for structures, unions and arrays. */
8046 nRc = 0;
67255d04 8047
8048 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8049 {
8050 int i;
8051 /* Need to check if this struct/union is "integer" like. For
8052 this to be true, its size must be less than or equal to
8053 INT_REGISTER_SIZE and the offset of each addressable
8054 subfield must be zero. Note that bit fields are not
8055 addressable, and unions always start at offset zero. If any
8056 of the subfields is a floating point type, the struct/union
8057 cannot be an integer type. */
8058
8059 /* For each field in the object, check:
8060 1) Is it FP? --> yes, nRc = 1;
8061 2) Is it addressable (bitpos != 0) and
8062 not packed (bitsize == 0)?
8063 --> yes, nRc = 1
8064 */
8065
8066 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8067 {
b13c8ab2
YQ
8068 enum type_code field_type_code;
8069
8070 field_type_code
8071 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8072 i)));
8073
8074 /* Is it a floating point type field? */
8075 if (field_type_code == TYPE_CODE_FLT)
8076 {
8077 nRc = 1;
8078 break;
8079 }
8080
8081 /* If bitpos != 0, then we have to care about it. */
8082 if (TYPE_FIELD_BITPOS (type, i) != 0)
8083 {
8084 /* Bitfields are not addressable. If the field bitsize is
8085 zero, then the field is not packed. Hence it cannot be
8086 a bitfield or any other packed type. */
8087 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8088 {
8089 nRc = 1;
8090 break;
8091 }
8092 }
8093 }
8094 }
67255d04 8095
8096 return nRc;
8097 }
8098}
8099
8100/* Write into appropriate registers a function return value of type
8101 TYPE, given in virtual format. */
8102
8103static void
b508a996 8104arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8105 const gdb_byte *valbuf)
34e8f22d 8106{
be8626e0 8107 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8108 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8109
8110 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8111 {
64403bd1 8112 gdb_byte buf[FP_REGISTER_SIZE];
34e8f22d 8113
be8626e0 8114 switch (gdbarch_tdep (gdbarch)->fp_model)
8115 {
8116 case ARM_FLOAT_FPA:
8117
96a5a1d3 8118 convert_typed_floating (valbuf, type, buf, arm_ext_type (gdbarch));
b508a996 8119 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8120 break;
8121
fd50bc42 8122 case ARM_FLOAT_SOFT_FPA:
08216dd7 8123 case ARM_FLOAT_SOFT_VFP:
8124 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8125 not using the VFP ABI code. */
8126 case ARM_FLOAT_VFP:
8127 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8128 if (TYPE_LENGTH (type) > 4)
8129 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8130 valbuf + INT_REGISTER_SIZE);
8131 break;
8132
8133 default:
8134 internal_error (__FILE__, __LINE__,
8135 _("arm_store_return_value: Floating "
8136 "point model not supported"));
8137 break;
8138 }
34e8f22d 8139 }
8140 else if (TYPE_CODE (type) == TYPE_CODE_INT
8141 || TYPE_CODE (type) == TYPE_CODE_CHAR
8142 || TYPE_CODE (type) == TYPE_CODE_BOOL
8143 || TYPE_CODE (type) == TYPE_CODE_PTR
aa006118 8144 || TYPE_IS_REFERENCE (type)
8145 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8146 {
8147 if (TYPE_LENGTH (type) <= 4)
8148 {
8149 /* Values of one word or less are zero/sign-extended and
8150 returned in r0. */
7a5ea0d4 8151 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8152 LONGEST val = unpack_long (type, valbuf);
8153
e17a4113 8154 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8155 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8156 }
8157 else
8158 {
8159 /* Integral values greater than one word are stored in consecutive
8160 registers starting with r0. This will always be a multiple of
 8161	     the register size.  */
8162 int len = TYPE_LENGTH (type);
8163 int regno = ARM_A1_REGNUM;
8164
8165 while (len > 0)
8166 {
8167 regcache_cooked_write (regs, regno++, valbuf);
8168 len -= INT_REGISTER_SIZE;
8169 valbuf += INT_REGISTER_SIZE;
8170 }
8171 }
8172 }
34e8f22d 8173 else
8174 {
8175 /* For a structure or union the behaviour is as if the value had
8176 been stored to word-aligned memory and then loaded into
8177 registers with 32-bit load instruction(s). */
8178 int len = TYPE_LENGTH (type);
8179 int regno = ARM_A1_REGNUM;
7a5ea0d4 8180 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8181
8182 while (len > 0)
8183 {
8184 memcpy (tmpbuf, valbuf,
7a5ea0d4 8185 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8186 regcache_cooked_write (regs, regno++, tmpbuf);
8187 len -= INT_REGISTER_SIZE;
8188 valbuf += INT_REGISTER_SIZE;
8189 }
8190 }
8191}
8192
8193
8194/* Handle function return values. */
8195
8196static enum return_value_convention
6a3a010b 8197arm_return_value (struct gdbarch *gdbarch, struct value *function,
8198 struct type *valtype, struct regcache *regcache,
8199 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8200{
7c00367c 8201 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8202 struct type *func_type = function ? value_type (function) : NULL;
8203 enum arm_vfp_cprc_base_type vfp_base_type;
8204 int vfp_base_count;
8205
8206 if (arm_vfp_abi_for_function (gdbarch, func_type)
8207 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8208 {
8209 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8210 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8211 int i;
8212 for (i = 0; i < vfp_base_count; i++)
8213 {
8214 if (reg_char == 'q')
8215 {
8216 if (writebuf)
8217 arm_neon_quad_write (gdbarch, regcache, i,
8218 writebuf + i * unit_length);
8219
8220 if (readbuf)
8221 arm_neon_quad_read (gdbarch, regcache, i,
8222 readbuf + i * unit_length);
8223 }
8224 else
8225 {
8226 char name_buf[4];
8227 int regnum;
8228
8c042590 8229 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
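	      /* NAME_BUF is, e.g., "s0" or "d2" here; look the register up
		 by name to get its register number.  */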
8230 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8231 strlen (name_buf));
8232 if (writebuf)
8233 regcache_cooked_write (regcache, regnum,
8234 writebuf + i * unit_length);
8235 if (readbuf)
8236 regcache_cooked_read (regcache, regnum,
8237 readbuf + i * unit_length);
8238 }
8239 }
8240 return RETURN_VALUE_REGISTER_CONVENTION;
8241 }
7c00367c 8242
8243 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8244 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8245 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8246 {
8247 if (tdep->struct_return == pcc_struct_return
8248 || arm_return_in_memory (gdbarch, valtype))
8249 return RETURN_VALUE_STRUCT_CONVENTION;
8250 }
8251 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8252 {
8253 if (arm_return_in_memory (gdbarch, valtype))
8254 return RETURN_VALUE_STRUCT_CONVENTION;
8255 }
7052e42c 8256
8257 if (writebuf)
8258 arm_store_return_value (valtype, regcache, writebuf);
8259
8260 if (readbuf)
8261 arm_extract_return_value (valtype, regcache, readbuf);
8262
8263 return RETURN_VALUE_REGISTER_CONVENTION;
8264}
8265
8266
9df628e0 8267static int
60ade65d 8268arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8269{
8270 struct gdbarch *gdbarch = get_frame_arch (frame);
8271 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8272 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8273 CORE_ADDR jb_addr;
e362b510 8274 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8275
60ade65d 8276 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8277
8278 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8279 INT_REGISTER_SIZE))
8280 return 0;
8281
e17a4113 8282 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8283 return 1;
8284}
8285
8286/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8287 return the target PC. Otherwise return 0. */
8288
8289CORE_ADDR
52f729a7 8290arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8291{
2c02bd72 8292 const char *name;
faa95490 8293 int namelen;
8294 CORE_ADDR start_addr;
8295
8296 /* Find the starting address and name of the function containing the PC. */
8297 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8298 {
8299 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8300 check here. */
8301 start_addr = arm_skip_bx_reg (frame, pc);
8302 if (start_addr != 0)
8303 return start_addr;
8304
8305 return 0;
8306 }
c906108c 8307
8308 /* If PC is in a Thumb call or return stub, return the address of the
8309 target PC, which is in a register. The thunk functions are called
8310 _call_via_xx, where x is the register name. The possible names
8311 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8312 functions, named __ARM_call_via_r[0-7]. */
8313 if (startswith (name, "_call_via_")
8314 || startswith (name, "__ARM_call_via_"))
c906108c 8315 {
8316 /* Use the name suffix to determine which register contains the
8317 target PC. */
a121b7c1 8318 static const char *table[15] =
8319 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8320 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8321 };
c906108c 8322 int regno;
faa95490 8323 int offset = strlen (name) - 2;
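      /* For example, for "_call_via_r3" the suffix is "r3", so the branch
	 target is whatever is currently in r3 (illustrative).  */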
8324
8325 for (regno = 0; regno <= 14; regno++)
faa95490 8326 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8327 return get_frame_register_unsigned (frame, regno);
c906108c 8328 }
ed9a39eb 8329
8330 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8331 non-interworking calls to foo. We could decode the stubs
8332 to find the target but it's easier to use the symbol table. */
8333 namelen = strlen (name);
8334 if (name[0] == '_' && name[1] == '_'
8335 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8336 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8337 || (namelen > 2 + strlen ("_from_arm")
61012eef 8338 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8339 {
8340 char *target_name;
8341 int target_len = namelen - 2;
3b7344d5 8342 struct bound_minimal_symbol minsym;
8343 struct objfile *objfile;
8344 struct obj_section *sec;
8345
8346 if (name[namelen - 1] == 'b')
8347 target_len -= strlen ("_from_thumb");
8348 else
8349 target_len -= strlen ("_from_arm");
8350
224c3ddb 8351 target_name = (char *) alloca (target_len + 1);
8352 memcpy (target_name, name + 2, target_len);
8353 target_name[target_len] = '\0';
8354
8355 sec = find_pc_section (pc);
8356 objfile = (sec == NULL) ? NULL : sec->objfile;
8357 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8358 if (minsym.minsym != NULL)
77e371c0 8359 return BMSYMBOL_VALUE_ADDRESS (minsym);
8360 else
8361 return 0;
8362 }
8363
c5aa993b 8364 return 0; /* not a stub */
8365}
8366
8367static void
8368set_arm_command (char *args, int from_tty)
8369{
8370 printf_unfiltered (_("\
8371\"set arm\" must be followed by an apporpriate subcommand.\n"));
8372 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8373}
8374
8375static void
8376show_arm_command (char *args, int from_tty)
8377{
26304000 8378 cmd_show_list (showarmcmdlist, from_tty, "");
8379}
8380
8381static void
8382arm_update_current_architecture (void)
fd50bc42 8383{
28e97307 8384 struct gdbarch_info info;
fd50bc42 8385
28e97307 8386 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8387 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8388 return;
fd50bc42 8389
28e97307
DJ
8390 /* Update the architecture. */
8391 gdbarch_info_init (&info);
fd50bc42 8392
28e97307 8393 if (!gdbarch_update_p (info))
9b20d036 8394 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8395}
8396
8397static void
8398set_fp_model_sfunc (char *args, int from_tty,
8399 struct cmd_list_element *c)
8400{
570dc176 8401 int fp_model;
fd50bc42
RE
8402
8403 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8404 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8405 {
aead7601 8406 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8407 break;
8408 }
8409
8410 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8411 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8412 current_fp_model);
8413
28e97307 8414 arm_update_current_architecture ();
fd50bc42
RE
8415}
8416
8417static void
08546159
AC
8418show_fp_model (struct ui_file *file, int from_tty,
8419 struct cmd_list_element *c, const char *value)
fd50bc42 8420{
f5656ead 8421 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8422
28e97307 8423 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8424 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8425 fprintf_filtered (file, _("\
8426The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8427 fp_model_strings[tdep->fp_model]);
8428 else
8429 fprintf_filtered (file, _("\
8430The current ARM floating point model is \"%s\".\n"),
8431 fp_model_strings[arm_fp_model]);
8432}
8433
8434static void
8435arm_set_abi (char *args, int from_tty,
8436 struct cmd_list_element *c)
8437{
570dc176 8438 int arm_abi;
28e97307
DJ
8439
8440 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8441 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8442 {
aead7601 8443 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8444 break;
8445 }
8446
8447 if (arm_abi == ARM_ABI_LAST)
8448 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8449 arm_abi_string);
8450
8451 arm_update_current_architecture ();
8452}
8453
8454static void
8455arm_show_abi (struct ui_file *file, int from_tty,
8456 struct cmd_list_element *c, const char *value)
8457{
f5656ead 8458 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8459
8460 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8461 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8462 fprintf_filtered (file, _("\
8463The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8464 arm_abi_strings[tdep->arm_abi]);
8465 else
8466 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8467 arm_abi_string);
fd50bc42
RE
8468}
8469
0428b8f5
DJ
8470static void
8471arm_show_fallback_mode (struct ui_file *file, int from_tty,
8472 struct cmd_list_element *c, const char *value)
8473{
0963b4bd
MS
8474 fprintf_filtered (file,
8475 _("The current execution mode assumed "
8476 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8477 arm_fallback_mode_string);
8478}
8479
8480static void
8481arm_show_force_mode (struct ui_file *file, int from_tty,
8482 struct cmd_list_element *c, const char *value)
8483{
0963b4bd
MS
8484 fprintf_filtered (file,
8485 _("The current execution mode assumed "
8486 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8487 arm_force_mode_string);
8488}
8489
afd7eef0
RE
8490/* If the user changes the register disassembly style used for info
8491 register and other commands, we have to also switch the style used
8492 in opcodes for disassembly output. This function is run in the "set
8493 arm disassembly" command, and does that. */
bc90b915
FN
8494
8495static void
afd7eef0 8496set_disassembly_style_sfunc (char *args, int from_tty,
65b48a81 8497 struct cmd_list_element *c)
bc90b915 8498{
65b48a81
PB
 8499 /* Convert the short style name into the long style name (e.g., reg-names-*)
8500 before calling the generic set_disassembler_options() function. */
8501 std::string long_name = std::string ("reg-names-") + disassembly_style;
8502 set_disassembler_options (&long_name[0]);
8503}
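/* For example (illustrative): the user-visible style "apcs" is turned
   into the disassembler option string "reg-names-apcs" before being
   handed to set_disassembler_options ().  */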
8504
8505static void
8506show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8507 struct cmd_list_element *c, const char *value)
8508{
8509 struct gdbarch *gdbarch = get_current_arch ();
8510 char *options = get_disassembler_options (gdbarch);
8511 const char *style = "";
8512 int len = 0;
f995bbe8 8513 const char *opt;
65b48a81
PB
8514
8515 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8516 if (CONST_STRNEQ (opt, "reg-names-"))
8517 {
8518 style = &opt[strlen ("reg-names-")];
8519 len = strcspn (style, ",");
8520 }
8521
8522 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
bc90b915
FN
8523}
8524\f
966fbf70 8525/* Return the ARM register name corresponding to register I. */
a208b0cb 8526static const char *
d93859e2 8527arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8528{
58d6951d
DJ
8529 const int num_regs = gdbarch_num_regs (gdbarch);
8530
8531 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8532 && i >= num_regs && i < num_regs + 32)
8533 {
8534 static const char *const vfp_pseudo_names[] = {
8535 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8536 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8537 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8538 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8539 };
8540
8541 return vfp_pseudo_names[i - num_regs];
8542 }
8543
8544 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8545 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8546 {
8547 static const char *const neon_pseudo_names[] = {
8548 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8549 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8550 };
8551
8552 return neon_pseudo_names[i - num_regs - 32];
8553 }
8554
ff6f572f
DJ
8555 if (i >= ARRAY_SIZE (arm_register_names))
8556 /* These registers are only supported on targets which supply
8557 an XML description. */
8558 return "";
8559
966fbf70
RE
8560 return arm_register_names[i];
8561}
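/* Worked example (illustrative, assuming gdbarch_num_regs () returns
   NUM_REGS): pseudo register NUM_REGS + 5 is named "s5", and pseudo
   register NUM_REGS + 32 + 2 is named "q2"; any raw register beyond
   arm_register_names[] that lacks an XML description gets the empty
   name and is therefore hidden from the user.  */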
8562
082fc60d
RE
8563/* Test whether the coff symbol specific value corresponds to a Thumb
8564 function. */
8565
8566static int
8567coff_sym_is_thumb (int val)
8568{
f8bf5763
PM
8569 return (val == C_THUMBEXT
8570 || val == C_THUMBSTAT
8571 || val == C_THUMBEXTFUNC
8572 || val == C_THUMBSTATFUNC
8573 || val == C_THUMBLABEL);
082fc60d
RE
8574}
8575
8576/* arm_coff_make_msymbol_special()
8577 arm_elf_make_msymbol_special()
8578
8579 These functions test whether the COFF or ELF symbol corresponds to
8580 an address in thumb code, and set a "special" bit in a minimal
8581 symbol to indicate that it does. */
8582
34e8f22d 8583static void
082fc60d
RE
8584arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8585{
39d911fc
TP
8586 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8587
8588 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8589 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8590 MSYMBOL_SET_SPECIAL (msym);
8591}
8592
34e8f22d 8593static void
082fc60d
RE
8594arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8595{
8596 if (coff_sym_is_thumb (val))
8597 MSYMBOL_SET_SPECIAL (msym);
8598}
8599
60c5725c 8600static void
c1bd65d0 8601arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8602{
9a3c8263 8603 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8604 unsigned int i;
8605
8606 for (i = 0; i < objfile->obfd->section_count; i++)
8607 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8608}
8609
8610static void
8611arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8612 asymbol *sym)
8613{
8614 const char *name = bfd_asymbol_name (sym);
8615 struct arm_per_objfile *data;
8616 VEC(arm_mapping_symbol_s) **map_p;
8617 struct arm_mapping_symbol new_map_sym;
8618
8619 gdb_assert (name[0] == '$');
8620 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8621 return;
8622
9a3c8263
SM
8623 data = (struct arm_per_objfile *) objfile_data (objfile,
8624 arm_objfile_data_key);
60c5725c
DJ
8625 if (data == NULL)
8626 {
8627 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8628 struct arm_per_objfile);
8629 set_objfile_data (objfile, arm_objfile_data_key, data);
8630 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8631 objfile->obfd->section_count,
8632 VEC(arm_mapping_symbol_s) *);
8633 }
8634 map_p = &data->section_maps[bfd_get_section (sym)->index];
8635
8636 new_map_sym.value = sym->value;
8637 new_map_sym.type = name[1];
8638
8639 /* Assume that most mapping symbols appear in order of increasing
8640 value. If they were randomly distributed, it would be faster to
8641 always push here and then sort at first use. */
8642 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8643 {
8644 struct arm_mapping_symbol *prev_map_sym;
8645
8646 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8647 if (prev_map_sym->value >= sym->value)
8648 {
8649 unsigned int idx;
8650 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8651 arm_compare_mapping_symbols);
8652 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8653 return;
8654 }
8655 }
8656
8657 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8658}
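/* Illustrative example (not taken from the source): a section holding
   ARM code, a literal pool and Thumb code typically carries mapping
   symbols such as "$a" at offset 0x0, "$d" at 0x40 and "$t" at 0x48;
   the code above records a (value, type) pair for each, keeping every
   per-section vector sorted by value so later lookups can bisect.  */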
8659
756fe439 8660static void
61a1198a 8661arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8662{
9779414d 8663 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8664 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8665
8666 /* If necessary, set the T bit. */
8667 if (arm_apcs_32)
8668 {
9779414d 8669 ULONGEST val, t_bit;
61a1198a 8670 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8671 t_bit = arm_psr_thumb_bit (gdbarch);
8672 if (arm_pc_is_thumb (gdbarch, pc))
8673 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8674 val | t_bit);
756fe439 8675 else
61a1198a 8676 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8677 val & ~t_bit);
756fe439
DJ
8678 }
8679}
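/* For example (illustrative): writing a PC that arm_pc_is_thumb ()
   classifies as Thumb also sets the Thumb execution bit returned by
   arm_psr_thumb_bit () - CPSR bit 5 on A/R-profile, XPSR bit 24 on
   M-profile - so the inferior resumes in the right instruction set.  */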
123dc839 8680
58d6951d
DJ
8681/* Read the contents of a NEON quad register, by reading from two
8682 double registers. This is used to implement the quad pseudo
8683 registers, and for argument passing in case the quad registers are
8684 missing; vectors are passed in quad registers when using the VFP
8685 ABI, even if a NEON unit is not present. REGNUM is the index of
8686 the quad register, in [0, 15]. */
8687
05d1431c 8688static enum register_status
58d6951d
DJ
8689arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8690 int regnum, gdb_byte *buf)
8691{
8692 char name_buf[4];
8693 gdb_byte reg_buf[8];
8694 int offset, double_regnum;
05d1431c 8695 enum register_status status;
58d6951d 8696
8c042590 8697 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8698 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8699 strlen (name_buf));
8700
8701 /* d0 is always the least significant half of q0. */
8702 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8703 offset = 8;
8704 else
8705 offset = 0;
8706
05d1431c
PA
8707 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8708 if (status != REG_VALID)
8709 return status;
58d6951d
DJ
8710 memcpy (buf + offset, reg_buf, 8);
8711
8712 offset = 8 - offset;
05d1431c
PA
8713 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8714 if (status != REG_VALID)
8715 return status;
58d6951d 8716 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8717
8718 return REG_VALID;
58d6951d
DJ
8719}
8720
05d1431c 8721static enum register_status
58d6951d
DJ
8722arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8723 int regnum, gdb_byte *buf)
8724{
8725 const int num_regs = gdbarch_num_regs (gdbarch);
8726 char name_buf[4];
8727 gdb_byte reg_buf[8];
8728 int offset, double_regnum;
8729
8730 gdb_assert (regnum >= num_regs);
8731 regnum -= num_regs;
8732
8733 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8734 /* Quad-precision register. */
05d1431c 8735 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8736 else
8737 {
05d1431c
PA
8738 enum register_status status;
8739
58d6951d
DJ
8740 /* Single-precision register. */
8741 gdb_assert (regnum < 32);
8742
8743 /* s0 is always the least significant half of d0. */
8744 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8745 offset = (regnum & 1) ? 0 : 4;
8746 else
8747 offset = (regnum & 1) ? 4 : 0;
8748
8c042590 8749 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8750 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8751 strlen (name_buf));
8752
05d1431c
PA
8753 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8754 if (status == REG_VALID)
8755 memcpy (buf, reg_buf + offset, 4);
8756 return status;
58d6951d
DJ
8757 }
8758}
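/* Worked example (illustrative): reading pseudo "q1" reads raw
   registers d2 and d3; on a little-endian target d2 supplies bytes
   0-7 of the 16-byte result and d3 bytes 8-15, with the two halves
   swapped on big-endian.  Reading pseudo "s5" reads d2 and copies
   4 bytes starting at offset 4 (offset 0 for even-numbered
   single-precision registers on little-endian).  */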
8759
8760/* Store the contents of BUF to a NEON quad register, by writing to
8761 two double registers. This is used to implement the quad pseudo
8762 registers, and for argument passing in case the quad registers are
8763 missing; vectors are passed in quad registers when using the VFP
8764 ABI, even if a NEON unit is not present. REGNUM is the index
8765 of the quad register, in [0, 15]. */
8766
8767static void
8768arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8769 int regnum, const gdb_byte *buf)
8770{
8771 char name_buf[4];
58d6951d
DJ
8772 int offset, double_regnum;
8773
8c042590 8774 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8775 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8776 strlen (name_buf));
8777
8778 /* d0 is always the least significant half of q0. */
8779 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8780 offset = 8;
8781 else
8782 offset = 0;
8783
8784 regcache_raw_write (regcache, double_regnum, buf + offset);
8785 offset = 8 - offset;
8786 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8787}
8788
8789static void
8790arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8791 int regnum, const gdb_byte *buf)
8792{
8793 const int num_regs = gdbarch_num_regs (gdbarch);
8794 char name_buf[4];
8795 gdb_byte reg_buf[8];
8796 int offset, double_regnum;
8797
8798 gdb_assert (regnum >= num_regs);
8799 regnum -= num_regs;
8800
8801 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8802 /* Quad-precision register. */
8803 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8804 else
8805 {
8806 /* Single-precision register. */
8807 gdb_assert (regnum < 32);
8808
8809 /* s0 is always the least significant half of d0. */
8810 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8811 offset = (regnum & 1) ? 0 : 4;
8812 else
8813 offset = (regnum & 1) ? 4 : 0;
8814
8c042590 8815 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8816 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8817 strlen (name_buf));
8818
8819 regcache_raw_read (regcache, double_regnum, reg_buf);
8820 memcpy (reg_buf + offset, buf, 4);
8821 regcache_raw_write (regcache, double_regnum, reg_buf);
8822 }
8823}
8824
123dc839
DJ
8825static struct value *
8826value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8827{
9a3c8263 8828 const int *reg_p = (const int *) baton;
123dc839
DJ
8829 return value_of_register (*reg_p, frame);
8830}
97e03143 8831\f
70f80edf
JT
8832static enum gdb_osabi
8833arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8834{
2af48f68 8835 unsigned int elfosabi;
70f80edf 8836 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8837
70f80edf 8838 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8839
28e97307
DJ
8840 if (elfosabi == ELFOSABI_ARM)
8841 /* GNU tools use this value. Check note sections in this case,
8842 as well. */
8843 bfd_map_over_sections (abfd,
8844 generic_elf_osabi_sniff_abi_tag_sections,
8845 &osabi);
97e03143 8846
28e97307 8847 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8848 return osabi;
97e03143
RE
8849}
8850
54483882
YQ
8851static int
8852arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8853 struct reggroup *group)
8854{
2c291032
YQ
 8855 /* FPS register's type is INT, but belongs to float_reggroup. Besides
 8856 this, FPS register belongs to save_reggroup, restore_reggroup, and
8857 all_reggroup, of course. */
54483882 8858 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8859 return (group == float_reggroup
8860 || group == save_reggroup
8861 || group == restore_reggroup
8862 || group == all_reggroup);
54483882
YQ
8863 else
8864 return default_register_reggroup_p (gdbarch, regnum, group);
8865}
8866
25f8c692
JL
8867\f
8868/* For backward-compatibility we allow two 'g' packet lengths with
8869 the remote protocol depending on whether FPA registers are
8870 supplied. M-profile targets do not have FPA registers, but some
 8871 stubs already exist in the wild that use a 'g' packet which
 8872 supplies them, albeit with dummy values. The packet format which
8873 includes FPA registers should be considered deprecated for
8874 M-profile targets. */
8875
8876static void
8877arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8878{
8879 if (gdbarch_tdep (gdbarch)->is_m)
8880 {
8881 /* If we know from the executable this is an M-profile target,
8882 cater for remote targets whose register set layout is the
8883 same as the FPA layout. */
8884 register_remote_g_packet_guess (gdbarch,
03145bf4 8885 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8886 (16 * INT_REGISTER_SIZE)
8887 + (8 * FP_REGISTER_SIZE)
8888 + (2 * INT_REGISTER_SIZE),
8889 tdesc_arm_with_m_fpa_layout);
8890
8891 /* The regular M-profile layout. */
8892 register_remote_g_packet_guess (gdbarch,
8893 /* r0-r12,sp,lr,pc; xpsr */
8894 (16 * INT_REGISTER_SIZE)
8895 + INT_REGISTER_SIZE,
8896 tdesc_arm_with_m);
3184d3f9
JL
8897
8898 /* M-profile plus M4F VFP. */
8899 register_remote_g_packet_guess (gdbarch,
8900 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8901 (16 * INT_REGISTER_SIZE)
8902 + (16 * VFP_REGISTER_SIZE)
8903 + (2 * INT_REGISTER_SIZE),
8904 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8905 }
8906
8907 /* Otherwise we don't have a useful guess. */
8908}
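/* Worked sizes for the three guesses above, assuming the values this
   port uses elsewhere (INT_REGISTER_SIZE == 4, FP_REGISTER_SIZE == 12,
   VFP_REGISTER_SIZE == 8; stated here as assumptions, not derived from
   this file): FPA layout 16*4 + 8*12 + 2*4 = 168 bytes, plain
   M-profile 16*4 + 4 = 68 bytes, and M-profile with VFP d0-d15
   16*4 + 16*8 + 2*4 = 200 bytes.  */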
8909
7eb89530
YQ
8910/* Implement the code_of_frame_writable gdbarch method. */
8911
8912static int
8913arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8914{
8915 if (gdbarch_tdep (gdbarch)->is_m
8916 && get_frame_type (frame) == SIGTRAMP_FRAME)
8917 {
 8918 /* M-profile exception frames return to some magic PCs, which
 8919 aren't writable at all. */
8920 return 0;
8921 }
8922 else
8923 return 1;
8924}
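/* Illustrative note: the "magic PCs" referred to above are EXC_RETURN
   values such as 0xFFFFFFF1, 0xFFFFFFF9 or 0xFFFFFFFD; no breakpoint
   can be written at such addresses, hence the frame's code is reported
   as not writable.  */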
8925
70f80edf 8926\f
da3c6d4a
MS
8927/* Initialize the current architecture based on INFO. If possible,
8928 re-use an architecture from ARCHES, which is a list of
8929 architectures already created during this debugging session.
97e03143 8930
da3c6d4a
MS
8931 Called e.g. at program startup, when reading a core file, and when
8932 reading a binary file. */
97e03143 8933
39bbf761
RE
8934static struct gdbarch *
8935arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8936{
97e03143 8937 struct gdbarch_tdep *tdep;
39bbf761 8938 struct gdbarch *gdbarch;
28e97307
DJ
8939 struct gdbarch_list *best_arch;
8940 enum arm_abi_kind arm_abi = arm_abi_global;
8941 enum arm_float_model fp_model = arm_fp_model;
123dc839 8942 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8943 int i, is_m = 0;
330c6ca9 8944 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8945 int have_wmmx_registers = 0;
58d6951d 8946 int have_neon = 0;
ff6f572f 8947 int have_fpa_registers = 1;
9779414d
DJ
8948 const struct target_desc *tdesc = info.target_desc;
8949
8950 /* If we have an object to base this architecture on, try to determine
8951 its ABI. */
8952
8953 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8954 {
8955 int ei_osabi, e_flags;
8956
8957 switch (bfd_get_flavour (info.abfd))
8958 {
9779414d
DJ
8959 case bfd_target_coff_flavour:
8960 /* Assume it's an old APCS-style ABI. */
8961 /* XXX WinCE? */
8962 arm_abi = ARM_ABI_APCS;
8963 break;
8964
8965 case bfd_target_elf_flavour:
8966 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8967 e_flags = elf_elfheader (info.abfd)->e_flags;
8968
8969 if (ei_osabi == ELFOSABI_ARM)
8970 {
8971 /* GNU tools used to use this value, but do not for EABI
8972 objects. There's nowhere to tag an EABI version
8973 anyway, so assume APCS. */
8974 arm_abi = ARM_ABI_APCS;
8975 }
d403db27 8976 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
8977 {
8978 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8979 int attr_arch, attr_profile;
8980
8981 switch (eabi_ver)
8982 {
8983 case EF_ARM_EABI_UNKNOWN:
8984 /* Assume GNU tools. */
8985 arm_abi = ARM_ABI_APCS;
8986 break;
8987
8988 case EF_ARM_EABI_VER4:
8989 case EF_ARM_EABI_VER5:
8990 arm_abi = ARM_ABI_AAPCS;
8991 /* EABI binaries default to VFP float ordering.
8992 They may also contain build attributes that can
8993 be used to identify if the VFP argument-passing
8994 ABI is in use. */
8995 if (fp_model == ARM_FLOAT_AUTO)
8996 {
8997#ifdef HAVE_ELF
8998 switch (bfd_elf_get_obj_attr_int (info.abfd,
8999 OBJ_ATTR_PROC,
9000 Tag_ABI_VFP_args))
9001 {
b35b0298 9002 case AEABI_VFP_args_base:
9779414d
DJ
9003 /* "The user intended FP parameter/result
9004 passing to conform to AAPCS, base
9005 variant". */
9006 fp_model = ARM_FLOAT_SOFT_VFP;
9007 break;
b35b0298 9008 case AEABI_VFP_args_vfp:
9779414d
DJ
9009 /* "The user intended FP parameter/result
9010 passing to conform to AAPCS, VFP
9011 variant". */
9012 fp_model = ARM_FLOAT_VFP;
9013 break;
b35b0298 9014 case AEABI_VFP_args_toolchain:
9779414d
DJ
9015 /* "The user intended FP parameter/result
9016 passing to conform to tool chain-specific
9017 conventions" - we don't know any such
9018 conventions, so leave it as "auto". */
9019 break;
b35b0298 9020 case AEABI_VFP_args_compatible:
5c294fee
TG
9021 /* "Code is compatible with both the base
9022 and VFP variants; the user did not permit
9023 non-variadic functions to pass FP
9024 parameters/results" - leave it as
9025 "auto". */
9026 break;
9779414d
DJ
9027 default:
9028 /* Attribute value not mentioned in the
5c294fee 9029 November 2012 ABI, so leave it as
9779414d
DJ
9030 "auto". */
9031 break;
9032 }
9033#else
9034 fp_model = ARM_FLOAT_SOFT_VFP;
9035#endif
9036 }
9037 break;
9038
9039 default:
9040 /* Leave it as "auto". */
9041 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9042 break;
9043 }
9044
9045#ifdef HAVE_ELF
9046 /* Detect M-profile programs. This only works if the
9047 executable file includes build attributes; GCC does
9048 copy them to the executable, but e.g. RealView does
9049 not. */
9050 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9051 Tag_CPU_arch);
0963b4bd
MS
9052 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9053 OBJ_ATTR_PROC,
9779414d
DJ
9054 Tag_CPU_arch_profile);
9055 /* GCC specifies the profile for v6-M; RealView only
9056 specifies the profile for architectures starting with
9057 V7 (as opposed to architectures with a tag
9058 numerically greater than TAG_CPU_ARCH_V7). */
9059 if (!tdesc_has_registers (tdesc)
9060 && (attr_arch == TAG_CPU_ARCH_V6_M
9061 || attr_arch == TAG_CPU_ARCH_V6S_M
9062 || attr_profile == 'M'))
25f8c692 9063 is_m = 1;
9779414d
DJ
9064#endif
9065 }
9066
9067 if (fp_model == ARM_FLOAT_AUTO)
9068 {
9069 int e_flags = elf_elfheader (info.abfd)->e_flags;
9070
9071 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9072 {
9073 case 0:
9074 /* Leave it as "auto". Strictly speaking this case
9075 means FPA, but almost nobody uses that now, and
9076 many toolchains fail to set the appropriate bits
9077 for the floating-point model they use. */
9078 break;
9079 case EF_ARM_SOFT_FLOAT:
9080 fp_model = ARM_FLOAT_SOFT_FPA;
9081 break;
9082 case EF_ARM_VFP_FLOAT:
9083 fp_model = ARM_FLOAT_VFP;
9084 break;
9085 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9086 fp_model = ARM_FLOAT_SOFT_VFP;
9087 break;
9088 }
9089 }
9090
9091 if (e_flags & EF_ARM_BE8)
9092 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9093
9094 break;
9095
9096 default:
9097 /* Leave it as "auto". */
9098 break;
9099 }
9100 }
123dc839
DJ
9101
9102 /* Check any target description for validity. */
9779414d 9103 if (tdesc_has_registers (tdesc))
123dc839
DJ
9104 {
9105 /* For most registers we require GDB's default names; but also allow
9106 the numeric names for sp / lr / pc, as a convenience. */
9107 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9108 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9109 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9110
9111 const struct tdesc_feature *feature;
58d6951d 9112 int valid_p;
123dc839 9113
9779414d 9114 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9115 "org.gnu.gdb.arm.core");
9116 if (feature == NULL)
9779414d
DJ
9117 {
9118 feature = tdesc_find_feature (tdesc,
9119 "org.gnu.gdb.arm.m-profile");
9120 if (feature == NULL)
9121 return NULL;
9122 else
9123 is_m = 1;
9124 }
123dc839
DJ
9125
9126 tdesc_data = tdesc_data_alloc ();
9127
9128 valid_p = 1;
9129 for (i = 0; i < ARM_SP_REGNUM; i++)
9130 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9131 arm_register_names[i]);
9132 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9133 ARM_SP_REGNUM,
9134 arm_sp_names);
9135 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9136 ARM_LR_REGNUM,
9137 arm_lr_names);
9138 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9139 ARM_PC_REGNUM,
9140 arm_pc_names);
9779414d
DJ
9141 if (is_m)
9142 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9143 ARM_PS_REGNUM, "xpsr");
9144 else
9145 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9146 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9147
9148 if (!valid_p)
9149 {
9150 tdesc_data_cleanup (tdesc_data);
9151 return NULL;
9152 }
9153
9779414d 9154 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9155 "org.gnu.gdb.arm.fpa");
9156 if (feature != NULL)
9157 {
9158 valid_p = 1;
9159 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9160 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9161 arm_register_names[i]);
9162 if (!valid_p)
9163 {
9164 tdesc_data_cleanup (tdesc_data);
9165 return NULL;
9166 }
9167 }
ff6f572f
DJ
9168 else
9169 have_fpa_registers = 0;
9170
9779414d 9171 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9172 "org.gnu.gdb.xscale.iwmmxt");
9173 if (feature != NULL)
9174 {
9175 static const char *const iwmmxt_names[] = {
9176 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9177 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9178 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9179 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9180 };
9181
9182 valid_p = 1;
9183 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9184 valid_p
9185 &= tdesc_numbered_register (feature, tdesc_data, i,
9186 iwmmxt_names[i - ARM_WR0_REGNUM]);
9187
9188 /* Check for the control registers, but do not fail if they
9189 are missing. */
9190 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9191 tdesc_numbered_register (feature, tdesc_data, i,
9192 iwmmxt_names[i - ARM_WR0_REGNUM]);
9193
9194 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9195 valid_p
9196 &= tdesc_numbered_register (feature, tdesc_data, i,
9197 iwmmxt_names[i - ARM_WR0_REGNUM]);
9198
9199 if (!valid_p)
9200 {
9201 tdesc_data_cleanup (tdesc_data);
9202 return NULL;
9203 }
a56cc1ce
YQ
9204
9205 have_wmmx_registers = 1;
ff6f572f 9206 }
58d6951d
DJ
9207
9208 /* If we have a VFP unit, check whether the single precision registers
9209 are present. If not, then we will synthesize them as pseudo
9210 registers. */
9779414d 9211 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9212 "org.gnu.gdb.arm.vfp");
9213 if (feature != NULL)
9214 {
9215 static const char *const vfp_double_names[] = {
9216 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9217 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9218 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9219 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9220 };
9221
9222 /* Require the double precision registers. There must be either
9223 16 or 32. */
9224 valid_p = 1;
9225 for (i = 0; i < 32; i++)
9226 {
9227 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9228 ARM_D0_REGNUM + i,
9229 vfp_double_names[i]);
9230 if (!valid_p)
9231 break;
9232 }
2b9e5ea6
UW
9233 if (!valid_p && i == 16)
9234 valid_p = 1;
58d6951d 9235
2b9e5ea6
UW
9236 /* Also require FPSCR. */
9237 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9238 ARM_FPSCR_REGNUM, "fpscr");
9239 if (!valid_p)
58d6951d
DJ
9240 {
9241 tdesc_data_cleanup (tdesc_data);
9242 return NULL;
9243 }
9244
9245 if (tdesc_unnumbered_register (feature, "s0") == 0)
9246 have_vfp_pseudos = 1;
9247
330c6ca9 9248 vfp_register_count = i;
58d6951d
DJ
9249
9250 /* If we have VFP, also check for NEON. The architecture allows
9251 NEON without VFP (integer vector operations only), but GDB
9252 does not support that. */
9779414d 9253 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9254 "org.gnu.gdb.arm.neon");
9255 if (feature != NULL)
9256 {
9257 /* NEON requires 32 double-precision registers. */
9258 if (i != 32)
9259 {
9260 tdesc_data_cleanup (tdesc_data);
9261 return NULL;
9262 }
9263
9264 /* If there are quad registers defined by the stub, use
9265 their type; otherwise (normally) provide them with
9266 the default type. */
9267 if (tdesc_unnumbered_register (feature, "q0") == 0)
9268 have_neon_pseudos = 1;
9269
9270 have_neon = 1;
9271 }
9272 }
123dc839 9273 }
39bbf761 9274
28e97307
DJ
9275 /* If there is already a candidate, use it. */
9276 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9277 best_arch != NULL;
9278 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9279 {
b8926edc
DJ
9280 if (arm_abi != ARM_ABI_AUTO
9281 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9282 continue;
9283
b8926edc
DJ
9284 if (fp_model != ARM_FLOAT_AUTO
9285 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9286 continue;
9287
58d6951d
DJ
9288 /* There are various other properties in tdep that we do not
9289 need to check here: those derived from a target description,
9290 since gdbarches with a different target description are
9291 automatically disqualified. */
9292
9779414d
DJ
9293 /* Do check is_m, though, since it might come from the binary. */
9294 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9295 continue;
9296
28e97307
DJ
9297 /* Found a match. */
9298 break;
9299 }
97e03143 9300
28e97307 9301 if (best_arch != NULL)
123dc839
DJ
9302 {
9303 if (tdesc_data != NULL)
9304 tdesc_data_cleanup (tdesc_data);
9305 return best_arch->gdbarch;
9306 }
28e97307 9307
8d749320 9308 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9309 gdbarch = gdbarch_alloc (&info, tdep);
9310
28e97307
DJ
9311 /* Record additional information about the architecture we are defining.
9312 These are gdbarch discriminators, like the OSABI. */
9313 tdep->arm_abi = arm_abi;
9314 tdep->fp_model = fp_model;
9779414d 9315 tdep->is_m = is_m;
ff6f572f 9316 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9317 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9318 gdb_assert (vfp_register_count == 0
9319 || vfp_register_count == 16
9320 || vfp_register_count == 32);
9321 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9322 tdep->have_vfp_pseudos = have_vfp_pseudos;
9323 tdep->have_neon_pseudos = have_neon_pseudos;
9324 tdep->have_neon = have_neon;
08216dd7 9325
25f8c692
JL
9326 arm_register_g_packet_guesses (gdbarch);
9327
08216dd7 9328 /* Breakpoints. */
9d4fde75 9329 switch (info.byte_order_for_code)
67255d04
RE
9330 {
9331 case BFD_ENDIAN_BIG:
66e810cd
RE
9332 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9333 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9334 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9335 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9336
67255d04
RE
9337 break;
9338
9339 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9340 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9341 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9342 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9343 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9344
67255d04
RE
9345 break;
9346
9347 default:
9348 internal_error (__FILE__, __LINE__,
edefbb7c 9349 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9350 }
9351
d7b486e7
RE
9352 /* On ARM targets char defaults to unsigned. */
9353 set_gdbarch_char_signed (gdbarch, 0);
9354
53375380
PA
9355 /* wchar_t is unsigned under the AAPCS. */
9356 if (tdep->arm_abi == ARM_ABI_AAPCS)
9357 set_gdbarch_wchar_signed (gdbarch, 0);
9358 else
9359 set_gdbarch_wchar_signed (gdbarch, 1);
53375380 9360
cca44b1b
JB
9361 /* Note: for displaced stepping, this includes the breakpoint, and one word
 9362 of additional scratch space. This setting isn't used for anything besides
9363 displaced stepping at present. */
9364 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9365
9df628e0 9366 /* This should be low enough for everything. */
97e03143 9367 tdep->lowest_pc = 0x20;
94c30b78 9368 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9369
7c00367c
MK
9370 /* The default, for both APCS and AAPCS, is to return small
9371 structures in registers. */
9372 tdep->struct_return = reg_struct_return;
9373
2dd604e7 9374 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9375 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9376
7eb89530
YQ
9377 if (is_m)
9378 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9379
756fe439
DJ
9380 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9381
148754e5 9382 /* Frame handling. */
a262aec2 9383 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9384 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9385 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9386
eb5492fa 9387 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9388
34e8f22d 9389 /* Address manipulation. */
34e8f22d
RE
9390 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9391
34e8f22d
RE
9392 /* Advance PC across function entry code. */
9393 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9394
c9cf6e20
MG
9395 /* Detect whether PC is at a point where the stack has been destroyed. */
9396 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9397
190dce09
UW
9398 /* Skip trampolines. */
9399 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9400
34e8f22d
RE
9401 /* The stack grows downward. */
9402 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9403
9404 /* Breakpoint manipulation. */
04180708
YQ
9405 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9406 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
833b7ab5
YQ
9407 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9408 arm_breakpoint_kind_from_current_state);
34e8f22d
RE
9409
9410 /* Information about registers, etc. */
34e8f22d
RE
9411 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9412 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9413 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9414 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9415 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9416
ff6f572f
DJ
9417 /* This "info float" is FPA-specific. Use the generic version if we
9418 do not have FPA. */
9419 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9420 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9421
26216b98 9422 /* Internal <-> external register number maps. */
ff6f572f 9423 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9424 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9425
34e8f22d
RE
9426 set_gdbarch_register_name (gdbarch, arm_register_name);
9427
9428 /* Returning results. */
2af48f68 9429 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9430
03d48a7d
RE
9431 /* Disassembly. */
9432 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9433
34e8f22d
RE
9434 /* Minsymbol frobbing. */
9435 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9436 set_gdbarch_coff_make_msymbol_special (gdbarch,
9437 arm_coff_make_msymbol_special);
60c5725c 9438 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9439
f9d67f43
DJ
9440 /* Thumb-2 IT block support. */
9441 set_gdbarch_adjust_breakpoint_address (gdbarch,
9442 arm_adjust_breakpoint_address);
9443
0d5de010
DJ
9444 /* Virtual tables. */
9445 set_gdbarch_vbit_in_delta (gdbarch, 1);
9446
97e03143 9447 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9448 gdbarch_init_osabi (info, gdbarch);
97e03143 9449
b39cc962
DJ
9450 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9451
eb5492fa 9452 /* Add some default predicates. */
2ae28aa9
YQ
9453 if (is_m)
9454 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9455 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9456 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9457 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9458 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9459 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9460
97e03143
RE
9461 /* Now we have tuned the configuration, set a few final things,
9462 based on what the OS ABI has told us. */
9463
b8926edc
DJ
9464 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9465 binaries are always marked. */
9466 if (tdep->arm_abi == ARM_ABI_AUTO)
9467 tdep->arm_abi = ARM_ABI_APCS;
9468
e3039479
UW
9469 /* Watchpoints are not steppable. */
9470 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9471
b8926edc
DJ
9472 /* We used to default to FPA for generic ARM, but almost nobody
9473 uses that now, and we now provide a way for the user to force
9474 the model. So default to the most useful variant. */
9475 if (tdep->fp_model == ARM_FLOAT_AUTO)
9476 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9477
9df628e0
RE
9478 if (tdep->jb_pc >= 0)
9479 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9480
08216dd7 9481 /* Floating point sizes and format. */
8da61cc4 9482 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9483 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9484 {
8da61cc4
DJ
9485 set_gdbarch_double_format
9486 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9487 set_gdbarch_long_double_format
9488 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9489 }
9490 else
9491 {
9492 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9493 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9494 }
9495
58d6951d
DJ
9496 if (have_vfp_pseudos)
9497 {
9498 /* NOTE: These are the only pseudo registers used by
9499 the ARM target at the moment. If more are added, a
9500 little more care in numbering will be needed. */
9501
9502 int num_pseudos = 32;
9503 if (have_neon_pseudos)
9504 num_pseudos += 16;
9505 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9506 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9507 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9508 }
9509
123dc839 9510 if (tdesc_data)
58d6951d
DJ
9511 {
9512 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9513
9779414d 9514 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9515
9516 /* Override tdesc_register_type to adjust the types of VFP
9517 registers for NEON. */
9518 set_gdbarch_register_type (gdbarch, arm_register_type);
9519 }
123dc839
DJ
9520
9521 /* Add standard register aliases. We add aliases even for those
 9522 names which are used by the current architecture - it's simpler,
9523 and does no harm, since nothing ever lists user registers. */
9524 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9525 user_reg_add (gdbarch, arm_register_aliases[i].name,
9526 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9527
65b48a81
PB
9528 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9529 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9530
39bbf761
RE
9531 return gdbarch;
9532}
9533
97e03143 9534static void
2af46ca0 9535arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9536{
2af46ca0 9537 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9538
9539 if (tdep == NULL)
9540 return;
9541
edefbb7c 9542 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9543 (unsigned long) tdep->lowest_pc);
9544}
9545
0d4c07af 9546#if GDB_SELF_TEST
b121eeb9
YQ
9547namespace selftests
9548{
9549static void arm_record_test (void);
9550}
0d4c07af 9551#endif
b121eeb9 9552
c906108c 9553void
ed9a39eb 9554_initialize_arm_tdep (void)
c906108c 9555{
bc90b915 9556 long length;
53904c9e
AC
9557 const char *setname;
9558 const char *setdesc;
65b48a81 9559 int i, j;
edefbb7c
AC
9560 char regdesc[1024], *rdptr = regdesc;
9561 size_t rest = sizeof (regdesc);
085dd6e6 9562
42cf1509 9563 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9564
60c5725c 9565 arm_objfile_data_key
c1bd65d0 9566 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9567
0e9e9abd
UW
9568 /* Add ourselves to objfile event chain. */
9569 observer_attach_new_objfile (arm_exidx_new_objfile);
9570 arm_exidx_data_key
9571 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9572
70f80edf
JT
9573 /* Register an ELF OS ABI sniffer for ARM binaries. */
9574 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9575 bfd_target_elf_flavour,
9576 arm_elf_osabi_sniffer);
9577
9779414d
DJ
9578 /* Initialize the standard target descriptions. */
9579 initialize_tdesc_arm_with_m ();
25f8c692 9580 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9581 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9582 initialize_tdesc_arm_with_iwmmxt ();
9583 initialize_tdesc_arm_with_vfpv2 ();
9584 initialize_tdesc_arm_with_vfpv3 ();
9585 initialize_tdesc_arm_with_neon ();
9779414d 9586
afd7eef0
RE
9587 /* Add root prefix command for all "set arm"/"show arm" commands. */
9588 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9589 _("Various ARM-specific commands."),
afd7eef0
RE
9590 &setarmcmdlist, "set arm ", 0, &setlist);
9591
9592 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9593 _("Various ARM-specific commands."),
afd7eef0 9594 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9595
c5aa993b 9596
65b48a81
PB
9597 arm_disassembler_options = xstrdup ("reg-names-std");
9598 const disasm_options_t *disasm_options = disassembler_options_arm ();
9599 int num_disassembly_styles = 0;
9600 for (i = 0; disasm_options->name[i] != NULL; i++)
9601 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9602 num_disassembly_styles++;
9603
9604 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
8d749320 9605 valid_disassembly_styles = XNEWVEC (const char *,
65b48a81
PB
9606 num_disassembly_styles + 1);
9607 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9608 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9609 {
9610 size_t offset = strlen ("reg-names-");
9611 const char *style = disasm_options->name[i];
9612 valid_disassembly_styles[j++] = &style[offset];
9613 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9614 disasm_options->description[i]);
9615 rdptr += length;
9616 rest -= length;
9617 }
94c30b78 9618 /* Mark the end of valid options. */
65b48a81 9619 valid_disassembly_styles[num_disassembly_styles] = NULL;
c906108c 9620
edefbb7c 9621 /* Create the help text. */
d7e74731
PA
9622 std::string helptext = string_printf ("%s%s%s",
9623 _("The valid values are:\n"),
9624 regdesc,
9625 _("The default is \"std\"."));
ed9a39eb 9626
edefbb7c
AC
9627 add_setshow_enum_cmd("disassembler", no_class,
9628 valid_disassembly_styles, &disassembly_style,
9629 _("Set the disassembly style."),
9630 _("Show the disassembly style."),
09b0e4b0 9631 helptext.c_str (),
2c5b56ce 9632 set_disassembly_style_sfunc,
65b48a81 9633 show_disassembly_style_sfunc,
7376b4c2 9634 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9635
9636 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9637 _("Set usage of ARM 32-bit mode."),
9638 _("Show usage of ARM 32-bit mode."),
9639 _("When off, a 26-bit PC will be used."),
2c5b56ce 9640 NULL,
0963b4bd
MS
9641 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9642 mode is %s. */
26304000 9643 &setarmcmdlist, &showarmcmdlist);
c906108c 9644
fd50bc42 9645 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9646 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9647 _("Set the floating point type."),
9648 _("Show the floating point type."),
9649 _("auto - Determine the FP typefrom the OS-ABI.\n\
9650softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9651fpa - FPA co-processor (GCC compiled).\n\
9652softvfp - Software FP with pure-endian doubles.\n\
9653vfp - VFP co-processor."),
edefbb7c 9654 set_fp_model_sfunc, show_fp_model,
7376b4c2 9655 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9656
28e97307
DJ
9657 /* Add a command to allow the user to force the ABI. */
9658 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9659 _("Set the ABI."),
9660 _("Show the ABI."),
9661 NULL, arm_set_abi, arm_show_abi,
9662 &setarmcmdlist, &showarmcmdlist);
9663
0428b8f5
DJ
9664 /* Add two commands to allow the user to force the assumed
9665 execution mode. */
9666 add_setshow_enum_cmd ("fallback-mode", class_support,
9667 arm_mode_strings, &arm_fallback_mode_string,
9668 _("Set the mode assumed when symbols are unavailable."),
9669 _("Show the mode assumed when symbols are unavailable."),
9670 NULL, NULL, arm_show_fallback_mode,
9671 &setarmcmdlist, &showarmcmdlist);
9672 add_setshow_enum_cmd ("force-mode", class_support,
9673 arm_mode_strings, &arm_force_mode_string,
9674 _("Set the mode assumed even when symbols are available."),
9675 _("Show the mode assumed even when symbols are available."),
9676 NULL, NULL, arm_show_force_mode,
9677 &setarmcmdlist, &showarmcmdlist);
9678
6529d2dd 9679 /* Debugging flag. */
edefbb7c
AC
9680 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9681 _("Set ARM debugging."),
9682 _("Show ARM debugging."),
9683 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9684 NULL,
7915a72c 9685 NULL, /* FIXME: i18n: "ARM debugging is %s. */
26304000 9686 &setdebuglist, &showdebuglist);
b121eeb9
YQ
9687
9688#if GDB_SELF_TEST
1526853e 9689 selftests::register_test ("arm-record", selftests::arm_record_test);
b121eeb9
YQ
9690#endif
9691
c906108c 9692}
72508ac0
PO
9693
9694/* ARM-reversible process record data structures. */
9695
9696#define ARM_INSN_SIZE_BYTES 4
9697#define THUMB_INSN_SIZE_BYTES 2
9698#define THUMB2_INSN_SIZE_BYTES 4
9699
9700
71e396f9
LM
9701/* Position of the bit within a 32-bit ARM instruction
9702 that defines whether the instruction is a load or store. */
72508ac0
PO
9703#define INSN_S_L_BIT_NUM 20
9704
9705#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9706 do \
9707 { \
9708 unsigned int reg_len = LENGTH; \
9709 if (reg_len) \
9710 { \
9711 REGS = XNEWVEC (uint32_t, reg_len); \
9712 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9713 } \
9714 } \
9715 while (0)
9716
9717#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9718 do \
9719 { \
9720 unsigned int mem_len = LENGTH; \
9721 if (mem_len) \
9722 { \
9723 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9724 memcpy(&MEMS->len, &RECORD_BUF[0], \
9725 sizeof(struct arm_mem_r) * LENGTH); \
9726 } \
9727 } \
9728 while (0)
9729
 9730/* Checks whether insn is already recorded or yet to be decoded (boolean expression). */
9731#define INSN_RECORDED(ARM_RECORD) \
9732 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9733
9734/* ARM memory record structure. */
9735struct arm_mem_r
9736{
9737 uint32_t len; /* Record length. */
bfbbec00 9738 uint32_t addr; /* Memory address. */
72508ac0
PO
9739};
9740
9741/* ARM instruction record contains opcode of current insn
9742 and execution state (before entry to decode_insn()),
9743 contains list of to-be-modified registers and
9744 memory blocks (on return from decode_insn()). */
9745
9746typedef struct insn_decode_record_t
9747{
9748 struct gdbarch *gdbarch;
9749 struct regcache *regcache;
9750 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9751 uint32_t arm_insn; /* Should accommodate thumb. */
9752 uint32_t cond; /* Condition code. */
9753 uint32_t opcode; /* Insn opcode. */
9754 uint32_t decode; /* Insn decode bits. */
 9755 uint32_t mem_rec_count; /* Number of mem records. */
 9756 uint32_t reg_rec_count; /* Number of reg records. */
9757 uint32_t *arm_regs; /* Registers to be saved for this record. */
9758 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9759} insn_decode_record;
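/* Usage sketch (illustrative; "tgt_mem_addr" and the local buffers are
   hypothetical, not taken from a particular caller): a decode routine
   collects the registers and memory ranges an instruction will modify,
   then transfers them into the record via the macros above.  */
#if 0
  uint32_t record_buf[8], record_buf_mem[8];

  record_buf[0] = ARM_PS_REGNUM;        /* Register to be saved.  */
  arm_insn_r->reg_rec_count = 1;

  record_buf_mem[0] = 4;                /* Length of the memory write.  */
  record_buf_mem[1] = tgt_mem_addr;     /* Address of the memory write.  */
  arm_insn_r->mem_rec_count = 1;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
#endif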
9760
9761
9762/* Checks ARM SBZ and SBO mandatory fields. */
9763
9764static int
9765sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9766{
9767 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9768
9769 if (!len)
9770 return 1;
9771
9772 if (!sbo)
9773 ones = ~ones;
9774
9775 while (ones)
9776 {
9777 if (!(ones & sbo))
9778 {
9779 return 0;
9780 }
9781 ones = ones >> 1;
9782 }
9783 return 1;
9784}
9785
c6ec2b30
OJ
9786enum arm_record_result
9787{
9788 ARM_RECORD_SUCCESS = 0,
9789 ARM_RECORD_FAILURE = 1
9790};
9791
72508ac0
PO
9792typedef enum
9793{
9794 ARM_RECORD_STRH=1,
9795 ARM_RECORD_STRD
9796} arm_record_strx_t;
9797
9798typedef enum
9799{
9800 ARM_RECORD=1,
9801 THUMB_RECORD,
9802 THUMB2_RECORD
9803} record_type_t;
9804
9805
9806static int
9807arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9808 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9809{
9810
9811 struct regcache *reg_cache = arm_insn_r->regcache;
9812 ULONGEST u_regval[2]= {0};
9813
9814 uint32_t reg_src1 = 0, reg_src2 = 0;
9815 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9816
9817 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9818 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9819
9820 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9821 {
9822 /* 1) Handle misc store, immediate offset. */
9823 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9824 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9825 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9826 regcache_raw_read_unsigned (reg_cache, reg_src1,
9827 &u_regval[0]);
9828 if (ARM_PC_REGNUM == reg_src1)
9829 {
 9830 /* If R15 was used as Rn, the value read is the current PC+8. */
9831 u_regval[0] = u_regval[0] + 8;
9832 }
9833 offset_8 = (immed_high << 4) | immed_low;
9834 /* Calculate target store address. */
9835 if (14 == arm_insn_r->opcode)
9836 {
9837 tgt_mem_addr = u_regval[0] + offset_8;
9838 }
9839 else
9840 {
9841 tgt_mem_addr = u_regval[0] - offset_8;
9842 }
9843 if (ARM_RECORD_STRH == str_type)
9844 {
9845 record_buf_mem[0] = 2;
9846 record_buf_mem[1] = tgt_mem_addr;
9847 arm_insn_r->mem_rec_count = 1;
9848 }
9849 else if (ARM_RECORD_STRD == str_type)
9850 {
9851 record_buf_mem[0] = 4;
9852 record_buf_mem[1] = tgt_mem_addr;
9853 record_buf_mem[2] = 4;
9854 record_buf_mem[3] = tgt_mem_addr + 4;
9855 arm_insn_r->mem_rec_count = 2;
9856 }
9857 }
9858 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9859 {
9860 /* 2) Store, register offset. */
9861 /* Get Rm. */
9862 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9863 /* Get Rn. */
9864 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9865 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9866 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9867 if (15 == reg_src2)
9868 {
 9869 /* If R15 was used as Rn, the value read is the current PC+8. */
 9870 u_regval[1] = u_regval[1] + 8;
9871 }
9872 /* Calculate target store address, Rn +/- Rm, register offset. */
9873 if (12 == arm_insn_r->opcode)
9874 {
9875 tgt_mem_addr = u_regval[0] + u_regval[1];
9876 }
9877 else
9878 {
9879 tgt_mem_addr = u_regval[1] - u_regval[0];
9880 }
9881 if (ARM_RECORD_STRH == str_type)
9882 {
9883 record_buf_mem[0] = 2;
9884 record_buf_mem[1] = tgt_mem_addr;
9885 arm_insn_r->mem_rec_count = 1;
9886 }
9887 else if (ARM_RECORD_STRD == str_type)
9888 {
9889 record_buf_mem[0] = 4;
9890 record_buf_mem[1] = tgt_mem_addr;
9891 record_buf_mem[2] = 4;
9892 record_buf_mem[3] = tgt_mem_addr + 4;
9893 arm_insn_r->mem_rec_count = 2;
9894 }
9895 }
9896 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9897 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9898 {
9899 /* 3) Store, immediate pre-indexed. */
9900 /* 5) Store, immediate post-indexed. */
9901 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9902 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9903 offset_8 = (immed_high << 4) | immed_low;
9904 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9905 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
 9906 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9907 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9908 {
9909 tgt_mem_addr = u_regval[0] + offset_8;
9910 }
9911 else
9912 {
9913 tgt_mem_addr = u_regval[0] - offset_8;
9914 }
9915 if (ARM_RECORD_STRH == str_type)
9916 {
9917 record_buf_mem[0] = 2;
9918 record_buf_mem[1] = tgt_mem_addr;
9919 arm_insn_r->mem_rec_count = 1;
9920 }
9921 else if (ARM_RECORD_STRD == str_type)
9922 {
9923 record_buf_mem[0] = 4;
9924 record_buf_mem[1] = tgt_mem_addr;
9925 record_buf_mem[2] = 4;
9926 record_buf_mem[3] = tgt_mem_addr + 4;
9927 arm_insn_r->mem_rec_count = 2;
9928 }
9929 /* Record Rn also as it changes. */
9930 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9931 arm_insn_r->reg_rec_count = 1;
9932 }
9933 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9934 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9935 {
9936 /* 4) Store, register pre-indexed. */
 9937 /* 6) Store, register post-indexed. */
9938 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9939 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9940 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9941 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9942 /* Calculate target store address, Rn +/- Rm, register offset. */
9943 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9944 {
9945 tgt_mem_addr = u_regval[0] + u_regval[1];
9946 }
9947 else
9948 {
9949 tgt_mem_addr = u_regval[1] - u_regval[0];
9950 }
9951 if (ARM_RECORD_STRH == str_type)
9952 {
9953 record_buf_mem[0] = 2;
9954 record_buf_mem[1] = tgt_mem_addr;
9955 arm_insn_r->mem_rec_count = 1;
9956 }
9957 else if (ARM_RECORD_STRD == str_type)
9958 {
9959 record_buf_mem[0] = 4;
9960 record_buf_mem[1] = tgt_mem_addr;
9961 record_buf_mem[2] = 4;
9962 record_buf_mem[3] = tgt_mem_addr + 4;
9963 arm_insn_r->mem_rec_count = 2;
9964 }
9965 /* Record Rn also as it changes. */
9966 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9967 arm_insn_r->reg_rec_count = 1;
9968 }
9969 return 0;
9970}
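/* Worked example (illustrative, hand-decoded from the ARM STRH
   immediate encoding, so treat it as a sketch): for the instruction
   0xe1c210b6, i.e. "strh r1, [r2, #6]", bits 21-24 give opcode 14 and
   bits 4-7 give decode 11, so the "misc store, immediate offset" path
   above is taken; offset_8 is (0 << 4) | 6 = 6, the recorded memory
   range is 2 bytes at R2 + 6, and no register is recorded since there
   is no write-back.  */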
9971
9972/* Handling ARM extension space insns. */
9973
9974static int
9975arm_record_extension_space (insn_decode_record *arm_insn_r)
9976{
 9977 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
9978 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9979 uint32_t record_buf[8], record_buf_mem[8];
9980 uint32_t reg_src1 = 0;
72508ac0
PO
9981 struct regcache *reg_cache = arm_insn_r->regcache;
9982 ULONGEST u_regval = 0;
9983
9984 gdb_assert (!INSN_RECORDED(arm_insn_r));
9985 /* Handle unconditional insn extension space. */
9986
9987 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9988 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9989 if (arm_insn_r->cond)
9990 {
 9991 /* PLD has no effect on architectural state, it just affects
9992 the caches. */
9993 if (5 == ((opcode1 & 0xE0) >> 5))
9994 {
9995 /* BLX(1) */
9996 record_buf[0] = ARM_PS_REGNUM;
9997 record_buf[1] = ARM_LR_REGNUM;
9998 arm_insn_r->reg_rec_count = 2;
9999 }
10000 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10001 }
10002
10003
10004 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10005 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10006 {
10007 ret = -1;
10008 /* Undefined instruction on ARM V5; need to handle if later
10009 versions define it. */
10010 }
10011
10012 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10013 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10014 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10015
10016 /* Handle arithmetic insn extension space. */
10017 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10018 && !INSN_RECORDED(arm_insn_r))
10019 {
10020 /* Handle MLA(S) and MUL(S). */
10021 if (0 <= insn_op1 && 3 >= insn_op1)
10022 {
10023 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10024 record_buf[1] = ARM_PS_REGNUM;
10025 arm_insn_r->reg_rec_count = 2;
10026 }
10027 else if (4 <= insn_op1 && 15 >= insn_op1)
10028 {
10029 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10030 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10031 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10032 record_buf[2] = ARM_PS_REGNUM;
10033 arm_insn_r->reg_rec_count = 3;
10034 }
10035 }
10036
10037 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10038 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10039 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10040
10041 /* Handle control insn extension space. */
10042
10043 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10044 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10045 {
10046 if (!bit (arm_insn_r->arm_insn,25))
10047 {
10048 if (!bits (arm_insn_r->arm_insn, 4, 7))
10049 {
10050 if ((0 == insn_op1) || (2 == insn_op1))
10051 {
10052 /* MRS. */
10053 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10054 arm_insn_r->reg_rec_count = 1;
10055 }
10056 else if (1 == insn_op1)
10057 {
10058	                  /* CPSR is going to be changed. */
10059 record_buf[0] = ARM_PS_REGNUM;
10060 arm_insn_r->reg_rec_count = 1;
10061 }
10062 else if (3 == insn_op1)
10063 {
10064 /* SPSR is going to be changed. */
10065 /* We need to get SPSR value, which is yet to be done. */
10066 return -1;
10067 }
10068 }
10069 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10070 {
10071 if (1 == insn_op1)
10072 {
10073 /* BX. */
10074 record_buf[0] = ARM_PS_REGNUM;
10075 arm_insn_r->reg_rec_count = 1;
10076 }
10077 else if (3 == insn_op1)
10078 {
10079 /* CLZ. */
10080 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10081 arm_insn_r->reg_rec_count = 1;
10082 }
10083 }
10084 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10085 {
10086 /* BLX. */
10087 record_buf[0] = ARM_PS_REGNUM;
10088 record_buf[1] = ARM_LR_REGNUM;
10089 arm_insn_r->reg_rec_count = 2;
10090 }
10091 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10092 {
10093 /* QADD, QSUB, QDADD, QDSUB */
10094 record_buf[0] = ARM_PS_REGNUM;
10095 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10096 arm_insn_r->reg_rec_count = 2;
10097 }
10098 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10099 {
10100 /* BKPT. */
10101 record_buf[0] = ARM_PS_REGNUM;
10102 record_buf[1] = ARM_LR_REGNUM;
10103 arm_insn_r->reg_rec_count = 2;
10104
10105	          /* Save SPSR also; how? */
10106 return -1;
10107 }
10108	      else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10109 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10110 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10111 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10112 )
10113 {
10114 if (0 == insn_op1 || 1 == insn_op1)
10115 {
10116 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10117	              /* We don't optimize for SMULW<y>, where only Rd is
10118	                 needed. */
10119 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10120 record_buf[1] = ARM_PS_REGNUM;
10121 arm_insn_r->reg_rec_count = 2;
10122 }
10123 else if (2 == insn_op1)
10124 {
10125 /* SMLAL<x><y>. */
10126 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10127 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10128 arm_insn_r->reg_rec_count = 2;
10129 }
10130 else if (3 == insn_op1)
10131 {
10132 /* SMUL<x><y>. */
10133 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10134 arm_insn_r->reg_rec_count = 1;
10135 }
10136 }
10137 }
10138 else
10139 {
10140 /* MSR : immediate form. */
10141 if (1 == insn_op1)
10142 {
10143	          /* CPSR is going to be changed. */
10144 record_buf[0] = ARM_PS_REGNUM;
10145 arm_insn_r->reg_rec_count = 1;
10146 }
10147 else if (3 == insn_op1)
10148 {
10149 /* SPSR is going to be changed. */
10150	          /* We need to get the SPSR value, which is yet to be done. */
10151 return -1;
10152 }
10153 }
10154 }
10155
10156 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10157 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10158 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10159
10160 /* Handle load/store insn extension space. */
10161
10162 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10163 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10164 && !INSN_RECORDED(arm_insn_r))
10165 {
10166 /* SWP/SWPB. */
10167 if (0 == insn_op1)
10168 {
10169	          /* These insns change both a register and memory. */
10170 /* SWP or SWPB insn. */
10171 /* Get memory address given by Rn. */
10172 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10173 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10174	          /* Is it a SWP insn?  It swaps a word. */
10175 if (8 == arm_insn_r->opcode)
10176 {
10177 record_buf_mem[0] = 4;
10178 }
10179 else
10180 {
10181	              /* SWPB insn, swaps only a byte. */
10182 record_buf_mem[0] = 1;
10183 }
10184 record_buf_mem[1] = u_regval;
10185 arm_insn_r->mem_rec_count = 1;
10186 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10187 arm_insn_r->reg_rec_count = 1;
10188 }
10189 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10190 {
10191 /* STRH. */
10192 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10193 ARM_RECORD_STRH);
10194 }
10195 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10196 {
10197 /* LDRD. */
10198 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10199 record_buf[1] = record_buf[0] + 1;
10200 arm_insn_r->reg_rec_count = 2;
10201 }
10202 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10203 {
10204 /* STRD. */
10205 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10206 ARM_RECORD_STRD);
10207 }
10208 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10209 {
10210 /* LDRH, LDRSB, LDRSH. */
10211 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10212 arm_insn_r->reg_rec_count = 1;
10213 }
10214
10215 }
10216
10217 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10218 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10219 && !INSN_RECORDED(arm_insn_r))
10220 {
10221 ret = -1;
10222 /* Handle coprocessor insn extension space. */
10223 }
10224
10225 /* To be done for ARMv5 and later; as of now we return -1. */
10226 if (-1 == ret)
10227	    return ret;
10228
10229 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10230 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10231
10232 return ret;
10233}
10234
10235/* Handling opcode 000 insns. */
10236
10237static int
10238arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10239{
10240 struct regcache *reg_cache = arm_insn_r->regcache;
10241 uint32_t record_buf[8], record_buf_mem[8];
10242 ULONGEST u_regval[2] = {0};
10243
10244	  uint32_t reg_src1 = 0, reg_dest = 0;
10245 uint32_t opcode1 = 0;
10246
10247 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10248 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10249 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10250
10251 /* Data processing insn /multiply insn. */
10252 if (9 == arm_insn_r->decode
10253 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10254 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10255 {
10256 /* Handle multiply instructions. */
10257 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10258 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10259 {
10260 /* Handle MLA and MUL. */
10261 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10262 record_buf[1] = ARM_PS_REGNUM;
10263 arm_insn_r->reg_rec_count = 2;
10264 }
10265 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10266 {
10267 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10268 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10269 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10270 record_buf[2] = ARM_PS_REGNUM;
10271 arm_insn_r->reg_rec_count = 3;
10272 }
10273 }
10274 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10275 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10276 {
10277 /* Handle misc load insns, as 20th bit (L = 1). */
10278	      /* The LDR insn is capable of branching: if MOV LR, PC is
10279	         preceded by an LDR insn that has R15 as its destination, the
10280	         pair emulates a branch and link insn, and hence we need to
10281	         save CPSR and PC as well.  I am not sure this is the right
10282	         place; an opcode = 010 LDR insn makes this happen if R15 is
10283	         used. */
10284 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10285 if (15 != reg_dest)
10286 {
10287 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10288 arm_insn_r->reg_rec_count = 1;
10289 }
10290 else
10291 {
10292 record_buf[0] = reg_dest;
10293 record_buf[1] = ARM_PS_REGNUM;
10294 arm_insn_r->reg_rec_count = 2;
10295 }
10296 }
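  /* Note: the sbo_sbz helper used below presumably verifies the
     should-be-one / should-be-zero fields of the encoding (an assumption
     based on its name; it is defined earlier in this file).  */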
10297 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10298 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10299 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10300 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10301 {
10302 /* Handle MSR insn. */
10303 if (9 == arm_insn_r->opcode)
10304 {
10305	          /* CPSR is going to be changed. */
10306 record_buf[0] = ARM_PS_REGNUM;
10307 arm_insn_r->reg_rec_count = 1;
10308 }
10309 else
10310 {
10311 /* SPSR is going to be changed. */
10312 /* How to read SPSR value? */
10313 return -1;
10314 }
10315 }
10316 else if (9 == arm_insn_r->decode
10317 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10318 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10319 {
10320 /* Handling SWP, SWPB. */
10321	      /* These insns change both a register and memory. */
10322 /* SWP or SWPB insn. */
10323
10324 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10325 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10326	      /* Is it a SWP insn?  It swaps a word. */
10327 if (8 == arm_insn_r->opcode)
10328 {
10329 record_buf_mem[0] = 4;
10330 }
10331 else
10332 {
10333	          /* SWPB insn, swaps only a byte. */
10334 record_buf_mem[0] = 1;
10335 }
10336 record_buf_mem[1] = u_regval[0];
10337 arm_insn_r->mem_rec_count = 1;
10338 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10339 arm_insn_r->reg_rec_count = 1;
10340 }
10341 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10342 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10343 {
10344 /* Handle BLX, branch and link/exchange. */
10345 if (9 == arm_insn_r->opcode)
10346 {
10347	          /* The branch is chosen by setting the T bit of CPSR from bit[0]
10348	             of Rm, and R14 stores the return address. */
10349 record_buf[0] = ARM_PS_REGNUM;
10350 record_buf[1] = ARM_LR_REGNUM;
10351 arm_insn_r->reg_rec_count = 2;
10352 }
10353 }
10354 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10355 {
10356 /* Handle enhanced software breakpoint insn, BKPT. */
10357 /* CPSR is changed to be executed in ARM state, disabling normal
10358 interrupts, entering abort mode. */
10359	      /* The PC is set according to the high vector configuration. */
10360	      /* If the user hits a breakpoint and then replays in reverse,
10361	         we need to go back with the previous CPSR and
10362	         Program Counter. */
10363 record_buf[0] = ARM_PS_REGNUM;
10364 record_buf[1] = ARM_LR_REGNUM;
10365 arm_insn_r->reg_rec_count = 2;
10366
10367 /* Save SPSR also; how? */
10368 return -1;
10369 }
10370 else if (11 == arm_insn_r->decode
10371 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10372 {
10373 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10374
10375 /* Handle str(x) insn */
10376 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10377 ARM_RECORD_STRH);
10378 }
10379 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10380 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10381 {
10382 /* Handle BX, branch and link/exchange. */
10383	      /* The branch is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10384 record_buf[0] = ARM_PS_REGNUM;
10385 arm_insn_r->reg_rec_count = 1;
10386 }
10387 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10388 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10389 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10390 {
10391 /* Count leading zeros: CLZ. */
10392 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10393 arm_insn_r->reg_rec_count = 1;
10394 }
10395 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10396 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10397 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10398 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10399 )
10400 {
10401 /* Handle MRS insn. */
10402 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10403 arm_insn_r->reg_rec_count = 1;
10404 }
10405 else if (arm_insn_r->opcode <= 15)
10406 {
10407 /* Normal data processing insns. */
10408	      /* In all 11 shifter-operand modes, the insn modifies the
10409	         destination register, which is specified by bits 12-15. */
10410 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10411 record_buf[1] = ARM_PS_REGNUM;
10412 arm_insn_r->reg_rec_count = 2;
10413 }
10414 else
10415 {
10416 return -1;
10417 }
10418
10419 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10420 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10421 return 0;
10422}
10423
10424/* Handling opcode 001 insns. */
10425
10426static int
10427arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10428{
10429 uint32_t record_buf[8], record_buf_mem[8];
10430
10431 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10432 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10433
10434 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10435 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10436 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10437 )
10438 {
10439 /* Handle MSR insn. */
10440 if (9 == arm_insn_r->opcode)
10441 {
10442	          /* CPSR is going to be changed. */
10443 record_buf[0] = ARM_PS_REGNUM;
10444 arm_insn_r->reg_rec_count = 1;
10445 }
10446 else
10447 {
10448 /* SPSR is going to be changed. */
10449 }
10450 }
10451 else if (arm_insn_r->opcode <= 15)
10452 {
10453 /* Normal data processing insns. */
10454	      /* In all 11 shifter-operand modes, the insn modifies the
10455	         destination register, which is specified by bits 12-15. */
10456 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10457 record_buf[1] = ARM_PS_REGNUM;
10458 arm_insn_r->reg_rec_count = 2;
10459 }
10460 else
10461 {
10462 return -1;
10463 }
10464
10465 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10466 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10467 return 0;
10468}
10469
10470static int
10471arm_record_media (insn_decode_record *arm_insn_r)
10472{
10473 uint32_t record_buf[8];
10474
10475 switch (bits (arm_insn_r->arm_insn, 22, 24))
10476 {
10477 case 0:
10478 /* Parallel addition and subtraction, signed */
10479 case 1:
10480 /* Parallel addition and subtraction, unsigned */
10481 case 2:
10482 case 3:
10483 /* Packing, unpacking, saturation and reversal */
10484 {
10485 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10486
10487 record_buf[arm_insn_r->reg_rec_count++] = rd;
10488 }
10489 break;
10490
10491 case 4:
10492 case 5:
10493 /* Signed multiplies */
10494 {
10495 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10496 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10497
10498 record_buf[arm_insn_r->reg_rec_count++] = rd;
10499 if (op1 == 0x0)
10500 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10501 else if (op1 == 0x4)
10502 record_buf[arm_insn_r->reg_rec_count++]
10503 = bits (arm_insn_r->arm_insn, 12, 15);
10504 }
10505 break;
10506
10507 case 6:
10508 {
10509 if (bit (arm_insn_r->arm_insn, 21)
10510 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10511 {
10512 /* SBFX */
10513 record_buf[arm_insn_r->reg_rec_count++]
10514 = bits (arm_insn_r->arm_insn, 12, 15);
10515 }
10516 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10517 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10518 {
10519 /* USAD8 and USADA8 */
10520 record_buf[arm_insn_r->reg_rec_count++]
10521 = bits (arm_insn_r->arm_insn, 16, 19);
10522 }
10523 }
10524 break;
10525
10526 case 7:
10527 {
10528 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10529 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10530 {
10531 /* Permanently UNDEFINED */
10532 return -1;
10533 }
10534 else
10535 {
10536 /* BFC, BFI and UBFX */
10537 record_buf[arm_insn_r->reg_rec_count++]
10538 = bits (arm_insn_r->arm_insn, 12, 15);
10539 }
10540 }
10541 break;
10542
10543 default:
10544 return -1;
10545 }
10546
10547 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10548
10549 return 0;
10550}
10551
10552/* Handle ARM mode instructions with opcode 010. */
10553
10554static int
10555arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10556{
10557 struct regcache *reg_cache = arm_insn_r->regcache;
10558
10559	  uint32_t reg_base, reg_dest;
10560 uint32_t offset_12, tgt_mem_addr;
10561	  uint32_t record_buf[8], record_buf_mem[8];
10562 unsigned char wback;
10563 ULONGEST u_regval;
10564
10565 /* Calculate wback. */
10566 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10567 || (bit (arm_insn_r->arm_insn, 21) == 1);
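  /* In the encoding above, assuming the usual ARM load/store word/byte
     layout, bit 24 is P (0 = post-indexed) and bit 21 is W (write-back),
     so WBACK is true whenever the base register gets updated.  */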
10568
10569 arm_insn_r->reg_rec_count = 0;
10570 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10571
10572 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10573 {
10574 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10575 and LDRT. */
10576
10577	      reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10578 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10579
10580 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10581	         precedes an LDR instruction that has R15 as its destination, it
10582 emulates a branch and link instruction, and hence we need to save
10583 CPSR and PC as well. */
10584 if (ARM_PC_REGNUM == reg_dest)
10585 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10586
10587 /* If wback is true, also save the base register, which is going to be
10588 written to. */
10589 if (wback)
10590 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10591 }
10592 else
10593 {
10594 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10595
10596	      offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10597 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10598
10599 /* Handle bit U. */
10600	      if (bit (arm_insn_r->arm_insn, 23))
10601 {
10602 /* U == 1: Add the offset. */
10603 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10604 }
10605	      else
10606 {
10607 /* U == 0: subtract the offset. */
10608 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10609 }
10610
10611 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10612 bytes. */
10613 if (bit (arm_insn_r->arm_insn, 22))
10614 {
10615 /* STRB and STRBT: 1 byte. */
10616 record_buf_mem[0] = 1;
10617 }
10618 else
10619 {
10620 /* STR and STRT: 4 bytes. */
10621 record_buf_mem[0] = 4;
10622 }
10623
10624 /* Handle bit P. */
10625 if (bit (arm_insn_r->arm_insn, 24))
10626 record_buf_mem[1] = tgt_mem_addr;
10627 else
10628 record_buf_mem[1] = (uint32_t) u_regval;
10629
10630 arm_insn_r->mem_rec_count = 1;
10631
10632 /* If wback is true, also save the base register, which is going to be
10633 written to. */
10634 if (wback)
10635 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10636 }
10637
10638 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10639 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10640 return 0;
10641}
10642
10643/* Handling opcode 011 insns. */
10644
10645static int
10646arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10647{
10648 struct regcache *reg_cache = arm_insn_r->regcache;
10649
10650 uint32_t shift_imm = 0;
10651 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10652 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10653 uint32_t record_buf[8], record_buf_mem[8];
10654
10655 LONGEST s_word;
10656 ULONGEST u_regval[2];
10657
10658 if (bit (arm_insn_r->arm_insn, 4))
10659 return arm_record_media (arm_insn_r);
10660
10661 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10662 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10663
10664	  /* Handle enhanced store insns and the LDRD DSP insn; the ordering
10665	     below follows the addressing modes of the store insns, starting
10666	     with STRH. */
10667
10668 /* LDR or STR? */
10669 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10670 {
10671 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10672	      /* The LDR insn is capable of branching: if MOV LR, PC is
10673	         preceded by an LDR insn that has R15 as its destination,
10674	         the pair emulates a branch and link insn, and hence we
10675	         need to save CPSR and PC as well. */
10676 if (15 != reg_dest)
10677 {
10678 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10679 arm_insn_r->reg_rec_count = 1;
10680 }
10681 else
10682 {
10683 record_buf[0] = reg_dest;
10684 record_buf[1] = ARM_PS_REGNUM;
10685 arm_insn_r->reg_rec_count = 2;
10686 }
10687 }
10688 else
10689 {
10690 if (! bits (arm_insn_r->arm_insn, 4, 11))
10691 {
10692 /* Store insn, register offset and register pre-indexed,
10693 register post-indexed. */
10694 /* Get Rm. */
10695 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10696 /* Get Rn. */
10697 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10698	          regcache_raw_read_unsigned (reg_cache, reg_src1,
10699	                                      &u_regval[0]);
10700	          regcache_raw_read_unsigned (reg_cache, reg_src2,
10701	                                      &u_regval[1]);
10702 if (15 == reg_src2)
10703 {
10704	              /* If R15 was used as Rn, the value is the current PC + 8. */
10705	              /* Pre-indexed mode doesn't reach here; illegal insn. */
10706 u_regval[0] = u_regval[0] + 8;
10707 }
10708 /* Calculate target store address, Rn +/- Rm, register offset. */
10709 /* U == 1. */
10710 if (bit (arm_insn_r->arm_insn, 23))
10711 {
10712 tgt_mem_addr = u_regval[0] + u_regval[1];
10713 }
10714 else
10715 {
10716 tgt_mem_addr = u_regval[1] - u_regval[0];
10717 }
10718
10719 switch (arm_insn_r->opcode)
10720 {
10721 /* STR. */
10722 case 8:
10723 case 12:
10724 /* STR. */
10725 case 9:
10726 case 13:
10727 /* STRT. */
10728 case 1:
10729 case 5:
10730 /* STR. */
10731 case 0:
10732 case 4:
10733 record_buf_mem[0] = 4;
10734 break;
10735
10736 /* STRB. */
10737 case 10:
10738 case 14:
10739 /* STRB. */
10740 case 11:
10741 case 15:
10742 /* STRBT. */
10743 case 3:
10744 case 7:
10745 /* STRB. */
10746 case 2:
10747 case 6:
10748 record_buf_mem[0] = 1;
10749 break;
10750
10751 default:
10752 gdb_assert_not_reached ("no decoding pattern found");
10753 break;
10754 }
10755 record_buf_mem[1] = tgt_mem_addr;
10756 arm_insn_r->mem_rec_count = 1;
10757
10758 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10759 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10760 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10761 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10762 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10763 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10764 )
10765 {
10766 /* Rn is going to be changed in pre-indexed mode and
10767 post-indexed mode as well. */
10768 record_buf[0] = reg_src2;
10769 arm_insn_r->reg_rec_count = 1;
10770 }
10771 }
10772 else
10773 {
10774 /* Store insn, scaled register offset; scaled pre-indexed. */
10775 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10776 /* Get Rm. */
10777 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10778 /* Get Rn. */
10779 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10780 /* Get shift_imm. */
10781 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10782 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10783 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10784 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10785 /* Offset_12 used as shift. */
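          /* Bits 5-6 select the shift type applied to Rm: 0 = LSL, 1 = LSR,
             2 = ASR, 3 = ROR (or RRX when the shift immediate is 0),
             matching the cases below.  */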
10786 switch (offset_12)
10787 {
10788 case 0:
10789 /* Offset_12 used as index. */
10790 offset_12 = u_regval[0] << shift_imm;
10791 break;
10792
10793 case 1:
10794	              offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10795 break;
10796
10797 case 2:
10798 if (!shift_imm)
10799 {
10800 if (bit (u_regval[0], 31))
10801 {
10802 offset_12 = 0xFFFFFFFF;
10803 }
10804 else
10805 {
10806 offset_12 = 0;
10807 }
10808 }
10809 else
10810 {
10811 /* This is arithmetic shift. */
10812 offset_12 = s_word >> shift_imm;
10813 }
10814 break;
10815
10816 case 3:
10817 if (!shift_imm)
10818 {
10819 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10820 &u_regval[1]);
10821 /* Get C flag value and shift it by 31. */
10822 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10823 | (u_regval[0]) >> 1);
10824 }
10825 else
10826 {
10827	                  offset_12 = (u_regval[0] >> shift_imm)
10828	                              | (u_regval[0]
10829	                                 << (sizeof (uint32_t) * 8 - shift_imm));
10830 }
10831 break;
10832
10833 default:
10834 gdb_assert_not_reached ("no decoding pattern found");
10835 break;
10836 }
10837
10838 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10839 /* bit U set. */
10840 if (bit (arm_insn_r->arm_insn, 23))
10841 {
10842 tgt_mem_addr = u_regval[1] + offset_12;
10843 }
10844 else
10845 {
10846 tgt_mem_addr = u_regval[1] - offset_12;
10847 }
10848
10849 switch (arm_insn_r->opcode)
10850 {
10851 /* STR. */
10852 case 8:
10853 case 12:
10854 /* STR. */
10855 case 9:
10856 case 13:
10857 /* STRT. */
10858 case 1:
10859 case 5:
10860 /* STR. */
10861 case 0:
10862 case 4:
10863 record_buf_mem[0] = 4;
10864 break;
10865
10866 /* STRB. */
10867 case 10:
10868 case 14:
10869 /* STRB. */
10870 case 11:
10871 case 15:
10872 /* STRBT. */
10873 case 3:
10874 case 7:
10875 /* STRB. */
10876 case 2:
10877 case 6:
10878 record_buf_mem[0] = 1;
10879 break;
10880
10881 default:
10882 gdb_assert_not_reached ("no decoding pattern found");
10883 break;
10884 }
10885 record_buf_mem[1] = tgt_mem_addr;
10886 arm_insn_r->mem_rec_count = 1;
10887
10888 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10889 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10890 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10891 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10892 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10893 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10894 )
10895 {
10896 /* Rn is going to be changed in register scaled pre-indexed
10897	                 mode, and scaled post-indexed mode. */
10898 record_buf[0] = reg_src2;
10899 arm_insn_r->reg_rec_count = 1;
10900 }
10901 }
10902 }
10903
10904 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10905 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10906 return 0;
10907}
10908
10909/* Handle ARM mode instructions with opcode 100. */
10910
10911static int
10912arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10913{
10914 struct regcache *reg_cache = arm_insn_r->regcache;
10915 uint32_t register_count = 0, register_bits;
10916 uint32_t reg_base, addr_mode;
10917	  uint32_t record_buf[24], record_buf_mem[48];
10918 uint32_t wback;
10919 ULONGEST u_regval;
10920
10921 /* Fetch the list of registers. */
10922 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10923 arm_insn_r->reg_rec_count = 0;
10924
10925 /* Fetch the base register that contains the address we are loading data
10926 to. */
10927 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10928
10929 /* Calculate wback. */
10930 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10931
10932 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10933 {
10934	      /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10935
10936	      /* Find out which registers are going to be loaded from memory. */
10937	      while (register_bits)
10938 {
10939 if (register_bits & 0x00000001)
10940 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10941 register_bits = register_bits >> 1;
10942 register_count++;
10943 }
10944
10945
10946 /* If wback is true, also save the base register, which is going to be
10947 written to. */
10948 if (wback)
10949 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10950
10951 /* Save the CPSR register. */
10952 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10953 }
10954 else
10955 {
10956	      /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10957
10958 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10959
10960 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10961
10962 /* Find out how many registers are going to be stored to memory. */
10963	      while (register_bits)
10964 {
10965 if (register_bits & 0x00000001)
10966 register_count++;
10967 register_bits = register_bits >> 1;
10968 }
10969
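      /* ADDR_MODE (bits 23-24) selects the store addressing mode; each case
         below computes the lowest address written, and the single memory
         record covers register_count words starting there.  */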
10970 switch (addr_mode)
10971 {
10972 /* STMDA (STMED): Decrement after. */
10973 case 0:
10974 record_buf_mem[1] = (uint32_t) u_regval
10975 - register_count * INT_REGISTER_SIZE + 4;
10976 break;
10977 /* STM (STMIA, STMEA): Increment after. */
10978 case 1:
10979 record_buf_mem[1] = (uint32_t) u_regval;
10980 break;
10981 /* STMDB (STMFD): Decrement before. */
10982 case 2:
10983 record_buf_mem[1] = (uint32_t) u_regval
10984 - register_count * INT_REGISTER_SIZE;
10985 break;
10986 /* STMIB (STMFA): Increment before. */
10987 case 3:
10988 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10989 break;
10990 default:
10991 gdb_assert_not_reached ("no decoding pattern found");
10992 break;
10993 }
10994
10995 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10996 arm_insn_r->mem_rec_count = 1;
10997
10998 /* If wback is true, also save the base register, which is going to be
10999 written to. */
11000 if (wback)
11001 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11002 }
11003
11004 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11005 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11006 return 0;
11007}
11008
11009/* Handling opcode 101 insns. */
11010
11011static int
11012arm_record_b_bl (insn_decode_record *arm_insn_r)
11013{
11014 uint32_t record_buf[8];
11015
11016 /* Handle B, BL, BLX(1) insns. */
11017 /* B simply branches so we do nothing here. */
11018	  /* Note: BLX(1) doesn't fall here; instead it falls into the
11019	     extension space. */
11020 if (bit (arm_insn_r->arm_insn, 24))
11021 {
11022 record_buf[0] = ARM_LR_REGNUM;
11023 arm_insn_r->reg_rec_count = 1;
11024 }
11025
11026 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11027
11028 return 0;
11029}
11030
11031static int
11032arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11033{
11034 printf_unfiltered (_("Process record does not support instruction "
11035	                     "0x%0x at address %s.\n"), arm_insn_r->arm_insn,
11036 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11037
11038 return -1;
11039}
11040
11041/* Record handler for vector data transfer instructions. */
11042
11043static int
11044arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11045{
11046 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11047 uint32_t record_buf[4];
11048
11049 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11050 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11051 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11052 bit_l = bit (arm_insn_r->arm_insn, 20);
11053 bit_c = bit (arm_insn_r->arm_insn, 8);
11054
11055 /* Handle VMOV instruction. */
11056 if (bit_l && bit_c)
11057 {
11058 record_buf[0] = reg_t;
11059 arm_insn_r->reg_rec_count = 1;
11060 }
11061 else if (bit_l && !bit_c)
11062 {
11063 /* Handle VMOV instruction. */
11064 if (bits_a == 0x00)
11065 {
11066	          record_buf[0] = reg_t;
11067 arm_insn_r->reg_rec_count = 1;
11068 }
11069 /* Handle VMRS instruction. */
11070 else if (bits_a == 0x07)
11071 {
11072 if (reg_t == 15)
11073 reg_t = ARM_PS_REGNUM;
11074
11075 record_buf[0] = reg_t;
11076 arm_insn_r->reg_rec_count = 1;
11077 }
11078 }
11079 else if (!bit_l && !bit_c)
11080 {
11081 /* Handle VMOV instruction. */
11082 if (bits_a == 0x00)
11083 {
11084	          record_buf[0] = ARM_D0_REGNUM + reg_v;
11085
11086 arm_insn_r->reg_rec_count = 1;
11087 }
11088 /* Handle VMSR instruction. */
11089 else if (bits_a == 0x07)
11090 {
11091 record_buf[0] = ARM_FPSCR_REGNUM;
11092 arm_insn_r->reg_rec_count = 1;
11093 }
11094 }
11095 else if (!bit_l && bit_c)
11096 {
11097 /* Handle VMOV instruction. */
11098 if (!(bits_a & 0x04))
11099 {
11100 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11101 + ARM_D0_REGNUM;
11102 arm_insn_r->reg_rec_count = 1;
11103 }
11104 /* Handle VDUP instruction. */
11105 else
11106 {
11107 if (bit (arm_insn_r->arm_insn, 21))
11108 {
11109 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11110 record_buf[0] = reg_v + ARM_D0_REGNUM;
11111 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11112 arm_insn_r->reg_rec_count = 2;
11113 }
11114 else
11115 {
11116 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11117 record_buf[0] = reg_v + ARM_D0_REGNUM;
11118 arm_insn_r->reg_rec_count = 1;
11119 }
11120 }
11121 }
11122
11123 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11124 return 0;
11125}
11126
11127/* Record handler for extension register load/store instructions. */
11128
11129static int
11130arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11131{
11132 uint32_t opcode, single_reg;
11133 uint8_t op_vldm_vstm;
11134 uint32_t record_buf[8], record_buf_mem[128];
11135 ULONGEST u_regval = 0;
11136
11137 struct regcache *reg_cache = arm_insn_r->regcache;
11138
11139 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11140	  single_reg = !bit (arm_insn_r->arm_insn, 8);
11141 op_vldm_vstm = opcode & 0x1b;
11142
11143 /* Handle VMOV instructions. */
11144 if ((opcode & 0x1e) == 0x04)
11145 {
11146	      if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11147 {
11148 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11149 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11150 arm_insn_r->reg_rec_count = 2;
11151 }
11152	      else
11153	        {
11154 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11155 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11156
11157	          if (single_reg)
11158	            {
11159 /* The first S register number m is REG_M:M (M is bit 5),
11160 the corresponding D register number is REG_M:M / 2, which
11161 is REG_M. */
11162 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11163 /* The second S register number is REG_M:M + 1, the
11164 corresponding D register number is (REG_M:M + 1) / 2.
11165 IOW, if bit M is 1, the first and second S registers
11166 are mapped to different D registers, otherwise, they are
11167 in the same D register. */
11168 if (bit_m)
11169 {
11170 record_buf[arm_insn_r->reg_rec_count++]
11171 = ARM_D0_REGNUM + reg_m + 1;
11172 }
11173 }
11174 else
11175 {
11176	              record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11177 arm_insn_r->reg_rec_count = 1;
11178 }
11179 }
11180 }
11181 /* Handle VSTM and VPUSH instructions. */
11182 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11183	           || op_vldm_vstm == 0x12)
11184 {
11185 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11186 uint32_t memory_index = 0;
11187
11188 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11189 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11190 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11191	      imm_off32 = imm_off8 << 2;
11192 memory_count = imm_off8;
11193
11194 if (bit (arm_insn_r->arm_insn, 23))
11195	        start_address = u_regval;
11196	      else
11197	        start_address = u_regval - imm_off32;
11198
11199 if (bit (arm_insn_r->arm_insn, 21))
11200 {
11201 record_buf[0] = reg_rn;
11202 arm_insn_r->reg_rec_count = 1;
11203 }
11204
11205 while (memory_count > 0)
11206	        {
11207	          if (single_reg)
11208	            {
11209 record_buf_mem[memory_index] = 4;
11210 record_buf_mem[memory_index + 1] = start_address;
11211 start_address = start_address + 4;
11212 memory_index = memory_index + 2;
11213 }
11214 else
11215 {
9fde51ed
YQ
11216 record_buf_mem[memory_index] = 4;
11217 record_buf_mem[memory_index + 1] = start_address;
11218 record_buf_mem[memory_index + 2] = 4;
11219 record_buf_mem[memory_index + 3] = start_address + 4;
11220 start_address = start_address + 8;
11221 memory_index = memory_index + 4;
11222 }
11223 memory_count--;
11224 }
11225 arm_insn_r->mem_rec_count = (memory_index >> 1);
11226 }
11227 /* Handle VLDM instructions. */
11228 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11229 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11230 {
11231 uint32_t reg_count, reg_vd;
11232 uint32_t reg_index = 0;
11233	      uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11234
11235 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11236 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11237
11238 /* REG_VD is the first D register number. If the instruction
11239 loads memory to S registers (SINGLE_REG is TRUE), the register
11240 number is (REG_VD << 1 | bit D), so the corresponding D
11241 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11242 if (!single_reg)
11243 reg_vd = reg_vd | (bit_d << 4);
11244
11245	      if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11246	        record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11247
11248 /* If the instruction loads memory to D register, REG_COUNT should
11249 be divided by 2, according to the ARM Architecture Reference
11250 Manual. If the instruction loads memory to S register, divide by
11251 2 as well because two S registers are mapped to D register. */
11252 reg_count = reg_count / 2;
11253 if (single_reg && bit_d)
01e57735 11254 {
9fde51ed
YQ
11255 /* Increase the register count if S register list starts from
11256 an odd number (bit d is one). */
11257 reg_count++;
11258 }
11259
11260 while (reg_count > 0)
11261 {
11262 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11263 reg_count--;
11264 }
11265 arm_insn_r->reg_rec_count = reg_index;
11266 }
11267 /* VSTR Vector store register. */
11268 else if ((opcode & 0x13) == 0x10)
11269 {
11270	      uint32_t start_address, reg_rn, imm_off32, imm_off8;
11271 uint32_t memory_index = 0;
11272
11273 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11274 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11275 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11276	      imm_off32 = imm_off8 << 2;
11277
11278 if (bit (arm_insn_r->arm_insn, 23))
11279	        start_address = u_regval + imm_off32;
11280	      else
11281	        start_address = u_regval - imm_off32;
11282
11283 if (single_reg)
11284	        {
11285 record_buf_mem[memory_index] = 4;
11286 record_buf_mem[memory_index + 1] = start_address;
11287 arm_insn_r->mem_rec_count = 1;
11288 }
11289	      else
11290	        {
11291 record_buf_mem[memory_index] = 4;
11292 record_buf_mem[memory_index + 1] = start_address;
11293 record_buf_mem[memory_index + 2] = 4;
11294 record_buf_mem[memory_index + 3] = start_address + 4;
11295 arm_insn_r->mem_rec_count = 2;
11296 }
11297 }
11298 /* VLDR Vector load register. */
11299 else if ((opcode & 0x13) == 0x11)
11300 {
11301 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11302
11303 if (!single_reg)
11304 {
11305 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11306 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11307 }
11308	      else
11309 {
11310 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11311 /* Record register D rather than pseudo register S. */
11312 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11313	        }
11314 arm_insn_r->reg_rec_count = 1;
11315 }
11316
11317 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11318 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11319 return 0;
11320}
11321
11322/* Record handler for arm/thumb mode VFP data processing instructions. */
11323
11324static int
11325arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11326{
11327 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11328 uint32_t record_buf[4];
11329 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11330 enum insn_types curr_insn_type = INSN_INV;
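  /* The INSN_T* values select what gets recorded in the switch at the end
     of this function: T0 records a pair of D registers, T1 a single D
     register (double precision), T2 the D register containing the
     destination S register (single precision), and T3 only FPSCR.  */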
11331
11332 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11333 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11334 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11335 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11336 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11337 bit_d = bit (arm_insn_r->arm_insn, 22);
11338 opc1 = opc1 & 0x04;
11339
11340 /* Handle VMLA, VMLS. */
11341 if (opc1 == 0x00)
11342 {
11343 if (bit (arm_insn_r->arm_insn, 10))
11344 {
11345 if (bit (arm_insn_r->arm_insn, 6))
11346 curr_insn_type = INSN_T0;
11347 else
11348 curr_insn_type = INSN_T1;
11349 }
11350 else
11351 {
11352 if (dp_op_sz)
11353 curr_insn_type = INSN_T1;
11354 else
11355 curr_insn_type = INSN_T2;
11356 }
11357 }
11358 /* Handle VNMLA, VNMLS, VNMUL. */
11359 else if (opc1 == 0x01)
11360 {
11361 if (dp_op_sz)
11362 curr_insn_type = INSN_T1;
11363 else
11364 curr_insn_type = INSN_T2;
11365 }
11366 /* Handle VMUL. */
11367 else if (opc1 == 0x02 && !(opc3 & 0x01))
11368 {
11369 if (bit (arm_insn_r->arm_insn, 10))
11370 {
11371 if (bit (arm_insn_r->arm_insn, 6))
11372 curr_insn_type = INSN_T0;
11373 else
11374 curr_insn_type = INSN_T1;
11375 }
11376 else
11377 {
11378 if (dp_op_sz)
11379 curr_insn_type = INSN_T1;
11380 else
11381 curr_insn_type = INSN_T2;
11382 }
11383 }
11384 /* Handle VADD, VSUB. */
11385 else if (opc1 == 0x03)
11386 {
11387 if (!bit (arm_insn_r->arm_insn, 9))
11388 {
11389 if (bit (arm_insn_r->arm_insn, 6))
11390 curr_insn_type = INSN_T0;
11391 else
11392 curr_insn_type = INSN_T1;
11393 }
11394 else
11395 {
11396 if (dp_op_sz)
11397 curr_insn_type = INSN_T1;
11398 else
11399 curr_insn_type = INSN_T2;
11400 }
11401 }
11402 /* Handle VDIV. */
11403 else if (opc1 == 0x0b)
11404 {
11405 if (dp_op_sz)
11406 curr_insn_type = INSN_T1;
11407 else
11408 curr_insn_type = INSN_T2;
11409 }
11410 /* Handle all other vfp data processing instructions. */
11411 else if (opc1 == 0x0b)
11412 {
11413 /* Handle VMOV. */
11414 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11415 {
11416 if (bit (arm_insn_r->arm_insn, 4))
11417 {
11418 if (bit (arm_insn_r->arm_insn, 6))
11419 curr_insn_type = INSN_T0;
11420 else
11421 curr_insn_type = INSN_T1;
11422 }
11423 else
11424 {
11425 if (dp_op_sz)
11426 curr_insn_type = INSN_T1;
11427 else
11428 curr_insn_type = INSN_T2;
11429 }
11430 }
11431 /* Handle VNEG and VABS. */
11432 else if ((opc2 == 0x01 && opc3 == 0x01)
11433 || (opc2 == 0x00 && opc3 == 0x03))
11434 {
11435 if (!bit (arm_insn_r->arm_insn, 11))
11436 {
11437 if (bit (arm_insn_r->arm_insn, 6))
11438 curr_insn_type = INSN_T0;
11439 else
11440 curr_insn_type = INSN_T1;
11441 }
11442 else
11443 {
11444 if (dp_op_sz)
11445 curr_insn_type = INSN_T1;
11446 else
11447 curr_insn_type = INSN_T2;
11448 }
11449 }
11450 /* Handle VSQRT. */
11451 else if (opc2 == 0x01 && opc3 == 0x03)
11452 {
11453 if (dp_op_sz)
11454 curr_insn_type = INSN_T1;
11455 else
11456 curr_insn_type = INSN_T2;
11457 }
11458 /* Handle VCVT. */
11459 else if (opc2 == 0x07 && opc3 == 0x03)
11460 {
11461 if (!dp_op_sz)
11462 curr_insn_type = INSN_T1;
11463 else
11464 curr_insn_type = INSN_T2;
11465 }
11466 else if (opc3 & 0x01)
11467 {
11468 /* Handle VCVT. */
11469 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11470 {
11471 if (!bit (arm_insn_r->arm_insn, 18))
11472 curr_insn_type = INSN_T2;
11473 else
11474 {
11475 if (dp_op_sz)
11476 curr_insn_type = INSN_T1;
11477 else
11478 curr_insn_type = INSN_T2;
11479 }
11480 }
11481 /* Handle VCVT. */
11482 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11483 {
11484 if (dp_op_sz)
11485 curr_insn_type = INSN_T1;
11486 else
11487 curr_insn_type = INSN_T2;
11488 }
11489 /* Handle VCVTB, VCVTT. */
11490 else if ((opc2 & 0x0e) == 0x02)
11491 curr_insn_type = INSN_T2;
11492 /* Handle VCMP, VCMPE. */
11493 else if ((opc2 & 0x0e) == 0x04)
11494 curr_insn_type = INSN_T3;
11495 }
11496 }
11497
11498 switch (curr_insn_type)
11499 {
11500 case INSN_T0:
11501 reg_vd = reg_vd | (bit_d << 4);
11502 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11503 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11504 arm_insn_r->reg_rec_count = 2;
11505 break;
11506
11507 case INSN_T1:
11508 reg_vd = reg_vd | (bit_d << 4);
11509 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11510 arm_insn_r->reg_rec_count = 1;
11511 break;
11512
11513 case INSN_T2:
11514 reg_vd = (reg_vd << 1) | bit_d;
11515 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11516 arm_insn_r->reg_rec_count = 1;
11517 break;
11518
11519 case INSN_T3:
11520 record_buf[0] = ARM_FPSCR_REGNUM;
11521 arm_insn_r->reg_rec_count = 1;
11522 break;
11523
11524 default:
11525 gdb_assert_not_reached ("no decoding pattern found");
11526 break;
11527 }
11528
11529 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11530 return 0;
11531}
11532
11533/* Handling opcode 110 insns. */
11534
11535static int
11536arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11537{
bec2ab5a 11538 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11539
11540 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11541 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11542 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11543
11544 if ((coproc & 0x0e) == 0x0a)
11545 {
11546 /* Handle extension register ld/st instructions. */
11547 if (!(op1 & 0x20))
11548	        return arm_record_exreg_ld_st_insn (arm_insn_r);
11549
11550 /* 64-bit transfers between arm core and extension registers. */
11551 if ((op1 & 0x3e) == 0x04)
11552	        return arm_record_exreg_ld_st_insn (arm_insn_r);
11553 }
11554 else
11555 {
11556 /* Handle coprocessor ld/st instructions. */
11557 if (!(op1 & 0x3a))
11558 {
11559 /* Store. */
11560 if (!op1_ebit)
11561 return arm_record_unsupported_insn (arm_insn_r);
11562 else
11563 /* Load. */
11564 return arm_record_unsupported_insn (arm_insn_r);
11565 }
11566
11567 /* Move to coprocessor from two arm core registers. */
11568 if (op1 == 0x4)
11569 return arm_record_unsupported_insn (arm_insn_r);
11570
11571 /* Move to two arm core registers from coprocessor. */
11572 if (op1 == 0x5)
11573 {
11574 uint32_t reg_t[2];
11575
11576 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11577 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11578 arm_insn_r->reg_rec_count = 2;
11579
11580 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11581 return 0;
11582 }
11583 }
11584 return arm_record_unsupported_insn (arm_insn_r);
11585}
11586
11587/* Handling opcode 111 insns. */
11588
11589static int
11590arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11591{
11592	  uint32_t op, op1_sbit, op1_ebit, coproc;
11593 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11594 struct regcache *reg_cache = arm_insn_r->regcache;
11595
11596 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11597 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11598 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11599 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11600 op = bit (arm_insn_r->arm_insn, 4);
11601
11602 /* Handle arm SWI/SVC system call instructions. */
11603	  if (op1_sbit)
11604 {
11605 if (tdep->arm_syscall_record != NULL)
11606 {
11607 ULONGEST svc_operand, svc_number;
11608
11609 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11610
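	  /* Under the old ARM OABI the SWI immediate encodes the syscall
	     number as 0x900000 + NR, so a non-zero operand is translated;
	     under EABI the immediate is 0 and the number is passed in r7.  */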
11611 if (svc_operand) /* OABI. */
11612 svc_number = svc_operand - 0x900000;
11613 else /* EABI. */
11614 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11615
11616	          return tdep->arm_syscall_record (reg_cache, svc_number);
11617 }
11618 else
11619 {
11620 printf_unfiltered (_("no syscall record support\n"));
11621	          return -1;
11622 }
11623 }
11624
11625 if ((coproc & 0x0e) == 0x0a)
11626 {
11627 /* VFP data-processing instructions. */
11628 if (!op1_sbit && !op)
11629	        return arm_record_vfp_data_proc_insn (arm_insn_r);
11630
11631 /* Advanced SIMD, VFP instructions. */
11632 if (!op1_sbit && op)
11633	        return arm_record_vdata_transfer_insn (arm_insn_r);
11634	    }
11635 else
11636 {
11637 /* Coprocessor data operations. */
11638 if (!op1_sbit && !op)
11639 return arm_record_unsupported_insn (arm_insn_r);
11640
11641 /* Move to Coprocessor from ARM core register. */
11642 if (!op1_sbit && !op1_ebit && op)
11643 return arm_record_unsupported_insn (arm_insn_r);
11644
11645 /* Move to arm core register from coprocessor. */
11646 if (!op1_sbit && op1_ebit && op)
11647 {
11648 uint32_t record_buf[1];
11649
11650 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11651 if (record_buf[0] == 15)
11652 record_buf[0] = ARM_PS_REGNUM;
11653
11654 arm_insn_r->reg_rec_count = 1;
11655 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11656 record_buf);
11657 return 0;
11658 }
11659	    }
11660
11661	  return arm_record_unsupported_insn (arm_insn_r);
11662}
11663
11664/* Handling opcode 000 insns. */
11665
11666static int
11667thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11668{
11669 uint32_t record_buf[8];
11670 uint32_t reg_src1 = 0;
11671
11672 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11673
11674 record_buf[0] = ARM_PS_REGNUM;
11675 record_buf[1] = reg_src1;
11676 thumb_insn_r->reg_rec_count = 2;
11677
11678 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11679
11680 return 0;
11681}
11682
11683
11684/* Handling opcode 001 insns. */
11685
11686static int
11687thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11688{
11689 uint32_t record_buf[8];
11690 uint32_t reg_src1 = 0;
11691
11692 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11693
11694 record_buf[0] = ARM_PS_REGNUM;
11695 record_buf[1] = reg_src1;
11696 thumb_insn_r->reg_rec_count = 2;
11697
11698 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11699
11700 return 0;
11701}
11702
11703/* Handling opcode 010 insns. */
11704
11705static int
11706thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11707{
11708 struct regcache *reg_cache = thumb_insn_r->regcache;
11709 uint32_t record_buf[8], record_buf_mem[8];
11710
11711 uint32_t reg_src1 = 0, reg_src2 = 0;
11712 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11713
11714 ULONGEST u_regval[2] = {0};
11715
11716 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11717
11718 if (bit (thumb_insn_r->arm_insn, 12))
11719 {
11720 /* Handle load/store register offset. */
11721 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11722
11723 if (opB >= 4 && opB <= 7)
72508ac0
PO
11724 {
11725 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11726	          reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11727 record_buf[0] = reg_src1;
11728 thumb_insn_r->reg_rec_count = 1;
11729 }
b121eeb9 11730 else if (opB >= 0 && opB <= 2)
72508ac0
PO
11731 {
11732 /* STR(2), STRB(2), STRH(2) . */
11733 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11734 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11735 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11736 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11737	          if (0 == opB)
11738	            record_buf_mem[0] = 4;    /* STR (2). */
11739	          else if (2 == opB)
11740	            record_buf_mem[0] = 1;    /* STRB (2). */
11741	          else if (1 == opB)
11742 record_buf_mem[0] = 2; /* STRH (2). */
11743 record_buf_mem[1] = u_regval[0] + u_regval[1];
11744 thumb_insn_r->mem_rec_count = 1;
11745 }
11746 }
11747 else if (bit (thumb_insn_r->arm_insn, 11))
11748 {
11749 /* Handle load from literal pool. */
11750 /* LDR(3). */
11751 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11752 record_buf[0] = reg_src1;
11753 thumb_insn_r->reg_rec_count = 1;
11754 }
11755 else if (opcode1)
11756 {
11757	      /* Special data instructions and branch and exchange */
11758 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11759 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11760 if ((3 == opcode2) && (!opcode3))
11761 {
11762 /* Branch with exchange. */
11763 record_buf[0] = ARM_PS_REGNUM;
11764 thumb_insn_r->reg_rec_count = 1;
11765 }
11766 else
11767 {
11768 /* Format 8; special data processing insns. */
11769 record_buf[0] = ARM_PS_REGNUM;
11770 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11771 | bits (thumb_insn_r->arm_insn, 0, 2));
11772 thumb_insn_r->reg_rec_count = 2;
11773 }
11774 }
11775 else
11776 {
11777 /* Format 5; data processing insns. */
11778 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11779 if (bit (thumb_insn_r->arm_insn, 7))
11780 {
11781 reg_src1 = reg_src1 + 8;
11782 }
11783 record_buf[0] = ARM_PS_REGNUM;
11784 record_buf[1] = reg_src1;
11785 thumb_insn_r->reg_rec_count = 2;
11786 }
11787
11788 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11789 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11790 record_buf_mem);
11791
11792 return 0;
11793}
11794
11795/* Handling opcode 001 insns. */
11796
11797static int
11798thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11799{
11800 struct regcache *reg_cache = thumb_insn_r->regcache;
11801 uint32_t record_buf[8], record_buf_mem[8];
11802
11803 uint32_t reg_src1 = 0;
11804 uint32_t opcode = 0, immed_5 = 0;
11805
11806 ULONGEST u_regval = 0;
11807
11808 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11809
11810 if (opcode)
11811 {
11812 /* LDR(1). */
11813 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11814 record_buf[0] = reg_src1;
11815 thumb_insn_r->reg_rec_count = 1;
11816 }
11817 else
11818 {
11819 /* STR(1). */
11820 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11821 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11822 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11823 record_buf_mem[0] = 4;
11824 record_buf_mem[1] = u_regval + (immed_5 * 4);
11825 thumb_insn_r->mem_rec_count = 1;
11826 }
11827
11828 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11829 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11830 record_buf_mem);
11831
11832 return 0;
11833}
11834
11835/* Handling opcode 100 insns. */
11836
11837static int
11838thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11839{
11840 struct regcache *reg_cache = thumb_insn_r->regcache;
11841 uint32_t record_buf[8], record_buf_mem[8];
11842
11843 uint32_t reg_src1 = 0;
11844 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11845
11846 ULONGEST u_regval = 0;
11847
11848 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11849
11850 if (3 == opcode)
11851 {
11852 /* LDR(4). */
11853 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11854 record_buf[0] = reg_src1;
11855 thumb_insn_r->reg_rec_count = 1;
11856 }
11857 else if (1 == opcode)
11858 {
11859 /* LDRH(1). */
11860 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11861 record_buf[0] = reg_src1;
11862 thumb_insn_r->reg_rec_count = 1;
11863 }
11864 else if (2 == opcode)
11865 {
11866 /* STR(3). */
11867 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11868 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11869 record_buf_mem[0] = 4;
11870 record_buf_mem[1] = u_regval + (immed_8 * 4);
11871 thumb_insn_r->mem_rec_count = 1;
11872 }
11873 else if (0 == opcode)
11874 {
11875 /* STRH(1). */
11876 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11877 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11878 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11879 record_buf_mem[0] = 2;
11880 record_buf_mem[1] = u_regval + (immed_5 * 2);
11881 thumb_insn_r->mem_rec_count = 1;
11882 }
11883
11884 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11885 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11886 record_buf_mem);
11887
11888 return 0;
11889}
11890
11891/* Handling opcode 101 insns. */
11892
11893static int
11894thumb_record_misc (insn_decode_record *thumb_insn_r)
11895{
11896 struct regcache *reg_cache = thumb_insn_r->regcache;
11897
11898 uint32_t opcode = 0;
11899 uint32_t register_bits = 0, register_count = 0;
11900 uint32_t index = 0, start_address = 0;
11901 uint32_t record_buf[24], record_buf_mem[48];
11902 uint32_t reg_src1;
11903
11904 ULONGEST u_regval = 0;
11905
11906 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11907
11908 if (opcode == 0 || opcode == 1)
11909 {
11910 /* ADR and ADD (SP plus immediate) */
11911
11912 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11913 record_buf[0] = reg_src1;
11914 thumb_insn_r->reg_rec_count = 1;
11915 }
11916 else
11917 {
11918 /* Miscellaneous 16-bit instructions */
11919 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11920
11921 switch (opcode2)
11922 {
11923 case 6:
11924 /* SETEND and CPS */
11925 break;
11926 case 0:
11927 /* ADD/SUB (SP plus immediate) */
11928 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11929 record_buf[0] = ARM_SP_REGNUM;
11930 thumb_insn_r->reg_rec_count = 1;
11931 break;
11932 case 1: /* fall through */
11933 case 3: /* fall through */
11934 case 9: /* fall through */
11935 case 11:
11936 /* CBNZ, CBZ */
11937 break;
11938 case 2:
11939 /* SXTH, SXTB, UXTH, UXTB */
11940 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11941 thumb_insn_r->reg_rec_count = 1;
11942 break;
11943 case 4: /* fall through */
11944 case 5:
11945 /* PUSH. */
11946 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11947 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11948 while (register_bits)
11949 {
11950 if (register_bits & 0x00000001)
11951 register_count++;
11952 register_bits = register_bits >> 1;
11953 }
11954 start_address = u_regval -
11955 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
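 /* Bit 8 of the PUSH encoding is the M bit: when set, LR is pushed as
 well, so the lowest address written is SP minus one word per pushed
 register plus one more word for LR. */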
11956 thumb_insn_r->mem_rec_count = register_count;
11957 while (register_count)
11958 {
11959 record_buf_mem[(register_count * 2) - 1] = start_address;
11960 record_buf_mem[(register_count * 2) - 2] = 4;
11961 start_address = start_address + 4;
11962 register_count--;
11963 }
11964 record_buf[0] = ARM_SP_REGNUM;
11965 thumb_insn_r->reg_rec_count = 1;
11966 break;
11967 case 10:
11968 /* REV, REV16, REVSH */
11969 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11970 thumb_insn_r->reg_rec_count = 1;
11971 break;
11972 case 12: /* fall through */
11973 case 13:
11974 /* POP. */
11975 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11976 while (register_bits)
11977 {
11978 if (register_bits & 0x00000001)
11979 record_buf[index++] = register_count;
11980 register_bits = register_bits >> 1;
11981 register_count++;
11982 }
11983 record_buf[index++] = ARM_PS_REGNUM;
11984 record_buf[index++] = ARM_SP_REGNUM;
11985 thumb_insn_r->reg_rec_count = index;
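 /* POP only modifies registers, so just the loaded registers plus PS
 and SP are recorded; no memory record is needed. */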
11986 break;
11987 case 0xe:
11988 /* BKPT insn. */
11989 /* Handle enhanced software breakpoint insn, BKPT. */
11990 /* The CPSR is changed so execution resumes in ARM state with normal
11991 interrupts disabled, and abort mode is entered. */
11992 /* The PC is set according to the high vector configuration. */
11993 /* If the user hits the breakpoint and then types "reverse", we need to
11994 go back with the previous CPSR and program counter. */
11995 record_buf[0] = ARM_PS_REGNUM;
11996 record_buf[1] = ARM_LR_REGNUM;
11997 thumb_insn_r->reg_rec_count = 2;
11998 /* We need to save SPSR value, which is not yet done. */
11999 printf_unfiltered (_("Process record does not support instruction "
12000 "0x%0x at address %s.\n"),
12001 thumb_insn_r->arm_insn,
12002 paddress (thumb_insn_r->gdbarch,
12003 thumb_insn_r->this_addr));
12004 return -1;
12005
12006 case 0xf:
12007 /* If-Then, and hints */
12008 break;
12009 default:
12010 return -1;
12011 }
12012 }
12013
12014 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12015 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12016 record_buf_mem);
12017
12018 return 0;
12019}
12020
12021/* Handling opcode 110 insns. */
12022
12023static int
12024thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12025{
12026 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12027 struct regcache *reg_cache = thumb_insn_r->regcache;
12028
12029 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12030 uint32_t reg_src1 = 0;
12031 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12032 uint32_t index = 0, start_address = 0;
12033 uint32_t record_buf[24], record_buf_mem[48];
12034
12035 ULONGEST u_regval = 0;
12036
12037 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12038 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12039
12040 if (1 == opcode2)
12041 {
12042
12043 /* LDMIA. */
12044 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12045 /* Get Rn. */
12046 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12047 while (register_bits)
12048 {
12049 if (register_bits & 0x00000001)
12050 record_buf[index++] = register_count;
12051 register_bits = register_bits >> 1;
12052 register_count++;
12053 }
12054 record_buf[index++] = reg_src1;
12055 thumb_insn_r->reg_rec_count = index;
12056 }
12057 else if (0 == opcode2)
12058 {
12059 /* Handle STMIA. */
12060 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12061 /* Get Rn. */
12062 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12063 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12064 while (register_bits)
12065 {
12066 if (register_bits & 0x00000001)
12067 register_count++;
12068 register_bits = register_bits >> 1;
12069 }
12070 start_address = u_regval;
12071 thumb_insn_r->mem_rec_count = register_count;
12072 while (register_count)
12073 {
12074 record_buf_mem[(register_count * 2) - 1] = start_address;
12075 record_buf_mem[(register_count * 2) - 2] = 4;
12076 start_address = start_address + 4;
12077 register_count--;
12078 }
12079 }
12080 else if (0x1F == opcode1)
12081 {
12082 /* Handle arm syscall insn. */
12083 if (tdep->arm_syscall_record != NULL)
12084 {
12085 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12086 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12087 }
12088 else
12089 {
12090 printf_unfiltered (_("no syscall record support\n"));
12091 return -1;
12092 }
12093 }
12094
12095 /* B (1), the conditional branch, is automatically taken care of in
12096 process_record, as the PC is saved there. */
12097
12098 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12099 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12100 record_buf_mem);
12101
12102 return ret;
12103}
12104
12105/* Handling opcode 111 insns. */
12106
12107static int
12108thumb_record_branch (insn_decode_record *thumb_insn_r)
12109{
12110 uint32_t record_buf[8];
12111 uint32_t bits_h = 0;
12112
12113 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12114
12115 if (2 == bits_h || 3 == bits_h)
12116 {
12117 /* BL */
12118 record_buf[0] = ARM_LR_REGNUM;
12119 thumb_insn_r->reg_rec_count = 1;
12120 }
12121 else if (1 == bits_h)
12122 {
12123 /* BLX(1). */
12124 record_buf[0] = ARM_PS_REGNUM;
12125 record_buf[1] = ARM_LR_REGNUM;
12126 thumb_insn_r->reg_rec_count = 2;
12127 }
12128
12129 /* B(2) is automatically taken care of in process_record, as the PC
12130 is saved there. */
12131
12132 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12133
12134 return 0;
12135}
12136
12137/* Handler for thumb2 load/store multiple instructions. */
12138
12139static int
12140thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12141{
12142 struct regcache *reg_cache = thumb2_insn_r->regcache;
12143
12144 uint32_t reg_rn, op;
12145 uint32_t register_bits = 0, register_count = 0;
12146 uint32_t index = 0, start_address = 0;
12147 uint32_t record_buf[24], record_buf_mem[48];
12148
12149 ULONGEST u_regval = 0;
12150
12151 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12152 op = bits (thumb2_insn_r->arm_insn, 23, 24);
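 /* op selects the variant: 0 and 3 are RFE/SRS, 1 is increment-after
 (IA) and 2 is decrement-before (DB). */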
12153
12154 if (0 == op || 3 == op)
12155 {
12156 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12157 {
12158 /* Handle RFE instruction. */
12159 record_buf[0] = ARM_PS_REGNUM;
12160 thumb2_insn_r->reg_rec_count = 1;
12161 }
12162 else
12163 {
12164 /* Handle SRS instruction after reading banked SP. */
12165 return arm_record_unsupported_insn (thumb2_insn_r);
12166 }
12167 }
12168 else if (1 == op || 2 == op)
12169 {
12170 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12171 {
12172 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12173 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12174 while (register_bits)
12175 {
12176 if (register_bits & 0x00000001)
12177 record_buf[index++] = register_count;
12178
12179 register_count++;
12180 register_bits = register_bits >> 1;
12181 }
12182 record_buf[index++] = reg_rn;
12183 record_buf[index++] = ARM_PS_REGNUM;
12184 thumb2_insn_r->reg_rec_count = index;
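 /* The base register Rn is recorded too, since LDM with writeback
 updates it, and PS is saved alongside. */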
12185 }
12186 else
12187 {
12188 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12189 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12190 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12191 while (register_bits)
12192 {
12193 if (register_bits & 0x00000001)
12194 register_count++;
12195
12196 register_bits = register_bits >> 1;
12197 }
12198
12199 if (1 == op)
12200 {
12201 /* Start address calculation for STMIA/STMEA. */
12202 start_address = u_regval;
12203 }
12204 else if (2 == op)
12205 {
12206 /* Start address calculation for STMDB/STMFD. */
12207 start_address = u_regval - register_count * 4;
12208 }
12209
12210 thumb2_insn_r->mem_rec_count = register_count;
12211 while (register_count)
12212 {
12213 record_buf_mem[register_count * 2 - 1] = start_address;
12214 record_buf_mem[register_count * 2 - 2] = 4;
12215 start_address = start_address + 4;
12216 register_count--;
12217 }
12218 record_buf[0] = reg_rn;
12219 record_buf[1] = ARM_PS_REGNUM;
12220 thumb2_insn_r->reg_rec_count = 2;
12221 }
12222 }
12223
12224 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12225 record_buf_mem);
12226 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12227 record_buf);
12228 return ARM_RECORD_SUCCESS;
12229}
12230
12231/* Handler for thumb2 load/store (dual/exclusive) and table branch
12232 instructions. */
12233
12234static int
12235thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12236{
12237 struct regcache *reg_cache = thumb2_insn_r->regcache;
12238
12239 uint32_t reg_rd, reg_rn, offset_imm;
12240 uint32_t reg_dest1, reg_dest2;
12241 uint32_t address, offset_addr;
12242 uint32_t record_buf[8], record_buf_mem[8];
12243 uint32_t op1, op2, op3;
12244
12245 ULONGEST u_regval[2];
12246
12247 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12248 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12249 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12250
12251 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12252 {
12253 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12254 {
12255 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12256 record_buf[0] = reg_dest1;
12257 record_buf[1] = ARM_PS_REGNUM;
12258 thumb2_insn_r->reg_rec_count = 2;
12259 }
12260
12261 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12262 {
12263 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12264 record_buf[2] = reg_dest2;
12265 thumb2_insn_r->reg_rec_count = 3;
12266 }
12267 }
12268 else
12269 {
12270 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12271 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12272
12273 if (0 == op1 && 0 == op2)
12274 {
12275 /* Handle STREX. */
12276 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12277 address = u_regval[0] + (offset_imm * 4);
12278 record_buf_mem[0] = 4;
12279 record_buf_mem[1] = address;
12280 thumb2_insn_r->mem_rec_count = 1;
12281 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12282 record_buf[0] = reg_rd;
12283 thumb2_insn_r->reg_rec_count = 1;
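 /* Rd receives the exclusive-store status (0 on success, 1 on failure),
 so it has to be recorded in addition to the stored word. */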
12284 }
12285 else if (1 == op1 && 0 == op2)
12286 {
12287 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12288 record_buf[0] = reg_rd;
12289 thumb2_insn_r->reg_rec_count = 1;
12290 address = u_regval[0];
12291 record_buf_mem[1] = address;
12292
12293 if (4 == op3)
12294 {
12295 /* Handle STREXB. */
12296 record_buf_mem[0] = 1;
12297 thumb2_insn_r->mem_rec_count = 1;
12298 }
12299 else if (5 == op3)
12300 {
12301 /* Handle STREXH. */
12302 record_buf_mem[0] = 2;
12303 thumb2_insn_r->mem_rec_count = 1;
12304 }
12305 else if (7 == op3)
12306 {
12307 /* Handle STREXD. */
12308 address = u_regval[0];
12309 record_buf_mem[0] = 4;
12310 record_buf_mem[2] = 4;
12311 record_buf_mem[3] = address + 4;
12312 thumb2_insn_r->mem_rec_count = 2;
12313 }
12314 }
12315 else
12316 {
12317 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12318
12319 if (bit (thumb2_insn_r->arm_insn, 24))
12320 {
12321 if (bit (thumb2_insn_r->arm_insn, 23))
12322 offset_addr = u_regval[0] + (offset_imm * 4);
12323 else
12324 offset_addr = u_regval[0] - (offset_imm * 4);
12325
12326 address = offset_addr;
12327 }
12328 else
12329 address = u_regval[0];
12330
12331 record_buf_mem[0] = 4;
12332 record_buf_mem[1] = address;
12333 record_buf_mem[2] = 4;
12334 record_buf_mem[3] = address + 4;
12335 thumb2_insn_r->mem_rec_count = 2;
12336 record_buf[0] = reg_rn;
12337 thumb2_insn_r->reg_rec_count = 1;
12338 }
12339 }
12340
12341 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12342 record_buf);
12343 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12344 record_buf_mem);
12345 return ARM_RECORD_SUCCESS;
12346}
12347
12348/* Handler for thumb2 data processing (shift register and modified immediate)
12349 instructions. */
12350
12351static int
12352thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12353{
12354 uint32_t reg_rd, op;
12355 uint32_t record_buf[8];
12356
12357 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12358 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12359
12360 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12361 {
12362 record_buf[0] = ARM_PS_REGNUM;
12363 thumb2_insn_r->reg_rec_count = 1;
12364 }
12365 else
12366 {
12367 record_buf[0] = reg_rd;
12368 record_buf[1] = ARM_PS_REGNUM;
12369 thumb2_insn_r->reg_rec_count = 2;
12370 }
12371
12372 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12373 record_buf);
12374 return ARM_RECORD_SUCCESS;
12375}
12376
12377/* Generic handler for thumb2 instructions which affect the destination and
12378 PS registers. */
12379
12380static int
12381thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12382{
12383 uint32_t reg_rd;
12384 uint32_t record_buf[8];
12385
12386 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12387
12388 record_buf[0] = reg_rd;
12389 record_buf[1] = ARM_PS_REGNUM;
12390 thumb2_insn_r->reg_rec_count = 2;
12391
12392 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12393 record_buf);
12394 return ARM_RECORD_SUCCESS;
12395}
12396
12397/* Handler for thumb2 branch and miscellaneous control instructions. */
12398
12399static int
12400thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12401{
12402 uint32_t op, op1, op2;
12403 uint32_t record_buf[8];
12404
12405 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12406 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12407 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12408
12409 /* Handle MSR insn. */
12410 if (!(op1 & 0x2) && 0x38 == op)
12411 {
12412 if (!(op2 & 0x3))
12413 {
12414 /* CPSR is going to be changed. */
12415 record_buf[0] = ARM_PS_REGNUM;
12416 thumb2_insn_r->reg_rec_count = 1;
12417 }
12418 else
12419 {
12420 arm_record_unsupported_insn (thumb2_insn_r);
12421 return -1;
12422 }
12423 }
12424 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12425 {
12426 /* BLX. */
12427 record_buf[0] = ARM_PS_REGNUM;
12428 record_buf[1] = ARM_LR_REGNUM;
12429 thumb2_insn_r->reg_rec_count = 2;
12430 }
12431
12432 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12433 record_buf);
12434 return ARM_RECORD_SUCCESS;
12435}
12436
12437/* Handler for thumb2 store single data item instructions. */
12438
12439static int
12440thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12441{
12442 struct regcache *reg_cache = thumb2_insn_r->regcache;
12443
12444 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12445 uint32_t address, offset_addr;
12446 uint32_t record_buf[8], record_buf_mem[8];
12447 uint32_t op1, op2;
12448
12449 ULONGEST u_regval[2];
12450
12451 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12452 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12453 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12454 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12455
12456 if (bit (thumb2_insn_r->arm_insn, 23))
12457 {
12458 /* T2 encoding. */
12459 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12460 offset_addr = u_regval[0] + offset_imm;
12461 address = offset_addr;
12462 }
12463 else
12464 {
12465 /* T3 encoding. */
12466 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12467 {
12468 /* Handle STRB (register). */
12469 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12470 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12471 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12472 offset_addr = u_regval[1] << shift_imm;
12473 address = u_regval[0] + offset_addr;
12474 }
12475 else
12476 {
12477 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12478 if (bit (thumb2_insn_r->arm_insn, 10))
12479 {
12480 if (bit (thumb2_insn_r->arm_insn, 9))
12481 offset_addr = u_regval[0] + offset_imm;
12482 else
12483 offset_addr = u_regval[0] - offset_imm;
12484
12485 address = offset_addr;
12486 }
12487 else
12488 address = u_regval[0];
12489 }
12490 }
12491
12492 switch (op1)
12493 {
12494 /* Store byte instructions. */
12495 case 4:
12496 case 0:
12497 record_buf_mem[0] = 1;
12498 break;
12499 /* Store half word instructions. */
12500 case 1:
12501 case 5:
12502 record_buf_mem[0] = 2;
12503 break;
12504 /* Store word instructions. */
12505 case 2:
12506 case 6:
12507 record_buf_mem[0] = 4;
12508 break;
12509
12510 default:
12511 gdb_assert_not_reached ("no decoding pattern found");
12512 break;
12513 }
12514
12515 record_buf_mem[1] = address;
12516 thumb2_insn_r->mem_rec_count = 1;
12517 record_buf[0] = reg_rn;
12518 thumb2_insn_r->reg_rec_count = 1;
12519
12520 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12521 record_buf);
12522 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12523 record_buf_mem);
12524 return ARM_RECORD_SUCCESS;
12525}
12526
12527/* Handler for thumb2 load memory hints instructions. */
12528
12529static int
12530thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12531{
12532 uint32_t record_buf[8];
12533 uint32_t reg_rt, reg_rn;
12534
12535 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12536 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12537
12538 if (ARM_PC_REGNUM != reg_rt)
12539 {
12540 record_buf[0] = reg_rt;
12541 record_buf[1] = reg_rn;
12542 record_buf[2] = ARM_PS_REGNUM;
12543 thumb2_insn_r->reg_rec_count = 3;
12544
12545 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12546 record_buf);
12547 return ARM_RECORD_SUCCESS;
12548 }
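 /* Loads whose destination is the PC are not handled here and are
 reported as a failure to the caller. */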
12549
12550 return ARM_RECORD_FAILURE;
12551}
12552
12553/* Handler for thumb2 load word instructions. */
12554
12555static int
12556thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12557{
12558 uint32_t record_buf[8];
12559
12560 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12561 record_buf[1] = ARM_PS_REGNUM;
12562 thumb2_insn_r->reg_rec_count = 2;
12563
12564 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12565 record_buf);
12566 return ARM_RECORD_SUCCESS;
12567}
12568
12569/* Handler for thumb2 long multiply, long multiply accumulate, and
12570 divide instructions. */
12571
12572static int
12573thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12574{
12575 uint32_t opcode1 = 0, opcode2 = 0;
12576 uint32_t record_buf[8];
12577
12578 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12579 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
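 /* In this encoding group opcode1 is op1 (bits 20-22) and opcode2 is op2
 (bits 4-7); SDIV has op1 == 1 and UDIV has op1 == 3, both with
 op2 == 0xf. */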
12580
12581 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12582 {
12583 /* Handle the long multiply and multiply-accumulate instructions
12584 (SMULL, UMULL, SMLAL, UMLAL and related variants). */
12585 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12586 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12587 record_buf[2] = ARM_PS_REGNUM;
12588 thumb2_insn_r->reg_rec_count = 3;
12589 }
12590 else if (1 == opcode1 || 3 == opcode1)
12591 {
12592 /* Handle SDIV and UDIV. */
12593 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12594 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12595 record_buf[2] = ARM_PS_REGNUM;
12596 thumb2_insn_r->reg_rec_count = 3;
12597 }
12598 else
12599 return ARM_RECORD_FAILURE;
12600
12601 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12602 record_buf);
12603 return ARM_RECORD_SUCCESS;
12604}
12605
12606/* Record handler for thumb32 coprocessor instructions. */
12607
12608static int
12609thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12610{
12611 if (bit (thumb2_insn_r->arm_insn, 25))
12612 return arm_record_coproc_data_proc (thumb2_insn_r);
12613 else
12614 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12615}
12616
12617/* Record handler for advanced SIMD structure load/store instructions. */
12618
12619static int
12620thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12621{
12622 struct regcache *reg_cache = thumb2_insn_r->regcache;
12623 uint32_t l_bit, a_bit, b_bits;
12624 uint32_t record_buf[128], record_buf_mem[128];
12625 uint32_t reg_rn, reg_vd, address, f_elem;
12626 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12627 uint8_t f_ebytes;
12628
12629 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12630 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12631 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12632 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12633 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12634 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12635 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12636 f_elem = 8 / f_ebytes;
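 /* Bits 6-7 give log2 of the element size, so f_ebytes is 1, 2, 4 or 8
 bytes and f_elem is the number of elements per 64-bit D register. */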
12637
12638 if (!l_bit)
12639 {
12640 ULONGEST u_regval = 0;
12641 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12642 address = u_regval;
12643
12644 if (!a_bit)
12645 {
12646 /* Handle VST1. */
12647 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12648 {
12649 if (b_bits == 0x07)
12650 bf_regs = 1;
12651 else if (b_bits == 0x0a)
12652 bf_regs = 2;
12653 else if (b_bits == 0x06)
12654 bf_regs = 3;
12655 else if (b_bits == 0x02)
12656 bf_regs = 4;
12657 else
12658 bf_regs = 0;
12659
12660 for (index_r = 0; index_r < bf_regs; index_r++)
12661 {
12662 for (index_e = 0; index_e < f_elem; index_e++)
12663 {
12664 record_buf_mem[index_m++] = f_ebytes;
12665 record_buf_mem[index_m++] = address;
12666 address = address + f_ebytes;
12667 thumb2_insn_r->mem_rec_count += 1;
12668 }
12669 }
12670 }
12671 /* Handle VST2. */
12672 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12673 {
12674 if (b_bits == 0x09 || b_bits == 0x08)
12675 bf_regs = 1;
12676 else if (b_bits == 0x03)
12677 bf_regs = 2;
12678 else
12679 bf_regs = 0;
12680
12681 for (index_r = 0; index_r < bf_regs; index_r++)
12682 for (index_e = 0; index_e < f_elem; index_e++)
12683 {
12684 for (loop_t = 0; loop_t < 2; loop_t++)
12685 {
12686 record_buf_mem[index_m++] = f_ebytes;
12687 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12688 thumb2_insn_r->mem_rec_count += 1;
12689 }
12690 address = address + (2 * f_ebytes);
12691 }
12692 }
12693 /* Handle VST3. */
12694 else if ((b_bits & 0x0e) == 0x04)
12695 {
12696 for (index_e = 0; index_e < f_elem; index_e++)
12697 {
12698 for (loop_t = 0; loop_t < 3; loop_t++)
12699 {
12700 record_buf_mem[index_m++] = f_ebytes;
12701 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12702 thumb2_insn_r->mem_rec_count += 1;
12703 }
12704 address = address + (3 * f_ebytes);
12705 }
12706 }
12707 /* Handle VST4. */
12708 else if (!(b_bits & 0x0e))
12709 {
12710 for (index_e = 0; index_e < f_elem; index_e++)
12711 {
12712 for (loop_t = 0; loop_t < 4; loop_t++)
12713 {
12714 record_buf_mem[index_m++] = f_ebytes;
12715 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12716 thumb2_insn_r->mem_rec_count += 1;
12717 }
12718 address = address + (4 * f_ebytes);
12719 }
12720 }
12721 }
12722 else
12723 {
12724 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12725
12726 if (bft_size == 0x00)
12727 f_ebytes = 1;
12728 else if (bft_size == 0x01)
12729 f_ebytes = 2;
12730 else if (bft_size == 0x02)
12731 f_ebytes = 4;
12732 else
12733 f_ebytes = 0;
12734
12735 /* Handle VST1. */
12736 if (!(b_bits & 0x0b) || b_bits == 0x08)
12737 thumb2_insn_r->mem_rec_count = 1;
12738 /* Handle VST2. */
12739 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12740 thumb2_insn_r->mem_rec_count = 2;
12741 /* Handle VST3. */
12742 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12743 thumb2_insn_r->mem_rec_count = 3;
12744 /* Handle VST4. */
12745 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12746 thumb2_insn_r->mem_rec_count = 4;
12747
12748 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12749 {
12750 record_buf_mem[index_m * 2] = f_ebytes;
12751 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12752 }
12753 }
12754 }
12755 else
12756 {
12757 if (!a_bit)
12758 {
12759 /* Handle VLD1. */
12760 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12761 thumb2_insn_r->reg_rec_count = 1;
12762 /* Handle VLD2. */
12763 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12764 thumb2_insn_r->reg_rec_count = 2;
12765 /* Handle VLD3. */
12766 else if ((b_bits & 0x0e) == 0x04)
12767 thumb2_insn_r->reg_rec_count = 3;
12768 /* Handle VLD4. */
12769 else if (!(b_bits & 0x0e))
12770 thumb2_insn_r->reg_rec_count = 4;
12771 }
12772 else
12773 {
12774 /* Handle VLD1. */
12775 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12776 thumb2_insn_r->reg_rec_count = 1;
12777 /* Handle VLD2. */
12778 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12779 thumb2_insn_r->reg_rec_count = 2;
12780 /* Handle VLD3. */
12781 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12782 thumb2_insn_r->reg_rec_count = 3;
12783 /* Handle VLD4. */
12784 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12785 thumb2_insn_r->reg_rec_count = 4;
12786
12787 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12788 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12789 }
12790 }
12791
12792 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12793 {
12794 record_buf[index_r] = reg_rn;
12795 thumb2_insn_r->reg_rec_count += 1;
12796 }
12797
12798 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12799 record_buf);
12800 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12801 record_buf_mem);
12802 return 0;
12803}
12804
12805/* Decodes thumb2 instruction type and invokes its record handler. */
12806
12807static unsigned int
12808thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12809{
12810 uint32_t op, op1, op2;
12811
12812 op = bit (thumb2_insn_r->arm_insn, 15);
12813 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12814 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12815
12816 if (op1 == 0x01)
12817 {
12818 if (!(op2 & 0x64 ))
12819 {
12820 /* Load/store multiple instruction. */
12821 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12822 }
12823 else if ((op2 & 0x64) == 0x4)
12824 {
12825 /* Load/store (dual/exclusive) and table branch instruction. */
12826 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12827 }
12828 else if ((op2 & 0x60) == 0x20)
12829 {
12830 /* Data-processing (shifted register). */
12831 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12832 }
12833 else if (op2 & 0x40)
12834 {
12835 /* Co-processor instructions. */
12836 return thumb2_record_coproc_insn (thumb2_insn_r);
12837 }
12838 }
12839 else if (op1 == 0x02)
12840 {
12841 if (op)
12842 {
12843 /* Branches and miscellaneous control instructions. */
12844 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12845 }
12846 else if (op2 & 0x20)
12847 {
12848 /* Data-processing (plain binary immediate) instruction. */
12849 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12850 }
12851 else
12852 {
12853 /* Data-processing (modified immediate). */
12854 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12855 }
12856 }
12857 else if (op1 == 0x03)
12858 {
12859 if (!(op2 & 0x71 ))
12860 {
12861 /* Store single data item. */
12862 return thumb2_record_str_single_data (thumb2_insn_r);
12863 }
12864 else if (!((op2 & 0x71) ^ 0x10))
12865 {
12866 /* Advanced SIMD or structure load/store instructions. */
12867 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12868 }
12869 else if (!((op2 & 0x67) ^ 0x01))
12870 {
12871 /* Load byte, memory hints instruction. */
12872 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12873 }
12874 else if (!((op2 & 0x67) ^ 0x03))
12875 {
12876 /* Load halfword, memory hints instruction. */
12877 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12878 }
12879 else if (!((op2 & 0x67) ^ 0x05))
12880 {
12881 /* Load word instruction. */
12882 return thumb2_record_ld_word (thumb2_insn_r);
12883 }
12884 else if (!((op2 & 0x70) ^ 0x20))
12885 {
12886 /* Data-processing (register) instruction. */
12887 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12888 }
12889 else if (!((op2 & 0x78) ^ 0x30))
12890 {
12891 /* Multiply, multiply accumulate, abs diff instruction. */
12892 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12893 }
12894 else if (!((op2 & 0x78) ^ 0x38))
12895 {
12896 /* Long multiply, long multiply accumulate, and divide. */
12897 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12898 }
12899 else if (op2 & 0x40)
12900 {
12901 /* Co-processor instructions. */
12902 return thumb2_record_coproc_insn (thumb2_insn_r);
12903 }
12904 }
12905
12906 return -1;
12907}
12908
12909namespace {
12910/* Abstract memory reader. */
12911
12912class abstract_memory_reader
12913{
12914public:
12915 /* Read LEN bytes of target memory at address MEMADDR, placing the
12916 results in GDB's memory at BUF. Return true on success. */
12917
12918 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12919};
12920
12921/* Instruction reader from real target. */
12922
12923class instruction_reader : public abstract_memory_reader
12924{
12925 public:
12926 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
12927 {
12928 if (target_read_memory (memaddr, buf, len))
12929 return false;
12930 else
12931 return true;
12932 }
12933};
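/* decode_insn and extract_arm_insn take any abstract_memory_reader, which
 lets the self tests below drive the decoder from a static instruction
 buffer instead of reading target memory. */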
12934
12935} // namespace
12936
12937/* Extract an arm/thumb/thumb2 instruction of the given size. Return 0 on
12938 success and a positive value on failure. */
12939
12940static int
12941extract_arm_insn (abstract_memory_reader& reader,
12942 insn_decode_record *insn_record, uint32_t insn_size)
12943{
12944 gdb_byte buf[insn_size];
12945
12946 memset (&buf[0], 0, insn_size);
12947
12948 if (!reader.read (insn_record->this_addr, buf, insn_size))
12949 return 1;
12950 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12951 insn_size,
12952 gdbarch_byte_order_for_code (insn_record->gdbarch));
12953 return 0;
12954}
12955
12956typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12957
12958/* Decode an arm/thumb instruction depending on its condition codes and
12959 opcodes, and dispatch it to the matching record handler. */
12960
12961static int
12962decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
12963 record_type_t record_type, uint32_t insn_size)
12964{
12965
12966 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
12967 instruction. */
12968 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12969 {
12970 arm_record_data_proc_misc_ld_str, /* 000. */
12971 arm_record_data_proc_imm, /* 001. */
12972 arm_record_ld_st_imm_offset, /* 010. */
12973 arm_record_ld_st_reg_offset, /* 011. */
12974 arm_record_ld_st_multiple, /* 100. */
12975 arm_record_b_bl, /* 101. */
12976 arm_record_asimd_vfp_coproc, /* 110. */
12977 arm_record_coproc_data_proc /* 111. */
12978 };
12979
12980 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
12981 instruction. */
12982 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12983 {
12984 thumb_record_shift_add_sub, /* 000. */
12985 thumb_record_add_sub_cmp_mov, /* 001. */
12986 thumb_record_ld_st_reg_offset, /* 010. */
12987 thumb_record_ld_st_imm_offset, /* 011. */
12988 thumb_record_ld_st_stack, /* 100. */
12989 thumb_record_misc, /* 101. */
12990 thumb_record_ldm_stm_swi, /* 110. */
12991 thumb_record_branch /* 111. */
12992 };
12993
12994 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12995 uint32_t insn_id = 0;
12996
12997 if (extract_arm_insn (reader, arm_record, insn_size))
12998 {
12999 if (record_debug)
13000 {
13001 printf_unfiltered (_("Process record: error reading memory at "
13002 "addr %s len = %d.\n"),
13003 paddress (arm_record->gdbarch,
13004 arm_record->this_addr), insn_size);
13005 }
13006 return -1;
13007 }
13008 else if (ARM_RECORD == record_type)
13009 {
13010 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13011 insn_id = bits (arm_record->arm_insn, 25, 27);
13012
13013 if (arm_record->cond == 0xf)
13014 ret = arm_record_extension_space (arm_record);
13015 else
13016 {
13017 /* If this insn has fallen into extension space
13018 then we need not decode it anymore. */
13019 ret = arm_handle_insn[insn_id] (arm_record);
13020 }
13021 if (ret != ARM_RECORD_SUCCESS)
13022 {
13023 arm_record_unsupported_insn (arm_record);
13024 ret = -1;
13025 }
13026 }
13027 else if (THUMB_RECORD == record_type)
13028 {
13029 /* As thumb does not have condition codes, we set negative. */
13030 arm_record->cond = -1;
13031 insn_id = bits (arm_record->arm_insn, 13, 15);
13032 ret = thumb_handle_insn[insn_id] (arm_record);
13033 if (ret != ARM_RECORD_SUCCESS)
13034 {
13035 arm_record_unsupported_insn (arm_record);
13036 ret = -1;
13037 }
13038 }
13039 else if (THUMB2_RECORD == record_type)
13040 {
13041 /* Thumb instructions have no condition codes, so set cond to -1. */
13042 arm_record->cond = -1;
13043
13044 /* Swap first half of 32bit thumb instruction with second half. */
13045 arm_record->arm_insn
13046 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13047
13048 ret = thumb2_record_decode_insn_handler (arm_record);
13049
13050 if (ret != ARM_RECORD_SUCCESS)
13051 {
13052 arm_record_unsupported_insn (arm_record);
13053 ret = -1;
13054 }
13055 }
13056 else
13057 {
13058 /* Throw assertion. */
13059 gdb_assert_not_reached ("not a valid instruction, could not decode");
13060 }
13061
13062 return ret;
13063}
13064
13065#if GDB_SELF_TEST
13066namespace selftests {
13067
13068/* Provide both 16-bit and 32-bit thumb instructions. */
13069
13070class instruction_reader_thumb : public abstract_memory_reader
13071{
13072public:
13073 template<size_t SIZE>
13074 instruction_reader_thumb (enum bfd_endian endian,
13075 const uint16_t (&insns)[SIZE])
13076 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13077 {}
13078
13079 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
13080 {
13081 SELF_CHECK (len == 4 || len == 2);
13082 SELF_CHECK (memaddr % 2 == 0);
13083 SELF_CHECK ((memaddr / 2) < m_insns_size);
13084
13085 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13086 if (len == 4)
13087 {
13088 store_unsigned_integer (&buf[2], 2, m_endian,
13089 m_insns[memaddr / 2 + 1]);
13090 }
13091 return true;
13092 }
13093
13094private:
13095 enum bfd_endian m_endian;
13096 const uint16_t *m_insns;
13097 size_t m_insns_size;
13098};
13099
13100static void
13101arm_record_test (void)
13102{
13103 struct gdbarch_info info;
13104 gdbarch_info_init (&info);
13105 info.bfd_arch_info = bfd_scan_arch ("arm");
13106
13107 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13108
13109 SELF_CHECK (gdbarch != NULL);
13110
13111 /* 16-bit Thumb instructions. */
13112 {
13113 insn_decode_record arm_record;
13114
13115 memset (&arm_record, 0, sizeof (insn_decode_record));
13116 arm_record.gdbarch = gdbarch;
13117
13118 static const uint16_t insns[] = {
13119 /* db b2 uxtb r3, r3 */
13120 0xb2db,
13121 /* cd 58 ldr r5, [r1, r3] */
13122 0x58cd,
13123 };
13124
13125 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13126 instruction_reader_thumb reader (endian, insns);
13127 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13128 THUMB_INSN_SIZE_BYTES);
13129
13130 SELF_CHECK (ret == 0);
13131 SELF_CHECK (arm_record.mem_rec_count == 0);
13132 SELF_CHECK (arm_record.reg_rec_count == 1);
13133 SELF_CHECK (arm_record.arm_regs[0] == 3);
13134
13135 arm_record.this_addr += 2;
13136 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13137 THUMB_INSN_SIZE_BYTES);
13138
13139 SELF_CHECK (ret == 0);
13140 SELF_CHECK (arm_record.mem_rec_count == 0);
13141 SELF_CHECK (arm_record.reg_rec_count == 1);
13142 SELF_CHECK (arm_record.arm_regs[0] == 5);
13143 }
13144
13145 /* 32-bit Thumb-2 instructions. */
13146 {
13147 insn_decode_record arm_record;
13148
13149 memset (&arm_record, 0, sizeof (insn_decode_record));
13150 arm_record.gdbarch = gdbarch;
13151
13152 static const uint16_t insns[] = {
13153 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13154 0xee1d, 0x7f70,
13155 };
13156
13157 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13158 instruction_reader_thumb reader (endian, insns);
13159 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13160 THUMB2_INSN_SIZE_BYTES);
13161
13162 SELF_CHECK (ret == 0);
13163 SELF_CHECK (arm_record.mem_rec_count == 0);
13164 SELF_CHECK (arm_record.reg_rec_count == 1);
13165 SELF_CHECK (arm_record.arm_regs[0] == 7);
13166 }
13167}
13168} // namespace selftests
13169#endif /* GDB_SELF_TEST */
13170
13171/* Cleans up local record registers and memory allocations. */
13172
13173static void
13174deallocate_reg_mem (insn_decode_record *record)
13175{
13176 xfree (record->arm_regs);
13177 xfree (record->arm_mems);
13178}
13179
13180
13181/* Parse the current instruction and record the values of the registers and
13182 memory that will be changed by it to record_arch_list.
13183 Return -1 if something is wrong. */
13184
13185int
13186arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13187 CORE_ADDR insn_addr)
13188{
13189
13190 uint32_t no_of_rec = 0;
13191 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13192 ULONGEST t_bit = 0, insn_id = 0;
13193
13194 ULONGEST u_regval = 0;
13195
13196 insn_decode_record arm_record;
13197
13198 memset (&arm_record, 0, sizeof (insn_decode_record));
13199 arm_record.regcache = regcache;
13200 arm_record.this_addr = insn_addr;
13201 arm_record.gdbarch = gdbarch;
13202
13203
13204 if (record_debug > 1)
13205 {
13206 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13207 "addr = %s\n",
13208 paddress (gdbarch, arm_record.this_addr));
13209 }
13210
13211 instruction_reader reader;
13212 if (extract_arm_insn (reader, &arm_record, 2))
13213 {
13214 if (record_debug)
13215 {
13216 printf_unfiltered (_("Process record: error reading memory at "
13217 "addr %s len = %d.\n"),
13218 paddress (arm_record.gdbarch,
13219 arm_record.this_addr), 2);
13220 }
13221 return -1;
13222 }
13223
13224 /* Check whether this is a Thumb or an ARM instruction by testing the
13225 CPSR T bit. */
13225
13226 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13227 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13228
13229
13230 if (!(u_regval & t_bit))
13231 {
13232 /* We are decoding arm insn. */
13233 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13234 }
13235 else
13236 {
13237 insn_id = bits (arm_record.arm_insn, 11, 15);
13238 /* A 32-bit Thumb-2 instruction starts with a halfword whose bits 11-15
13239 are 0x1D, 0x1E or 0x1F. */
13239 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13240 {
13241 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13242 THUMB2_INSN_SIZE_BYTES);
13243 }
13244 else
13245 {
13246 /* We are decoding thumb insn. */
13247 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13248 THUMB_INSN_SIZE_BYTES);
13249 }
13250 }
13251
13252 if (0 == ret)
13253 {
13254 /* Record registers. */
13255 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13256 if (arm_record.arm_regs)
13257 {
13258 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13259 {
13260 if (record_full_arch_list_add_reg
13261 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
13262 ret = -1;
13263 }
13264 }
13265 /* Record memories. */
13266 if (arm_record.arm_mems)
13267 {
13268 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13269 {
13270 if (record_full_arch_list_add_mem
13271 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13272 arm_record.arm_mems[no_of_rec].len))
13273 ret = -1;
13274 }
13275 }
13276
13277 if (record_full_arch_list_add_end ())
13278 ret = -1;
13279 }
13280
13281
13282 deallocate_reg_mem (&arm_record);
13283
13284 return ret;
13285}