Arm: Prefix register sizes with ARM_
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
CommitLineData
ed9a39eb 1/* Common target dependent code for GDB on ARM systems.
0fd88904 2
42a4f53d 3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
c906108c 4
c5aa993b 5 This file is part of GDB.
c906108c 6
c5aa993b
JM
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
a9762ec7 9 the Free Software Foundation; either version 3 of the License, or
c5aa993b 10 (at your option) any later version.
c906108c 11
c5aa993b
JM
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
c906108c 16
c5aa993b 17 You should have received a copy of the GNU General Public License
a9762ec7 18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
c906108c 19
0baeab03
PA
20#include "defs.h"
21
4de283e4 22#include <ctype.h> /* XXX for isupper (). */
34e8f22d 23
4de283e4
TT
24#include "frame.h"
25#include "inferior.h"
26#include "infrun.h"
27#include "gdbcmd.h"
28#include "gdbcore.h"
29#include "dis-asm.h" /* For register styles. */
30#include "disasm.h"
31#include "regcache.h"
32#include "reggroups.h"
33#include "target-float.h"
34#include "value.h"
d55e5aa6 35#include "arch-utils.h"
4de283e4
TT
36#include "osabi.h"
37#include "frame-unwind.h"
38#include "frame-base.h"
39#include "trad-frame.h"
40#include "objfiles.h"
41#include "dwarf2-frame.h"
42#include "gdbtypes.h"
43#include "prologue-value.h"
44#include "remote.h"
45#include "target-descriptions.h"
46#include "user-regs.h"
47#include "observable.h"
48
d55e5aa6 49#include "arch/arm.h"
4de283e4 50#include "arch/arm-get-next-pcs.h"
34e8f22d 51#include "arm-tdep.h"
4de283e4
TT
52#include "gdb/sim-arm.h"
53
d55e5aa6 54#include "elf-bfd.h"
4de283e4 55#include "coff/internal.h"
d55e5aa6 56#include "elf/arm.h"
4de283e4
TT
57
58#include "common/vec.h"
59
60#include "record.h"
61#include "record-full.h"
62#include <algorithm>
63
64#include "features/arm/arm-with-m.c"
0a69eedb
YQ
65#include "features/arm/arm-with-m-fpa-layout.c"
66#include "features/arm/arm-with-m-vfp-d16.c"
4de283e4 67#include "features/arm/arm-with-iwmmxt.c"
0a69eedb
YQ
68#include "features/arm/arm-with-vfpv2.c"
69#include "features/arm/arm-with-vfpv3.c"
4de283e4 70#include "features/arm/arm-with-neon.c"
9779414d 71
b121eeb9 72#if GDB_SELF_TEST
0747795c 73#include "common/selftest.h"
b121eeb9
YQ
74#endif
75
6529d2dd
AC
76static int arm_debug;
77
082fc60d
RE
78/* Macros for setting and testing a bit in a minimal symbol that marks
79 it as Thumb function. The MSB of the minimal symbol's "info" field
f594e5e9 80 is used for this purpose.
082fc60d
RE
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
f594e5e9 83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
082fc60d 84
0963b4bd 85#define MSYMBOL_SET_SPECIAL(msym) \
b887350f 86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
082fc60d
RE
87
88#define MSYMBOL_IS_SPECIAL(msym) \
b887350f 89 MSYMBOL_TARGET_FLAG_1 (msym)
082fc60d 90
60c5725c
DJ
91struct arm_mapping_symbol
92{
93 bfd_vma value;
94 char type;
54cc7474
SM
95
96 bool operator< (const arm_mapping_symbol &other) const
97 { return this->value < other.value; }
60c5725c 98};
54cc7474
SM
99
100typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
60c5725c
DJ
101
102struct arm_per_objfile
103{
54cc7474 104 explicit arm_per_objfile (size_t num_sections)
4838e44c
SM
105 : section_maps (new arm_mapping_symbol_vec[num_sections]),
106 section_maps_sorted (new bool[num_sections] ())
54cc7474
SM
107 {}
108
109 DISABLE_COPY_AND_ASSIGN (arm_per_objfile);
110
111 /* Information about mapping symbols ($a, $d, $t) in the objfile.
112
113 The format is an array of vectors of arm_mapping_symbols, there is one
114 vector for each section of the objfile (the array is index by BFD section
115 index).
116
117 For each section, the vector of arm_mapping_symbol is sorted by
118 symbol value (address). */
119 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
4838e44c
SM
120
121 /* For each corresponding element of section_maps above, is this vector
122 sorted. */
123 std::unique_ptr<bool[]> section_maps_sorted;
60c5725c
DJ
124};
125
1b7f24cd
TT
126/* Per-objfile data used for mapping symbols. */
127static objfile_key<arm_per_objfile> arm_objfile_data_key;
128
afd7eef0
RE
129/* The list of available "set arm ..." and "show arm ..." commands. */
130static struct cmd_list_element *setarmcmdlist = NULL;
131static struct cmd_list_element *showarmcmdlist = NULL;
132
fd50bc42
RE
133/* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
40478521 135static const char *const fp_model_strings[] =
fd50bc42
RE
136{
137 "auto",
138 "softfpa",
139 "fpa",
140 "softvfp",
28e97307
DJ
141 "vfp",
142 NULL
fd50bc42
RE
143};
144
145/* A variable that can be configured by the user. */
146static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
147static const char *current_fp_model = "auto";
148
28e97307 149/* The ABI to use. Keep this in sync with arm_abi_kind. */
40478521 150static const char *const arm_abi_strings[] =
28e97307
DJ
151{
152 "auto",
153 "APCS",
154 "AAPCS",
155 NULL
156};
157
158/* A variable that can be configured by the user. */
159static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
160static const char *arm_abi_string = "auto";
161
0428b8f5 162/* The execution mode to assume. */
40478521 163static const char *const arm_mode_strings[] =
0428b8f5
DJ
164 {
165 "auto",
166 "arm",
68770265
MGD
167 "thumb",
168 NULL
0428b8f5
DJ
169 };
170
171static const char *arm_fallback_mode_string = "auto";
172static const char *arm_force_mode_string = "auto";
173
f32bf4a4
YQ
174/* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added in this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
123dc839
DJ
178static const struct
179{
180 const char *name;
181 int regnum;
182} arm_register_aliases[] = {
183 /* Basic register numbers. */
184 { "r0", 0 },
185 { "r1", 1 },
186 { "r2", 2 },
187 { "r3", 3 },
188 { "r4", 4 },
189 { "r5", 5 },
190 { "r6", 6 },
191 { "r7", 7 },
192 { "r8", 8 },
193 { "r9", 9 },
194 { "r10", 10 },
195 { "r11", 11 },
196 { "r12", 12 },
197 { "r13", 13 },
198 { "r14", 14 },
199 { "r15", 15 },
200 /* Synonyms (argument and variable registers). */
201 { "a1", 0 },
202 { "a2", 1 },
203 { "a3", 2 },
204 { "a4", 3 },
205 { "v1", 4 },
206 { "v2", 5 },
207 { "v3", 6 },
208 { "v4", 7 },
209 { "v5", 8 },
210 { "v6", 9 },
211 { "v7", 10 },
212 { "v8", 11 },
213 /* Other platform-specific names for r9. */
214 { "sb", 9 },
215 { "tr", 9 },
216 /* Special names. */
217 { "ip", 12 },
123dc839 218 { "lr", 14 },
123dc839
DJ
219 /* Names used by GCC (not listed in the ARM EABI). */
220 { "sl", 10 },
123dc839
DJ
221 /* A special name from the older ATPCS. */
222 { "wr", 7 },
223};
bc90b915 224
123dc839 225static const char *const arm_register_names[] =
da59e081
JM
226{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
227 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
228 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
229 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
230 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
231 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
94c30b78 232 "fps", "cpsr" }; /* 24 25 */
ed9a39eb 233
65b48a81
PB
234/* Holds the current set of options to be passed to the disassembler. */
235static char *arm_disassembler_options;
236
afd7eef0
RE
237/* Valid register name styles. */
238static const char **valid_disassembly_styles;
ed9a39eb 239
afd7eef0
RE
240/* Disassembly style to use. Default to "std" register names. */
241static const char *disassembly_style;
96baa820 242
ed9a39eb 243/* This is used to keep the bfd arch_info in sync with the disassembly
afd7eef0 244 style. */
eb4c3f4a 245static void set_disassembly_style_sfunc (const char *, int,
ed9a39eb 246 struct cmd_list_element *);
65b48a81
PB
247static void show_disassembly_style_sfunc (struct ui_file *, int,
248 struct cmd_list_element *,
249 const char *);
ed9a39eb 250
05d1431c 251static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
849d0ba8 252 readable_regcache *regcache,
05d1431c 253 int regnum, gdb_byte *buf);
58d6951d
DJ
254static void arm_neon_quad_write (struct gdbarch *gdbarch,
255 struct regcache *regcache,
256 int regnum, const gdb_byte *buf);
257
e7cf25a8 258static CORE_ADDR
553cb527 259 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
e7cf25a8
YQ
260
261
d9311bfa
AT
262/* get_next_pcs operations. */
263static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
264 arm_get_next_pcs_read_memory_unsigned_integer,
265 arm_get_next_pcs_syscall_next_pc,
266 arm_get_next_pcs_addr_bits_remove,
ed443b61
YQ
267 arm_get_next_pcs_is_thumb,
268 NULL,
d9311bfa
AT
269};
270
9b8d791a 271struct arm_prologue_cache
c3b4394c 272{
eb5492fa
DJ
273 /* The stack pointer at the time this frame was created; i.e. the
274 caller's stack pointer when this function was called. It is used
275 to identify this frame. */
276 CORE_ADDR prev_sp;
277
4be43953
DJ
278 /* The frame base for this frame is just prev_sp - frame size.
279 FRAMESIZE is the distance from the frame pointer to the
280 initial stack pointer. */
eb5492fa 281
c3b4394c 282 int framesize;
eb5492fa
DJ
283
284 /* The register used to hold the frame pointer for this frame. */
c3b4394c 285 int framereg;
eb5492fa
DJ
286
287 /* Saved register offsets. */
288 struct trad_frame_saved_reg *saved_regs;
c3b4394c 289};
ed9a39eb 290
0d39a070
DJ
291static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
292 CORE_ADDR prologue_start,
293 CORE_ADDR prologue_end,
294 struct arm_prologue_cache *cache);
295
cca44b1b
JB
296/* Architecture version for displaced stepping. This effects the behaviour of
297 certain instructions, and really should not be hard-wired. */
298
299#define DISPLACED_STEPPING_ARCH_VERSION 5
300
94c30b78 301/* Set to true if the 32-bit mode is in use. */
c906108c
SS
302
303int arm_apcs_32 = 1;
304
9779414d
DJ
305/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
306
478fd957 307int
9779414d
DJ
308arm_psr_thumb_bit (struct gdbarch *gdbarch)
309{
310 if (gdbarch_tdep (gdbarch)->is_m)
311 return XPSR_T;
312 else
313 return CPSR_T;
314}
315
d0e59a68
AT
316/* Determine if the processor is currently executing in Thumb mode. */
317
318int
319arm_is_thumb (struct regcache *regcache)
320{
321 ULONGEST cpsr;
ac7936df 322 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
d0e59a68
AT
323
324 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
325
326 return (cpsr & t_bit) != 0;
327}
328
b39cc962
DJ
329/* Determine if FRAME is executing in Thumb mode. */
330
25b41d01 331int
b39cc962
DJ
332arm_frame_is_thumb (struct frame_info *frame)
333{
334 CORE_ADDR cpsr;
9779414d 335 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
b39cc962
DJ
336
337 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
338 directly (from a signal frame or dummy frame) or by interpreting
339 the saved LR (from a prologue or DWARF frame). So consult it and
340 trust the unwinders. */
341 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
342
9779414d 343 return (cpsr & t_bit) != 0;
b39cc962
DJ
344}
345
f9d67f43
DJ
346/* Search for the mapping symbol covering MEMADDR. If one is found,
347 return its type. Otherwise, return 0. If START is non-NULL,
348 set *START to the location of the mapping symbol. */
c906108c 349
f9d67f43
DJ
350static char
351arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
c906108c 352{
60c5725c 353 struct obj_section *sec;
0428b8f5 354
60c5725c
DJ
355 /* If there are mapping symbols, consult them. */
356 sec = find_pc_section (memaddr);
357 if (sec != NULL)
358 {
1b7f24cd 359 arm_per_objfile *data = arm_objfile_data_key.get (sec->objfile);
60c5725c
DJ
360 if (data != NULL)
361 {
4838e44c
SM
362 unsigned int section_idx = sec->the_bfd_section->index;
363 arm_mapping_symbol_vec &map
364 = data->section_maps[section_idx];
365
366 /* Sort the vector on first use. */
367 if (!data->section_maps_sorted[section_idx])
368 {
369 std::sort (map.begin (), map.end ());
370 data->section_maps_sorted[section_idx] = true;
371 }
372
54cc7474
SM
373 struct arm_mapping_symbol map_key
374 = { memaddr - obj_section_addr (sec), 0 };
54cc7474
SM
375 arm_mapping_symbol_vec::const_iterator it
376 = std::lower_bound (map.begin (), map.end (), map_key);
377
378 /* std::lower_bound finds the earliest ordered insertion
379 point. If the symbol at this position starts at this exact
380 address, we use that; otherwise, the preceding
381 mapping symbol covers this address. */
382 if (it < map.end ())
60c5725c 383 {
54cc7474 384 if (it->value == map_key.value)
60c5725c 385 {
f9d67f43 386 if (start)
54cc7474
SM
387 *start = it->value + obj_section_addr (sec);
388 return it->type;
60c5725c
DJ
389 }
390 }
54cc7474
SM
391
392 if (it > map.begin ())
393 {
394 arm_mapping_symbol_vec::const_iterator prev_it
395 = it - 1;
396
397 if (start)
398 *start = prev_it->value + obj_section_addr (sec);
399 return prev_it->type;
400 }
60c5725c
DJ
401 }
402 }
403
f9d67f43
DJ
404 return 0;
405}
406
407/* Determine if the program counter specified in MEMADDR is in a Thumb
408 function. This function should be called for addresses unrelated to
409 any executing frame; otherwise, prefer arm_frame_is_thumb. */
410
e3039479 411int
9779414d 412arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
f9d67f43 413{
7cbd4a93 414 struct bound_minimal_symbol sym;
f9d67f43 415 char type;
cfba9872
SM
416 arm_displaced_step_closure *dsc
417 = ((arm_displaced_step_closure * )
418 get_displaced_step_closure_by_addr (memaddr));
a42244db
YQ
419
420 /* If checking the mode of displaced instruction in copy area, the mode
421 should be determined by instruction on the original address. */
422 if (dsc)
423 {
424 if (debug_displaced)
425 fprintf_unfiltered (gdb_stdlog,
426 "displaced: check mode of %.8lx instead of %.8lx\n",
427 (unsigned long) dsc->insn_addr,
428 (unsigned long) memaddr);
429 memaddr = dsc->insn_addr;
430 }
f9d67f43
DJ
431
432 /* If bit 0 of the address is set, assume this is a Thumb address. */
433 if (IS_THUMB_ADDR (memaddr))
434 return 1;
435
436 /* If the user wants to override the symbol table, let him. */
437 if (strcmp (arm_force_mode_string, "arm") == 0)
438 return 0;
439 if (strcmp (arm_force_mode_string, "thumb") == 0)
440 return 1;
441
9779414d
DJ
442 /* ARM v6-M and v7-M are always in Thumb mode. */
443 if (gdbarch_tdep (gdbarch)->is_m)
444 return 1;
445
f9d67f43
DJ
446 /* If there are mapping symbols, consult them. */
447 type = arm_find_mapping_symbol (memaddr, NULL);
448 if (type)
449 return type == 't';
450
ed9a39eb 451 /* Thumb functions have a "special" bit set in minimal symbols. */
c906108c 452 sym = lookup_minimal_symbol_by_pc (memaddr);
7cbd4a93
TT
453 if (sym.minsym)
454 return (MSYMBOL_IS_SPECIAL (sym.minsym));
0428b8f5
DJ
455
456 /* If the user wants to override the fallback mode, let them. */
457 if (strcmp (arm_fallback_mode_string, "arm") == 0)
458 return 0;
459 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
460 return 1;
461
462 /* If we couldn't find any symbol, but we're talking to a running
463 target, then trust the current value of $cpsr. This lets
464 "display/i $pc" always show the correct mode (though if there is
465 a symbol table we will not reach here, so it still may not be
18819fa6 466 displayed in the mode it will be executed). */
0428b8f5 467 if (target_has_registers)
18819fa6 468 return arm_frame_is_thumb (get_current_frame ());
0428b8f5
DJ
469
470 /* Otherwise we're out of luck; we assume ARM. */
471 return 0;
c906108c
SS
472}
473
ca90e760
FH
474/* Determine if the address specified equals any of these magic return
475 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
476 architectures.
477
478 From ARMv6-M Reference Manual B1.5.8
479 Table B1-5 Exception return behavior
480
481 EXC_RETURN Return To Return Stack
482 0xFFFFFFF1 Handler mode Main
483 0xFFFFFFF9 Thread mode Main
484 0xFFFFFFFD Thread mode Process
485
486 From ARMv7-M Reference Manual B1.5.8
487 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
488
489 EXC_RETURN Return To Return Stack
490 0xFFFFFFF1 Handler mode Main
491 0xFFFFFFF9 Thread mode Main
492 0xFFFFFFFD Thread mode Process
493
494 Table B1-9 EXC_RETURN definition of exception return behavior, with
495 FP
496
497 EXC_RETURN Return To Return Stack Frame Type
498 0xFFFFFFE1 Handler mode Main Extended
499 0xFFFFFFE9 Thread mode Main Extended
500 0xFFFFFFED Thread mode Process Extended
501 0xFFFFFFF1 Handler mode Main Basic
502 0xFFFFFFF9 Thread mode Main Basic
503 0xFFFFFFFD Thread mode Process Basic
504
505 For more details see "B1.5.8 Exception return behavior"
506 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
507
508static int
509arm_m_addr_is_magic (CORE_ADDR addr)
510{
511 switch (addr)
512 {
513 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
514 the exception return behavior. */
515 case 0xffffffe1:
516 case 0xffffffe9:
517 case 0xffffffed:
518 case 0xfffffff1:
519 case 0xfffffff9:
520 case 0xfffffffd:
521 /* Address is magic. */
522 return 1;
523
524 default:
525 /* Address is not magic. */
526 return 0;
527 }
528}
529
181c1381 530/* Remove useless bits from addresses in a running program. */
34e8f22d 531static CORE_ADDR
24568a2c 532arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
c906108c 533{
2ae28aa9
YQ
534 /* On M-profile devices, do not strip the low bit from EXC_RETURN
535 (the magic exception return address). */
536 if (gdbarch_tdep (gdbarch)->is_m
ca90e760 537 && arm_m_addr_is_magic (val))
2ae28aa9
YQ
538 return val;
539
a3a2ee65 540 if (arm_apcs_32)
dd6be234 541 return UNMAKE_THUMB_ADDR (val);
c906108c 542 else
a3a2ee65 543 return (val & 0x03fffffc);
c906108c
SS
544}
545
0d39a070 546/* Return 1 if PC is the start of a compiler helper function which
e0634ccf
UW
547 can be safely ignored during prologue skipping. IS_THUMB is true
548 if the function is known to be a Thumb function due to the way it
549 is being called. */
0d39a070 550static int
e0634ccf 551skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
0d39a070 552{
e0634ccf 553 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7cbd4a93 554 struct bound_minimal_symbol msym;
0d39a070
DJ
555
556 msym = lookup_minimal_symbol_by_pc (pc);
7cbd4a93 557 if (msym.minsym != NULL
77e371c0 558 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
efd66ac6 559 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
e0634ccf 560 {
efd66ac6 561 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
0d39a070 562
e0634ccf
UW
563 /* The GNU linker's Thumb call stub to foo is named
564 __foo_from_thumb. */
565 if (strstr (name, "_from_thumb") != NULL)
566 name += 2;
0d39a070 567
e0634ccf
UW
568 /* On soft-float targets, __truncdfsf2 is called to convert promoted
569 arguments to their argument types in non-prototyped
570 functions. */
61012eef 571 if (startswith (name, "__truncdfsf2"))
e0634ccf 572 return 1;
61012eef 573 if (startswith (name, "__aeabi_d2f"))
e0634ccf 574 return 1;
0d39a070 575
e0634ccf 576 /* Internal functions related to thread-local storage. */
61012eef 577 if (startswith (name, "__tls_get_addr"))
e0634ccf 578 return 1;
61012eef 579 if (startswith (name, "__aeabi_read_tp"))
e0634ccf
UW
580 return 1;
581 }
582 else
583 {
584 /* If we run against a stripped glibc, we may be unable to identify
585 special functions by name. Check for one important case,
586 __aeabi_read_tp, by comparing the *code* against the default
587 implementation (this is hand-written ARM assembler in glibc). */
588
589 if (!is_thumb
198cd59d 590 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
e0634ccf 591 == 0xe3e00a0f /* mov r0, #0xffff0fff */
198cd59d 592 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
e0634ccf
UW
593 == 0xe240f01f) /* sub pc, r0, #31 */
594 return 1;
595 }
ec3d575a 596
0d39a070
DJ
597 return 0;
598}
599
621c6d5b
YQ
600/* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
601 the first 16-bit of instruction, and INSN2 is the second 16-bit of
602 instruction. */
603#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
604 ((bits ((insn1), 0, 3) << 12) \
605 | (bits ((insn1), 10, 10) << 11) \
606 | (bits ((insn2), 12, 14) << 8) \
607 | bits ((insn2), 0, 7))
608
609/* Extract the immediate from instruction movw/movt of encoding A. INSN is
610 the 32-bit instruction. */
611#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
612 ((bits ((insn), 16, 19) << 12) \
613 | bits ((insn), 0, 11))
614
ec3d575a
UW
615/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
616
617static unsigned int
618thumb_expand_immediate (unsigned int imm)
619{
620 unsigned int count = imm >> 7;
621
622 if (count < 8)
623 switch (count / 2)
624 {
625 case 0:
626 return imm & 0xff;
627 case 1:
628 return (imm & 0xff) | ((imm & 0xff) << 16);
629 case 2:
630 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
631 case 3:
632 return (imm & 0xff) | ((imm & 0xff) << 8)
633 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
634 }
635
636 return (0x80 | (imm & 0x7f)) << (32 - count);
637}
638
540314bd
YQ
639/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
640 epilogue, 0 otherwise. */
641
642static int
643thumb_instruction_restores_sp (unsigned short insn)
644{
645 return (insn == 0x46bd /* mov sp, r7 */
646 || (insn & 0xff80) == 0xb000 /* add sp, imm */
647 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
648}
649
29d73ae4
DJ
650/* Analyze a Thumb prologue, looking for a recognizable stack frame
651 and frame pointer. Scan until we encounter a store that could
0d39a070
DJ
652 clobber the stack frame unexpectedly, or an unknown instruction.
653 Return the last address which is definitely safe to skip for an
654 initial breakpoint. */
c906108c
SS
655
656static CORE_ADDR
29d73ae4
DJ
657thumb_analyze_prologue (struct gdbarch *gdbarch,
658 CORE_ADDR start, CORE_ADDR limit,
659 struct arm_prologue_cache *cache)
c906108c 660{
0d39a070 661 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 662 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
29d73ae4
DJ
663 int i;
664 pv_t regs[16];
29d73ae4 665 CORE_ADDR offset;
ec3d575a 666 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 667
29d73ae4
DJ
668 for (i = 0; i < 16; i++)
669 regs[i] = pv_register (i, 0);
f7b7ed97 670 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
29d73ae4 671
29d73ae4 672 while (start < limit)
c906108c 673 {
29d73ae4
DJ
674 unsigned short insn;
675
198cd59d 676 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 677
94c30b78 678 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 679 {
29d73ae4
DJ
680 int regno;
681 int mask;
4be43953 682
f7b7ed97 683 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953 684 break;
29d73ae4
DJ
685
686 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
687 whether to save LR (R14). */
688 mask = (insn & 0xff) | ((insn & 0x100) << 6);
689
690 /* Calculate offsets of saved R0-R7 and LR. */
691 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
692 if (mask & (1 << regno))
693 {
29d73ae4
DJ
694 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
695 -4);
f7b7ed97 696 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
29d73ae4 697 }
da59e081 698 }
1db01f22 699 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
da59e081 700 {
29d73ae4 701 offset = (insn & 0x7f) << 2; /* get scaled offset */
1db01f22
YQ
702 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
703 -offset);
da59e081 704 }
808f7ab1
YQ
705 else if (thumb_instruction_restores_sp (insn))
706 {
707 /* Don't scan past the epilogue. */
708 break;
709 }
0d39a070
DJ
710 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
711 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
712 (insn & 0xff) << 2);
713 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
714 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
715 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
716 bits (insn, 6, 8));
717 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
718 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
719 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
720 bits (insn, 0, 7));
721 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
722 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
723 && pv_is_constant (regs[bits (insn, 3, 5)]))
724 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
725 regs[bits (insn, 6, 8)]);
726 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
727 && pv_is_constant (regs[bits (insn, 3, 6)]))
728 {
729 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
730 int rm = bits (insn, 3, 6);
731 regs[rd] = pv_add (regs[rd], regs[rm]);
732 }
29d73ae4 733 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 734 {
29d73ae4
DJ
735 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
736 int src_reg = (insn & 0x78) >> 3;
737 regs[dst_reg] = regs[src_reg];
da59e081 738 }
29d73ae4 739 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 740 {
29d73ae4
DJ
741 /* Handle stores to the stack. Normally pushes are used,
742 but with GCC -mtpcs-frame, there may be other stores
743 in the prologue to create the frame. */
744 int regno = (insn >> 8) & 0x7;
745 pv_t addr;
746
747 offset = (insn & 0xff) << 2;
748 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
749
f7b7ed97 750 if (stack.store_would_trash (addr))
29d73ae4
DJ
751 break;
752
f7b7ed97 753 stack.store (addr, 4, regs[regno]);
da59e081 754 }
0d39a070
DJ
755 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
756 {
757 int rd = bits (insn, 0, 2);
758 int rn = bits (insn, 3, 5);
759 pv_t addr;
760
761 offset = bits (insn, 6, 10) << 2;
762 addr = pv_add_constant (regs[rn], offset);
763
f7b7ed97 764 if (stack.store_would_trash (addr))
0d39a070
DJ
765 break;
766
f7b7ed97 767 stack.store (addr, 4, regs[rd]);
0d39a070
DJ
768 }
769 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
770 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
771 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
772 /* Ignore stores of argument registers to the stack. */
773 ;
774 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
775 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
776 /* Ignore block loads from the stack, potentially copying
777 parameters from memory. */
778 ;
779 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
780 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
781 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
782 /* Similarly ignore single loads from the stack. */
783 ;
784 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
785 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
786 /* Skip register copies, i.e. saves to another register
787 instead of the stack. */
788 ;
789 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
790 /* Recognize constant loads; even with small stacks these are necessary
791 on Thumb. */
792 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
793 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
794 {
795 /* Constant pool loads, for the same reason. */
796 unsigned int constant;
797 CORE_ADDR loc;
798
799 loc = start + 4 + bits (insn, 0, 7) * 4;
800 constant = read_memory_unsigned_integer (loc, 4, byte_order);
801 regs[bits (insn, 8, 10)] = pv_constant (constant);
802 }
db24da6d 803 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 804 {
0d39a070
DJ
805 unsigned short inst2;
806
198cd59d
YQ
807 inst2 = read_code_unsigned_integer (start + 2, 2,
808 byte_order_for_code);
0d39a070
DJ
809
810 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
811 {
812 /* BL, BLX. Allow some special function calls when
813 skipping the prologue; GCC generates these before
814 storing arguments to the stack. */
815 CORE_ADDR nextpc;
816 int j1, j2, imm1, imm2;
817
818 imm1 = sbits (insn, 0, 10);
819 imm2 = bits (inst2, 0, 10);
820 j1 = bit (inst2, 13);
821 j2 = bit (inst2, 11);
822
823 offset = ((imm1 << 12) + (imm2 << 1));
824 offset ^= ((!j2) << 22) | ((!j1) << 23);
825
826 nextpc = start + 4 + offset;
827 /* For BLX make sure to clear the low bits. */
828 if (bit (inst2, 12) == 0)
829 nextpc = nextpc & 0xfffffffc;
830
e0634ccf
UW
831 if (!skip_prologue_function (gdbarch, nextpc,
832 bit (inst2, 12) != 0))
0d39a070
DJ
833 break;
834 }
ec3d575a 835
0963b4bd
MS
836 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
837 { registers } */
ec3d575a
UW
838 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
839 {
840 pv_t addr = regs[bits (insn, 0, 3)];
841 int regno;
842
f7b7ed97 843 if (stack.store_would_trash (addr))
ec3d575a
UW
844 break;
845
846 /* Calculate offsets of saved registers. */
847 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
848 if (inst2 & (1 << regno))
849 {
850 addr = pv_add_constant (addr, -4);
f7b7ed97 851 stack.store (addr, 4, regs[regno]);
ec3d575a
UW
852 }
853
854 if (insn & 0x0020)
855 regs[bits (insn, 0, 3)] = addr;
856 }
857
0963b4bd
MS
858 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
859 [Rn, #+/-imm]{!} */
ec3d575a
UW
860 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
861 {
862 int regno1 = bits (inst2, 12, 15);
863 int regno2 = bits (inst2, 8, 11);
864 pv_t addr = regs[bits (insn, 0, 3)];
865
866 offset = inst2 & 0xff;
867 if (insn & 0x0080)
868 addr = pv_add_constant (addr, offset);
869 else
870 addr = pv_add_constant (addr, -offset);
871
f7b7ed97 872 if (stack.store_would_trash (addr))
ec3d575a
UW
873 break;
874
f7b7ed97
TT
875 stack.store (addr, 4, regs[regno1]);
876 stack.store (pv_add_constant (addr, 4),
877 4, regs[regno2]);
ec3d575a
UW
878
879 if (insn & 0x0020)
880 regs[bits (insn, 0, 3)] = addr;
881 }
882
883 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
884 && (inst2 & 0x0c00) == 0x0c00
885 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
886 {
887 int regno = bits (inst2, 12, 15);
888 pv_t addr = regs[bits (insn, 0, 3)];
889
890 offset = inst2 & 0xff;
891 if (inst2 & 0x0200)
892 addr = pv_add_constant (addr, offset);
893 else
894 addr = pv_add_constant (addr, -offset);
895
f7b7ed97 896 if (stack.store_would_trash (addr))
ec3d575a
UW
897 break;
898
f7b7ed97 899 stack.store (addr, 4, regs[regno]);
ec3d575a
UW
900
901 if (inst2 & 0x0100)
902 regs[bits (insn, 0, 3)] = addr;
903 }
904
905 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
906 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
907 {
908 int regno = bits (inst2, 12, 15);
909 pv_t addr;
910
911 offset = inst2 & 0xfff;
912 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
913
f7b7ed97 914 if (stack.store_would_trash (addr))
ec3d575a
UW
915 break;
916
f7b7ed97 917 stack.store (addr, 4, regs[regno]);
ec3d575a
UW
918 }
919
920 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 921 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 922 /* Ignore stores of argument registers to the stack. */
0d39a070 923 ;
ec3d575a
UW
924
925 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
926 && (inst2 & 0x0d00) == 0x0c00
0d39a070 927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 928 /* Ignore stores of argument registers to the stack. */
0d39a070 929 ;
ec3d575a 930
0963b4bd
MS
931 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
932 { registers } */
ec3d575a
UW
933 && (inst2 & 0x8000) == 0x0000
934 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
935 /* Ignore block loads from the stack, potentially copying
936 parameters from memory. */
0d39a070 937 ;
ec3d575a 938
0963b4bd
MS
939 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
940 [Rn, #+/-imm] */
0d39a070 941 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 942 /* Similarly ignore dual loads from the stack. */
0d39a070 943 ;
ec3d575a
UW
944
945 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
946 && (inst2 & 0x0d00) == 0x0c00
0d39a070 947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 948 /* Similarly ignore single loads from the stack. */
0d39a070 949 ;
ec3d575a
UW
950
951 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 952 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 953 /* Similarly ignore single loads from the stack. */
0d39a070 954 ;
ec3d575a
UW
955
956 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
957 && (inst2 & 0x8000) == 0x0000)
958 {
959 unsigned int imm = ((bits (insn, 10, 10) << 11)
960 | (bits (inst2, 12, 14) << 8)
961 | bits (inst2, 0, 7));
962
963 regs[bits (inst2, 8, 11)]
964 = pv_add_constant (regs[bits (insn, 0, 3)],
965 thumb_expand_immediate (imm));
966 }
967
968 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
969 && (inst2 & 0x8000) == 0x0000)
0d39a070 970 {
ec3d575a
UW
971 unsigned int imm = ((bits (insn, 10, 10) << 11)
972 | (bits (inst2, 12, 14) << 8)
973 | bits (inst2, 0, 7));
974
975 regs[bits (inst2, 8, 11)]
976 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
977 }
978
979 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
980 && (inst2 & 0x8000) == 0x0000)
981 {
982 unsigned int imm = ((bits (insn, 10, 10) << 11)
983 | (bits (inst2, 12, 14) << 8)
984 | bits (inst2, 0, 7));
985
986 regs[bits (inst2, 8, 11)]
987 = pv_add_constant (regs[bits (insn, 0, 3)],
988 - (CORE_ADDR) thumb_expand_immediate (imm));
989 }
990
991 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
992 && (inst2 & 0x8000) == 0x0000)
993 {
994 unsigned int imm = ((bits (insn, 10, 10) << 11)
995 | (bits (inst2, 12, 14) << 8)
996 | bits (inst2, 0, 7));
997
998 regs[bits (inst2, 8, 11)]
999 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1000 }
1001
1002 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1003 {
1004 unsigned int imm = ((bits (insn, 10, 10) << 11)
1005 | (bits (inst2, 12, 14) << 8)
1006 | bits (inst2, 0, 7));
1007
1008 regs[bits (inst2, 8, 11)]
1009 = pv_constant (thumb_expand_immediate (imm));
1010 }
1011
1012 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1013 {
621c6d5b
YQ
1014 unsigned int imm
1015 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
ec3d575a
UW
1016
1017 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1018 }
1019
1020 else if (insn == 0xea5f /* mov.w Rd,Rm */
1021 && (inst2 & 0xf0f0) == 0)
1022 {
1023 int dst_reg = (inst2 & 0x0f00) >> 8;
1024 int src_reg = inst2 & 0xf;
1025 regs[dst_reg] = regs[src_reg];
1026 }
1027
1028 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1029 {
1030 /* Constant pool loads. */
1031 unsigned int constant;
1032 CORE_ADDR loc;
1033
cac395ea 1034 offset = bits (inst2, 0, 11);
ec3d575a
UW
1035 if (insn & 0x0080)
1036 loc = start + 4 + offset;
1037 else
1038 loc = start + 4 - offset;
1039
1040 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1041 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1042 }
1043
1044 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1045 {
1046 /* Constant pool loads. */
1047 unsigned int constant;
1048 CORE_ADDR loc;
1049
cac395ea 1050 offset = bits (inst2, 0, 7) << 2;
ec3d575a
UW
1051 if (insn & 0x0080)
1052 loc = start + 4 + offset;
1053 else
1054 loc = start + 4 - offset;
1055
1056 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1057 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1058
1059 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1060 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1061 }
1062
1063 else if (thumb2_instruction_changes_pc (insn, inst2))
1064 {
1065 /* Don't scan past anything that might change control flow. */
0d39a070
DJ
1066 break;
1067 }
ec3d575a
UW
1068 else
1069 {
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1073 }
0d39a070
DJ
1074
1075 start += 2;
1076 }
ec3d575a 1077 else if (thumb_instruction_changes_pc (insn))
3d74b771 1078 {
ec3d575a 1079 /* Don't scan past anything that might change control flow. */
da3c6d4a 1080 break;
3d74b771 1081 }
ec3d575a
UW
1082 else
1083 {
1084 /* The optimizer might shove anything into the prologue,
1085 so we just skip what we don't recognize. */
1086 unrecognized_pc = start;
1087 }
29d73ae4
DJ
1088
1089 start += 2;
c906108c
SS
1090 }
1091
0d39a070
DJ
1092 if (arm_debug)
1093 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1094 paddress (gdbarch, start));
1095
ec3d575a
UW
1096 if (unrecognized_pc == 0)
1097 unrecognized_pc = start;
1098
29d73ae4 1099 if (cache == NULL)
f7b7ed97 1100 return unrecognized_pc;
29d73ae4 1101
29d73ae4
DJ
1102 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1103 {
1104 /* Frame pointer is fp. Frame size is constant. */
1105 cache->framereg = ARM_FP_REGNUM;
1106 cache->framesize = -regs[ARM_FP_REGNUM].k;
1107 }
1108 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1109 {
1110 /* Frame pointer is r7. Frame size is constant. */
1111 cache->framereg = THUMB_FP_REGNUM;
1112 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1113 }
72a2e3dc 1114 else
29d73ae4
DJ
1115 {
1116 /* Try the stack pointer... this is a bit desperate. */
1117 cache->framereg = ARM_SP_REGNUM;
1118 cache->framesize = -regs[ARM_SP_REGNUM].k;
1119 }
29d73ae4
DJ
1120
1121 for (i = 0; i < 16; i++)
f7b7ed97 1122 if (stack.find_reg (gdbarch, i, &offset))
29d73ae4
DJ
1123 cache->saved_regs[i].addr = offset;
1124
ec3d575a 1125 return unrecognized_pc;
c906108c
SS
1126}
1127
621c6d5b
YQ
1128
1129/* Try to analyze the instructions starting from PC, which load symbol
1130 __stack_chk_guard. Return the address of instruction after loading this
1131 symbol, set the dest register number to *BASEREG, and set the size of
1132 instructions for loading symbol in OFFSET. Return 0 if instructions are
1133 not recognized. */
1134
1135static CORE_ADDR
1136arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1137 unsigned int *destreg, int *offset)
1138{
1139 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1140 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1141 unsigned int low, high, address;
1142
1143 address = 0;
1144 if (is_thumb)
1145 {
1146 unsigned short insn1
198cd59d 1147 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
621c6d5b
YQ
1148
1149 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1150 {
1151 *destreg = bits (insn1, 8, 10);
1152 *offset = 2;
6ae274b7
YQ
1153 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1154 address = read_memory_unsigned_integer (address, 4,
1155 byte_order_for_code);
621c6d5b
YQ
1156 }
1157 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1158 {
1159 unsigned short insn2
198cd59d 1160 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
621c6d5b
YQ
1161
1162 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1163
1164 insn1
198cd59d 1165 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
621c6d5b 1166 insn2
198cd59d 1167 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
621c6d5b
YQ
1168
1169 /* movt Rd, #const */
1170 if ((insn1 & 0xfbc0) == 0xf2c0)
1171 {
1172 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1173 *destreg = bits (insn2, 8, 11);
1174 *offset = 8;
1175 address = (high << 16 | low);
1176 }
1177 }
1178 }
1179 else
1180 {
2e9e421f 1181 unsigned int insn
198cd59d 1182 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
2e9e421f 1183
6ae274b7 1184 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
2e9e421f 1185 {
6ae274b7
YQ
1186 address = bits (insn, 0, 11) + pc + 8;
1187 address = read_memory_unsigned_integer (address, 4,
1188 byte_order_for_code);
1189
2e9e421f
UW
1190 *destreg = bits (insn, 12, 15);
1191 *offset = 4;
1192 }
1193 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1194 {
1195 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1196
1197 insn
198cd59d 1198 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
2e9e421f
UW
1199
1200 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1201 {
1202 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1203 *destreg = bits (insn, 12, 15);
1204 *offset = 8;
1205 address = (high << 16 | low);
1206 }
1207 }
621c6d5b
YQ
1208 }
1209
1210 return address;
1211}
1212
1213/* Try to skip a sequence of instructions used for stack protector. If PC
0963b4bd
MS
1214 points to the first instruction of this sequence, return the address of
1215 first instruction after this sequence, otherwise, return original PC.
621c6d5b
YQ
1216
1217 On arm, this sequence of instructions is composed of mainly three steps,
1218 Step 1: load symbol __stack_chk_guard,
1219 Step 2: load from address of __stack_chk_guard,
1220 Step 3: store it to somewhere else.
1221
1222 Usually, instructions on step 2 and step 3 are the same on various ARM
1223 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1224 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1225 instructions in step 1 vary from different ARM architectures. On ARMv7,
1226 they are,
1227
1228 movw Rn, #:lower16:__stack_chk_guard
1229 movt Rn, #:upper16:__stack_chk_guard
1230
1231 On ARMv5t, it is,
1232
1233 ldr Rn, .Label
1234 ....
1235 .Lable:
1236 .word __stack_chk_guard
1237
1238 Since ldr/str is a very popular instruction, we can't use them as
1239 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1240 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1241 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1242
1243static CORE_ADDR
1244arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1245{
1246 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
22e048c9 1247 unsigned int basereg;
7cbd4a93 1248 struct bound_minimal_symbol stack_chk_guard;
621c6d5b
YQ
1249 int offset;
1250 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1251 CORE_ADDR addr;
1252
1253 /* Try to parse the instructions in Step 1. */
1254 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1255 &basereg, &offset);
1256 if (!addr)
1257 return pc;
1258
1259 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
6041179a
JB
1260 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1261 Otherwise, this sequence cannot be for stack protector. */
1262 if (stack_chk_guard.minsym == NULL
61012eef 1263 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
621c6d5b
YQ
1264 return pc;
1265
1266 if (is_thumb)
1267 {
1268 unsigned int destreg;
1269 unsigned short insn
198cd59d 1270 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
621c6d5b
YQ
1271
1272 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1273 if ((insn & 0xf800) != 0x6800)
1274 return pc;
1275 if (bits (insn, 3, 5) != basereg)
1276 return pc;
1277 destreg = bits (insn, 0, 2);
1278
198cd59d
YQ
1279 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1280 byte_order_for_code);
621c6d5b
YQ
1281 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1282 if ((insn & 0xf800) != 0x6000)
1283 return pc;
1284 if (destreg != bits (insn, 0, 2))
1285 return pc;
1286 }
1287 else
1288 {
1289 unsigned int destreg;
1290 unsigned int insn
198cd59d 1291 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
621c6d5b
YQ
1292
1293 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1294 if ((insn & 0x0e500000) != 0x04100000)
1295 return pc;
1296 if (bits (insn, 16, 19) != basereg)
1297 return pc;
1298 destreg = bits (insn, 12, 15);
1299 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
198cd59d 1300 insn = read_code_unsigned_integer (pc + offset + 4,
621c6d5b
YQ
1301 4, byte_order_for_code);
1302 if ((insn & 0x0e500000) != 0x04000000)
1303 return pc;
1304 if (bits (insn, 12, 15) != destreg)
1305 return pc;
1306 }
1307 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1308 on arm. */
1309 if (is_thumb)
1310 return pc + offset + 4;
1311 else
1312 return pc + offset + 8;
1313}
1314
da3c6d4a
MS
1315/* Advance the PC across any function entry prologue instructions to
1316 reach some "real" code.
34e8f22d
RE
1317
1318 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1319 prologue:
c906108c 1320
c5aa993b
JM
1321 mov ip, sp
1322 [stmfd sp!, {a1,a2,a3,a4}]
1323 stmfd sp!, {...,fp,ip,lr,pc}
ed9a39eb
JM
1324 [stfe f7, [sp, #-12]!]
1325 [stfe f6, [sp, #-12]!]
1326 [stfe f5, [sp, #-12]!]
1327 [stfe f4, [sp, #-12]!]
0963b4bd 1328 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1329
34e8f22d 1330static CORE_ADDR
6093d2eb 1331arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1332{
a89fea3c 1333 CORE_ADDR func_addr, limit_pc;
c906108c 1334
a89fea3c
JL
1335 /* See if we can determine the end of the prologue via the symbol table.
1336 If so, then return either PC, or the PC after the prologue, whichever
1337 is greater. */
1338 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1339 {
d80b854b
UW
1340 CORE_ADDR post_prologue_pc
1341 = skip_prologue_using_sal (gdbarch, func_addr);
43f3e411 1342 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
0d39a070 1343
621c6d5b
YQ
1344 if (post_prologue_pc)
1345 post_prologue_pc
1346 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1347
1348
0d39a070
DJ
1349 /* GCC always emits a line note before the prologue and another
1350 one after, even if the two are at the same address or on the
1351 same line. Take advantage of this so that we do not need to
1352 know every instruction that might appear in the prologue. We
1353 will have producer information for most binaries; if it is
1354 missing (e.g. for -gstabs), assuming the GNU tools. */
1355 if (post_prologue_pc
43f3e411
DE
1356 && (cust == NULL
1357 || COMPUNIT_PRODUCER (cust) == NULL
61012eef
GB
1358 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1359 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
0d39a070
DJ
1360 return post_prologue_pc;
1361
a89fea3c 1362 if (post_prologue_pc != 0)
0d39a070
DJ
1363 {
1364 CORE_ADDR analyzed_limit;
1365
1366 /* For non-GCC compilers, make sure the entire line is an
1367 acceptable prologue; GDB will round this function's
1368 return value up to the end of the following line so we
1369 can not skip just part of a line (and we do not want to).
1370
1371 RealView does not treat the prologue specially, but does
1372 associate prologue code with the opening brace; so this
1373 lets us skip the first line if we think it is the opening
1374 brace. */
9779414d 1375 if (arm_pc_is_thumb (gdbarch, func_addr))
0d39a070
DJ
1376 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1377 post_prologue_pc, NULL);
1378 else
1379 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1380 post_prologue_pc, NULL);
1381
1382 if (analyzed_limit != post_prologue_pc)
1383 return func_addr;
1384
1385 return post_prologue_pc;
1386 }
c906108c
SS
1387 }
1388
a89fea3c
JL
1389 /* Can't determine prologue from the symbol table, need to examine
1390 instructions. */
c906108c 1391
a89fea3c
JL
1392 /* Find an upper limit on the function prologue using the debug
1393 information. If the debug information could not be used to provide
1394 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1395 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1396 limit_pc = skip_prologue_using_sal (gdbarch, pc);
a89fea3c
JL
1397 if (limit_pc == 0)
1398 limit_pc = pc + 64; /* Magic. */
1399
c906108c 1400
29d73ae4 1401 /* Check if this is Thumb code. */
9779414d 1402 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1403 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
21daaaaf
YQ
1404 else
1405 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
c906108c 1406}
94c30b78 1407
c5aa993b 1408/* *INDENT-OFF* */
c906108c
SS
1409/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1410 This function decodes a Thumb function prologue to determine:
1411 1) the size of the stack frame
1412 2) which registers are saved on it
1413 3) the offsets of saved regs
1414 4) the offset from the stack pointer to the frame pointer
c906108c 1415
da59e081
JM
1416 A typical Thumb function prologue would create this stack frame
1417 (offsets relative to FP)
c906108c
SS
1418 old SP -> 24 stack parameters
1419 20 LR
1420 16 R7
1421 R7 -> 0 local variables (16 bytes)
1422 SP -> -12 additional stack space (12 bytes)
1423 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1424 12 bytes. The frame register is R7.
da59e081 1425
da3c6d4a
MS
1426 The comments for thumb_skip_prolog() describe the algorithm we use
1427 to detect the end of the prolog. */
c5aa993b
JM
1428/* *INDENT-ON* */
1429
c906108c 1430static void
be8626e0 1431thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1432 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1433{
1434 CORE_ADDR prologue_start;
1435 CORE_ADDR prologue_end;
c906108c 1436
b39cc962
DJ
1437 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1438 &prologue_end))
c906108c 1439 {
ec3d575a
UW
1440 /* See comment in arm_scan_prologue for an explanation of
1441 this heuristics. */
1442 if (prologue_end > prologue_start + 64)
1443 {
1444 prologue_end = prologue_start + 64;
1445 }
c906108c
SS
1446 }
1447 else
f7060f85
DJ
1448 /* We're in the boondocks: we have no idea where the start of the
1449 function is. */
1450 return;
c906108c 1451
325fac50 1452 prologue_end = std::min (prologue_end, prev_pc);
c906108c 1453
be8626e0 1454 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1455}
1456
f303bc3e
YQ
1457/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1458 otherwise. */
1459
1460static int
1461arm_instruction_restores_sp (unsigned int insn)
1462{
1463 if (bits (insn, 28, 31) != INST_NV)
1464 {
1465 if ((insn & 0x0df0f000) == 0x0080d000
1466 /* ADD SP (register or immediate). */
1467 || (insn & 0x0df0f000) == 0x0040d000
1468 /* SUB SP (register or immediate). */
1469 || (insn & 0x0ffffff0) == 0x01a0d000
1470 /* MOV SP. */
1471 || (insn & 0x0fff0000) == 0x08bd0000
1472 /* POP (LDMIA). */
1473 || (insn & 0x0fff0000) == 0x049d0000)
1474 /* POP of a single register. */
1475 return 1;
1476 }
1477
1478 return 0;
1479}
1480
0d39a070
DJ
1481/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1482 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1483 fill it in. Return the first address not recognized as a prologue
1484 instruction.
eb5492fa 1485
0d39a070
DJ
1486 We recognize all the instructions typically found in ARM prologues,
1487 plus harmless instructions which can be skipped (either for analysis
1488 purposes, or a more restrictive set that can be skipped when finding
1489 the end of the prologue). */
1490
1491static CORE_ADDR
1492arm_analyze_prologue (struct gdbarch *gdbarch,
1493 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1494 struct arm_prologue_cache *cache)
1495{
0d39a070
DJ
1496 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1497 int regno;
1498 CORE_ADDR offset, current_pc;
1499 pv_t regs[ARM_FPS_REGNUM];
0d39a070
DJ
1500 CORE_ADDR unrecognized_pc = 0;
1501
1502 /* Search the prologue looking for instructions that set up the
96baa820 1503 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1504
96baa820
JM
1505 Be careful, however, and if it doesn't look like a prologue,
1506 don't try to scan it. If, for instance, a frameless function
1507 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1508 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1509 and other operations that rely on a knowledge of the stack
0d39a070 1510 traceback. */
d4473757 1511
4be43953
DJ
1512 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1513 regs[regno] = pv_register (regno, 0);
f7b7ed97 1514 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953 1515
94c30b78
MS
1516 for (current_pc = prologue_start;
1517 current_pc < prologue_end;
f43845b3 1518 current_pc += 4)
96baa820 1519 {
e17a4113 1520 unsigned int insn
198cd59d 1521 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1522
94c30b78 1523 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1524 {
4be43953 1525 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1526 continue;
1527 }
0d39a070
DJ
1528 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1529 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1530 {
1531 unsigned imm = insn & 0xff; /* immediate value */
1532 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1533 int rd = bits (insn, 12, 15);
28cd8767 1534 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1535 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1536 continue;
1537 }
0d39a070
DJ
1538 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1539 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1540 {
1541 unsigned imm = insn & 0xff; /* immediate value */
1542 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1543 int rd = bits (insn, 12, 15);
28cd8767 1544 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1545 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1546 continue;
1547 }
0963b4bd
MS
1548 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1549 [sp, #-4]! */
f43845b3 1550 {
f7b7ed97 1551 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953
DJ
1552 break;
1553 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
f7b7ed97
TT
1554 stack.store (regs[ARM_SP_REGNUM], 4,
1555 regs[bits (insn, 12, 15)]);
f43845b3
MS
1556 continue;
1557 }
1558 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1559 /* stmfd sp!, {..., fp, ip, lr, pc}
1560 or
1561 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1562 {
d4473757 1563 int mask = insn & 0xffff;
ed9a39eb 1564
f7b7ed97 1565 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953
DJ
1566 break;
1567
94c30b78 1568 /* Calculate offsets of saved registers. */
34e8f22d 1569 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1570 if (mask & (1 << regno))
1571 {
0963b4bd
MS
1572 regs[ARM_SP_REGNUM]
1573 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
f7b7ed97 1574 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1575 }
1576 }
0d39a070
DJ
1577 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1578 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1579 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1580 {
1581 /* No need to add this to saved_regs -- it's just an arg reg. */
1582 continue;
1583 }
0d39a070
DJ
1584 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1585 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1586 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1587 {
1588 /* No need to add this to saved_regs -- it's just an arg reg. */
1589 continue;
1590 }
0963b4bd
MS
1591 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1592 { registers } */
0d39a070
DJ
1593 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1594 {
1595 /* No need to add this to saved_regs -- it's just arg regs. */
1596 continue;
1597 }
d4473757
KB
1598 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1599 {
94c30b78
MS
1600 unsigned imm = insn & 0xff; /* immediate value */
1601 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1602 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1603 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1604 }
1605 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1606 {
94c30b78
MS
1607 unsigned imm = insn & 0xff; /* immediate value */
1608 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1609 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1610 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1611 }
0963b4bd
MS
1612 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1613 [sp, -#c]! */
2af46ca0 1614 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1615 {
f7b7ed97 1616 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953
DJ
1617 break;
1618
1619 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1620 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
f7b7ed97 1621 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1622 }
0963b4bd
MS
1623 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1624 [sp!] */
2af46ca0 1625 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1626 {
1627 int n_saved_fp_regs;
1628 unsigned int fp_start_reg, fp_bound_reg;
1629
f7b7ed97 1630 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
4be43953
DJ
1631 break;
1632
94c30b78 1633 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1634 {
d4473757
KB
1635 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1636 n_saved_fp_regs = 3;
1637 else
1638 n_saved_fp_regs = 1;
96baa820 1639 }
d4473757 1640 else
96baa820 1641 {
d4473757
KB
1642 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1643 n_saved_fp_regs = 2;
1644 else
1645 n_saved_fp_regs = 4;
96baa820 1646 }
d4473757 1647
34e8f22d 1648 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1649 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1650 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1651 {
4be43953 1652 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
f7b7ed97
TT
1653 stack.store (regs[ARM_SP_REGNUM], 12,
1654 regs[fp_start_reg++]);
96baa820 1655 }
c906108c 1656 }
0d39a070
DJ
1657 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1658 {
1659 /* Allow some special function calls when skipping the
1660 prologue; GCC generates these before storing arguments to
1661 the stack. */
1662 CORE_ADDR dest = BranchDest (current_pc, insn);
1663
e0634ccf 1664 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1665 continue;
1666 else
1667 break;
1668 }
d4473757 1669 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1670 break; /* Condition not true, exit early. */
0d39a070
DJ
1671 else if (arm_instruction_changes_pc (insn))
1672 /* Don't scan past anything that might change control flow. */
1673 break;
f303bc3e
YQ
1674 else if (arm_instruction_restores_sp (insn))
1675 {
1676 /* Don't scan past the epilogue. */
1677 break;
1678 }
d19f7eee
UW
1679 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1680 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1681 /* Ignore block loads from the stack, potentially copying
1682 parameters from memory. */
1683 continue;
1684 else if ((insn & 0xfc500000) == 0xe4100000
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1686 /* Similarly ignore single loads from the stack. */
1687 continue;
0d39a070
DJ
1688 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1689 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1690 register instead of the stack. */
d4473757 1691 continue;
0d39a070
DJ
1692 else
1693 {
21daaaaf
YQ
1694 /* The optimizer might shove anything into the prologue, if
1695 we build up cache (cache != NULL) from scanning prologue,
1696 we just skip what we don't recognize and scan further to
1697 make cache as complete as possible. However, if we skip
1698 prologue, we'll stop immediately on unrecognized
1699 instruction. */
0d39a070 1700 unrecognized_pc = current_pc;
21daaaaf
YQ
1701 if (cache != NULL)
1702 continue;
1703 else
1704 break;
0d39a070 1705 }
c906108c
SS
1706 }
1707
0d39a070
DJ
1708 if (unrecognized_pc == 0)
1709 unrecognized_pc = current_pc;
1710
0d39a070
DJ
1711 if (cache)
1712 {
4072f920
YQ
1713 int framereg, framesize;
1714
1715 /* The frame size is just the distance from the frame register
1716 to the original stack pointer. */
1717 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1718 {
1719 /* Frame pointer is fp. */
1720 framereg = ARM_FP_REGNUM;
1721 framesize = -regs[ARM_FP_REGNUM].k;
1722 }
1723 else
1724 {
1725 /* Try the stack pointer... this is a bit desperate. */
1726 framereg = ARM_SP_REGNUM;
1727 framesize = -regs[ARM_SP_REGNUM].k;
1728 }
1729
0d39a070
DJ
1730 cache->framereg = framereg;
1731 cache->framesize = framesize;
1732
1733 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
f7b7ed97 1734 if (stack.find_reg (gdbarch, regno, &offset))
0d39a070
DJ
1735 cache->saved_regs[regno].addr = offset;
1736 }
1737
1738 if (arm_debug)
1739 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1740 paddress (gdbarch, unrecognized_pc));
4be43953 1741
0d39a070
DJ
1742 return unrecognized_pc;
1743}
1744
1745static void
1746arm_scan_prologue (struct frame_info *this_frame,
1747 struct arm_prologue_cache *cache)
1748{
1749 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1750 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1751 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1752 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1753 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1754
1755 /* Assume there is no frame until proven otherwise. */
1756 cache->framereg = ARM_SP_REGNUM;
1757 cache->framesize = 0;
1758
1759 /* Check for Thumb prologue. */
1760 if (arm_frame_is_thumb (this_frame))
1761 {
1762 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1763 return;
1764 }
1765
1766 /* Find the function prologue. If we can't find the function in
1767 the symbol table, peek in the stack frame to find the PC. */
1768 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1769 &prologue_end))
1770 {
1771 /* One way to find the end of the prologue (which works well
1772 for unoptimized code) is to do the following:
1773
1774 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1775
1776 if (sal.line == 0)
1777 prologue_end = prev_pc;
1778 else if (sal.end < prologue_end)
1779 prologue_end = sal.end;
1780
1781 This mechanism is very accurate so long as the optimizer
1782 doesn't move any instructions from the function body into the
1783 prologue. If this happens, sal.end will be the last
1784 instruction in the first hunk of prologue code just before
1785 the first instruction that the scheduler has moved from
1786 the body to the prologue.
1787
1788 In order to make sure that we scan all of the prologue
1789 instructions, we use a slightly less accurate mechanism which
1790 may scan more than necessary. To help compensate for this
1791 lack of accuracy, the prologue scanning loop below contains
1792 several clauses which'll cause the loop to terminate early if
1793 an implausible prologue instruction is encountered.
1794
1795 The expression
1796
1797 prologue_start + 64
1798
1799 is a suitable endpoint since it accounts for the largest
1800 possible prologue plus up to five instructions inserted by
1801 the scheduler. */
1802
1803 if (prologue_end > prologue_start + 64)
1804 {
1805 prologue_end = prologue_start + 64; /* See above. */
1806 }
1807 }
1808 else
1809 {
1810 /* We have no symbol information. Our only option is to assume this
1811 function has a standard stack frame and the normal frame register.
1812 Then, we can find the value of our frame pointer on entrance to
1813 the callee (or at the present moment if this is the innermost frame).
1814 The value stored there should be the address of the stmfd + 8. */
1815 CORE_ADDR frame_loc;
7913a64c 1816 ULONGEST return_value;
0d39a070 1817
9e237747
MM
1818 /* AAPCS does not use a frame register, so we can abort here. */
1819 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1820 return;
1821
0d39a070 1822 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
7913a64c
YQ
1823 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1824 &return_value))
0d39a070
DJ
1825 return;
1826 else
1827 {
1828 prologue_start = gdbarch_addr_bits_remove
1829 (gdbarch, return_value) - 8;
1830 prologue_end = prologue_start + 64; /* See above. */
1831 }
1832 }
1833
1834 if (prev_pc < prologue_end)
1835 prologue_end = prev_pc;
1836
1837 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1838}
1839
eb5492fa 1840static struct arm_prologue_cache *
a262aec2 1841arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1842{
eb5492fa
DJ
1843 int reg;
1844 struct arm_prologue_cache *cache;
1845 CORE_ADDR unwound_fp;
c5aa993b 1846
35d5d4ee 1847 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1848 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1849
a262aec2 1850 arm_scan_prologue (this_frame, cache);
848cfffb 1851
a262aec2 1852 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1853 if (unwound_fp == 0)
1854 return cache;
c906108c 1855
4be43953 1856 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1857
eb5492fa
DJ
1858 /* Calculate actual addresses of saved registers using offsets
1859 determined by arm_scan_prologue. */
a262aec2 1860 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1861 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1862 cache->saved_regs[reg].addr += cache->prev_sp;
1863
1864 return cache;
c906108c
SS
1865}
1866
c1ee9414
LM
1867/* Implementation of the stop_reason hook for arm_prologue frames. */
1868
1869static enum unwind_stop_reason
1870arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1871 void **this_cache)
1872{
1873 struct arm_prologue_cache *cache;
1874 CORE_ADDR pc;
1875
1876 if (*this_cache == NULL)
1877 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1878 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1879
1880 /* This is meant to halt the backtrace at "_start". */
1881 pc = get_frame_pc (this_frame);
1882 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1883 return UNWIND_OUTERMOST;
1884
1885 /* If we've hit a wall, stop. */
1886 if (cache->prev_sp == 0)
1887 return UNWIND_OUTERMOST;
1888
1889 return UNWIND_NO_REASON;
1890}
1891
eb5492fa
DJ
1892/* Our frame ID for a normal frame is the current function's starting PC
1893 and the caller's SP when we were called. */
c906108c 1894
148754e5 1895static void
a262aec2 1896arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1897 void **this_cache,
1898 struct frame_id *this_id)
c906108c 1899{
eb5492fa
DJ
1900 struct arm_prologue_cache *cache;
1901 struct frame_id id;
2c404490 1902 CORE_ADDR pc, func;
f079148d 1903
eb5492fa 1904 if (*this_cache == NULL)
a262aec2 1905 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1906 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1907
0e9e9abd
UW
1908 /* Use function start address as part of the frame ID. If we cannot
1909 identify the start address (due to missing symbol information),
1910 fall back to just using the current PC. */
c1ee9414 1911 pc = get_frame_pc (this_frame);
2c404490 1912 func = get_frame_func (this_frame);
0e9e9abd
UW
1913 if (!func)
1914 func = pc;
1915
eb5492fa 1916 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1917 *this_id = id;
c906108c
SS
1918}
1919
a262aec2
DJ
1920static struct value *
1921arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1922 void **this_cache,
a262aec2 1923 int prev_regnum)
24de872b 1924{
24568a2c 1925 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1926 struct arm_prologue_cache *cache;
1927
eb5492fa 1928 if (*this_cache == NULL)
a262aec2 1929 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1930 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1931
eb5492fa 1932 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1933 instead. The prologue may save PC, but it will point into this
1934 frame's prologue, not the next frame's resume location. Also
1935 strip the saved T bit. A valid LR may have the low bit set, but
1936 a valid PC never does. */
eb5492fa 1937 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1938 {
1939 CORE_ADDR lr;
1940
1941 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1942 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1943 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1944 }
24de872b 1945
eb5492fa 1946 /* SP is generally not saved to the stack, but this frame is
a262aec2 1947 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1948 The value was already reconstructed into PREV_SP. */
1949 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1950 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1951
b39cc962
DJ
1952 /* The CPSR may have been changed by the call instruction and by the
1953 called function. The only bit we can reconstruct is the T bit,
1954 by checking the low bit of LR as of the call. This is a reliable
1955 indicator of Thumb-ness except for some ARM v4T pre-interworking
1956 Thumb code, which could get away with a clear low bit as long as
1957 the called function did not use bx. Guess that all other
1958 bits are unchanged; the condition flags are presumably lost,
1959 but the processor status is likely valid. */
1960 if (prev_regnum == ARM_PS_REGNUM)
1961 {
1962 CORE_ADDR lr, cpsr;
9779414d 1963 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1964
1965 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1966 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1967 if (IS_THUMB_ADDR (lr))
9779414d 1968 cpsr |= t_bit;
b39cc962 1969 else
9779414d 1970 cpsr &= ~t_bit;
b39cc962
DJ
1971 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1972 }
1973
a262aec2
DJ
1974 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1975 prev_regnum);
eb5492fa
DJ
1976}
1977
1978struct frame_unwind arm_prologue_unwind = {
1979 NORMAL_FRAME,
c1ee9414 1980 arm_prologue_unwind_stop_reason,
eb5492fa 1981 arm_prologue_this_id,
a262aec2
DJ
1982 arm_prologue_prev_register,
1983 NULL,
1984 default_frame_sniffer
eb5492fa
DJ
1985};
1986
0e9e9abd
UW
1987/* Maintain a list of ARM exception table entries per objfile, similar to the
1988 list of mapping symbols. We only cache entries for standard ARM-defined
1989 personality routines; the cache will contain only the frame unwinding
1990 instructions associated with the entry (not the descriptors). */
1991
1992static const struct objfile_data *arm_exidx_data_key;
1993
1994struct arm_exidx_entry
1995{
1996 bfd_vma addr;
1997 gdb_byte *entry;
1998};
1999typedef struct arm_exidx_entry arm_exidx_entry_s;
2000DEF_VEC_O(arm_exidx_entry_s);
2001
2002struct arm_exidx_data
2003{
2004 VEC(arm_exidx_entry_s) **section_maps;
2005};
2006
2007static void
2008arm_exidx_data_free (struct objfile *objfile, void *arg)
2009{
9a3c8263 2010 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
2011 unsigned int i;
2012
2013 for (i = 0; i < objfile->obfd->section_count; i++)
2014 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2015}
2016
2017static inline int
2018arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2019 const struct arm_exidx_entry *rhs)
2020{
2021 return lhs->addr < rhs->addr;
2022}
2023
2024static struct obj_section *
2025arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2026{
2027 struct obj_section *osect;
2028
2029 ALL_OBJFILE_OSECTIONS (objfile, osect)
2030 if (bfd_get_section_flags (objfile->obfd,
2031 osect->the_bfd_section) & SEC_ALLOC)
2032 {
2033 bfd_vma start, size;
2034 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2035 size = bfd_get_section_size (osect->the_bfd_section);
2036
2037 if (start <= vma && vma < start + size)
2038 return osect;
2039 }
2040
2041 return NULL;
2042}
2043
2044/* Parse contents of exception table and exception index sections
2045 of OBJFILE, and fill in the exception table entry cache.
2046
2047 For each entry that refers to a standard ARM-defined personality
2048 routine, extract the frame unwinding instructions (from either
2049 the index or the table section). The unwinding instructions
2050 are normalized by:
2051 - extracting them from the rest of the table data
2052 - converting to host endianness
2053 - appending the implicit 0xb0 ("Finish") code
2054
2055 The extracted and normalized instructions are stored for later
2056 retrieval by the arm_find_exidx_entry routine. */
2057
2058static void
2059arm_exidx_new_objfile (struct objfile *objfile)
2060{
0e9e9abd
UW
2061 struct arm_exidx_data *data;
2062 asection *exidx, *extab;
2063 bfd_vma exidx_vma = 0, extab_vma = 0;
0e9e9abd
UW
2064 LONGEST i;
2065
2066 /* If we've already touched this file, do nothing. */
2067 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2068 return;
2069
2070 /* Read contents of exception table and index. */
a5eda10c 2071 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
984c7238 2072 gdb::byte_vector exidx_data;
0e9e9abd
UW
2073 if (exidx)
2074 {
2075 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
984c7238 2076 exidx_data.resize (bfd_get_section_size (exidx));
0e9e9abd
UW
2077
2078 if (!bfd_get_section_contents (objfile->obfd, exidx,
984c7238
TT
2079 exidx_data.data (), 0,
2080 exidx_data.size ()))
2081 return;
0e9e9abd
UW
2082 }
2083
2084 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
984c7238 2085 gdb::byte_vector extab_data;
0e9e9abd
UW
2086 if (extab)
2087 {
2088 extab_vma = bfd_section_vma (objfile->obfd, extab);
984c7238 2089 extab_data.resize (bfd_get_section_size (extab));
0e9e9abd
UW
2090
2091 if (!bfd_get_section_contents (objfile->obfd, extab,
984c7238
TT
2092 extab_data.data (), 0,
2093 extab_data.size ()))
2094 return;
0e9e9abd
UW
2095 }
2096
2097 /* Allocate exception table data structure. */
2098 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2099 set_objfile_data (objfile, arm_exidx_data_key, data);
2100 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2101 objfile->obfd->section_count,
2102 VEC(arm_exidx_entry_s) *);
2103
2104 /* Fill in exception table. */
984c7238 2105 for (i = 0; i < exidx_data.size () / 8; i++)
0e9e9abd
UW
2106 {
2107 struct arm_exidx_entry new_exidx_entry;
984c7238
TT
2108 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2109 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2110 exidx_data.data () + i * 8 + 4);
0e9e9abd
UW
2111 bfd_vma addr = 0, word = 0;
2112 int n_bytes = 0, n_words = 0;
2113 struct obj_section *sec;
2114 gdb_byte *entry = NULL;
2115
2116 /* Extract address of start of function. */
2117 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2118 idx += exidx_vma + i * 8;
2119
2120 /* Find section containing function and compute section offset. */
2121 sec = arm_obj_section_from_vma (objfile, idx);
2122 if (sec == NULL)
2123 continue;
2124 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2125
2126 /* Determine address of exception table entry. */
2127 if (val == 1)
2128 {
2129 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2130 }
2131 else if ((val & 0xff000000) == 0x80000000)
2132 {
2133 /* Exception table entry embedded in .ARM.exidx
2134 -- must be short form. */
2135 word = val;
2136 n_bytes = 3;
2137 }
2138 else if (!(val & 0x80000000))
2139 {
2140 /* Exception table entry in .ARM.extab. */
2141 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2142 addr += exidx_vma + i * 8 + 4;
2143
984c7238 2144 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
0e9e9abd
UW
2145 {
2146 word = bfd_h_get_32 (objfile->obfd,
984c7238 2147 extab_data.data () + addr - extab_vma);
0e9e9abd
UW
2148 addr += 4;
2149
2150 if ((word & 0xff000000) == 0x80000000)
2151 {
2152 /* Short form. */
2153 n_bytes = 3;
2154 }
2155 else if ((word & 0xff000000) == 0x81000000
2156 || (word & 0xff000000) == 0x82000000)
2157 {
2158 /* Long form. */
2159 n_bytes = 2;
2160 n_words = ((word >> 16) & 0xff);
2161 }
2162 else if (!(word & 0x80000000))
2163 {
2164 bfd_vma pers;
2165 struct obj_section *pers_sec;
2166 int gnu_personality = 0;
2167
2168 /* Custom personality routine. */
2169 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2170 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2171
2172 /* Check whether we've got one of the variants of the
2173 GNU personality routines. */
2174 pers_sec = arm_obj_section_from_vma (objfile, pers);
2175 if (pers_sec)
2176 {
2177 static const char *personality[] =
2178 {
2179 "__gcc_personality_v0",
2180 "__gxx_personality_v0",
2181 "__gcj_personality_v0",
2182 "__gnu_objc_personality_v0",
2183 NULL
2184 };
2185
2186 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2187 int k;
2188
2189 for (k = 0; personality[k]; k++)
2190 if (lookup_minimal_symbol_by_pc_name
2191 (pc, personality[k], objfile))
2192 {
2193 gnu_personality = 1;
2194 break;
2195 }
2196 }
2197
2198 /* If so, the next word contains a word count in the high
2199 byte, followed by the same unwind instructions as the
2200 pre-defined forms. */
2201 if (gnu_personality
984c7238 2202 && addr + 4 <= extab_vma + extab_data.size ())
0e9e9abd
UW
2203 {
2204 word = bfd_h_get_32 (objfile->obfd,
984c7238
TT
2205 (extab_data.data ()
2206 + addr - extab_vma));
0e9e9abd
UW
2207 addr += 4;
2208 n_bytes = 3;
2209 n_words = ((word >> 24) & 0xff);
2210 }
2211 }
2212 }
2213 }
2214
2215 /* Sanity check address. */
2216 if (n_words)
984c7238
TT
2217 if (addr < extab_vma
2218 || addr + 4 * n_words > extab_vma + extab_data.size ())
0e9e9abd
UW
2219 n_words = n_bytes = 0;
2220
2221 /* The unwind instructions reside in WORD (only the N_BYTES least
2222 significant bytes are valid), followed by N_WORDS words in the
2223 extab section starting at ADDR. */
2224 if (n_bytes || n_words)
2225 {
224c3ddb
SM
2226 gdb_byte *p = entry
2227 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2228 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2229
2230 while (n_bytes--)
2231 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2232
2233 while (n_words--)
2234 {
2235 word = bfd_h_get_32 (objfile->obfd,
984c7238 2236 extab_data.data () + addr - extab_vma);
0e9e9abd
UW
2237 addr += 4;
2238
2239 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2240 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2241 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2242 *p++ = (gdb_byte) (word & 0xff);
2243 }
2244
2245 /* Implied "Finish" to terminate the list. */
2246 *p++ = 0xb0;
2247 }
2248
2249 /* Push entry onto vector. They are guaranteed to always
2250 appear in order of increasing addresses. */
2251 new_exidx_entry.addr = idx;
2252 new_exidx_entry.entry = entry;
2253 VEC_safe_push (arm_exidx_entry_s,
2254 data->section_maps[sec->the_bfd_section->index],
2255 &new_exidx_entry);
2256 }
0e9e9abd
UW
2257}
2258
2259/* Search for the exception table entry covering MEMADDR. If one is found,
2260 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2261 set *START to the start of the region covered by this entry. */
2262
2263static gdb_byte *
2264arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2265{
2266 struct obj_section *sec;
2267
2268 sec = find_pc_section (memaddr);
2269 if (sec != NULL)
2270 {
2271 struct arm_exidx_data *data;
2272 VEC(arm_exidx_entry_s) *map;
2273 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2274 unsigned int idx;
2275
9a3c8263
SM
2276 data = ((struct arm_exidx_data *)
2277 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2278 if (data != NULL)
2279 {
2280 map = data->section_maps[sec->the_bfd_section->index];
2281 if (!VEC_empty (arm_exidx_entry_s, map))
2282 {
2283 struct arm_exidx_entry *map_sym;
2284
2285 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2286 arm_compare_exidx_entries);
2287
2288 /* VEC_lower_bound finds the earliest ordered insertion
2289 point. If the following symbol starts at this exact
2290 address, we use that; otherwise, the preceding
2291 exception table entry covers this address. */
2292 if (idx < VEC_length (arm_exidx_entry_s, map))
2293 {
2294 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2295 if (map_sym->addr == map_key.addr)
2296 {
2297 if (start)
2298 *start = map_sym->addr + obj_section_addr (sec);
2299 return map_sym->entry;
2300 }
2301 }
2302
2303 if (idx > 0)
2304 {
2305 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2306 if (start)
2307 *start = map_sym->addr + obj_section_addr (sec);
2308 return map_sym->entry;
2309 }
2310 }
2311 }
2312 }
2313
2314 return NULL;
2315}
2316
2317/* Given the current frame THIS_FRAME, and its associated frame unwinding
2318 instruction list from the ARM exception table entry ENTRY, allocate and
2319 return a prologue cache structure describing how to unwind this frame.
2320
2321 Return NULL if the unwinding instruction list contains a "spare",
2322 "reserved" or "refuse to unwind" instruction as defined in section
2323 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2324 for the ARM Architecture" document. */
2325
2326static struct arm_prologue_cache *
2327arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2328{
2329 CORE_ADDR vsp = 0;
2330 int vsp_valid = 0;
2331
2332 struct arm_prologue_cache *cache;
2333 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2334 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2335
2336 for (;;)
2337 {
2338 gdb_byte insn;
2339
2340 /* Whenever we reload SP, we actually have to retrieve its
2341 actual value in the current frame. */
2342 if (!vsp_valid)
2343 {
2344 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2345 {
2346 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2347 vsp = get_frame_register_unsigned (this_frame, reg);
2348 }
2349 else
2350 {
2351 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2352 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2353 }
2354
2355 vsp_valid = 1;
2356 }
2357
2358 /* Decode next unwind instruction. */
2359 insn = *entry++;
2360
2361 if ((insn & 0xc0) == 0)
2362 {
2363 int offset = insn & 0x3f;
2364 vsp += (offset << 2) + 4;
2365 }
2366 else if ((insn & 0xc0) == 0x40)
2367 {
2368 int offset = insn & 0x3f;
2369 vsp -= (offset << 2) + 4;
2370 }
2371 else if ((insn & 0xf0) == 0x80)
2372 {
2373 int mask = ((insn & 0xf) << 8) | *entry++;
2374 int i;
2375
2376 /* The special case of an all-zero mask identifies
2377 "Refuse to unwind". We return NULL to fall back
2378 to the prologue analyzer. */
2379 if (mask == 0)
2380 return NULL;
2381
2382 /* Pop registers r4..r15 under mask. */
2383 for (i = 0; i < 12; i++)
2384 if (mask & (1 << i))
2385 {
2386 cache->saved_regs[4 + i].addr = vsp;
2387 vsp += 4;
2388 }
2389
2390 /* Special-case popping SP -- we need to reload vsp. */
2391 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2392 vsp_valid = 0;
2393 }
2394 else if ((insn & 0xf0) == 0x90)
2395 {
2396 int reg = insn & 0xf;
2397
2398 /* Reserved cases. */
2399 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2400 return NULL;
2401
2402 /* Set SP from another register and mark VSP for reload. */
2403 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2404 vsp_valid = 0;
2405 }
2406 else if ((insn & 0xf0) == 0xa0)
2407 {
2408 int count = insn & 0x7;
2409 int pop_lr = (insn & 0x8) != 0;
2410 int i;
2411
2412 /* Pop r4..r[4+count]. */
2413 for (i = 0; i <= count; i++)
2414 {
2415 cache->saved_regs[4 + i].addr = vsp;
2416 vsp += 4;
2417 }
2418
2419 /* If indicated by flag, pop LR as well. */
2420 if (pop_lr)
2421 {
2422 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2423 vsp += 4;
2424 }
2425 }
2426 else if (insn == 0xb0)
2427 {
2428 /* We could only have updated PC by popping into it; if so, it
2429 will show up as address. Otherwise, copy LR into PC. */
2430 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2431 cache->saved_regs[ARM_PC_REGNUM]
2432 = cache->saved_regs[ARM_LR_REGNUM];
2433
2434 /* We're done. */
2435 break;
2436 }
2437 else if (insn == 0xb1)
2438 {
2439 int mask = *entry++;
2440 int i;
2441
2442 /* All-zero mask and mask >= 16 is "spare". */
2443 if (mask == 0 || mask >= 16)
2444 return NULL;
2445
2446 /* Pop r0..r3 under mask. */
2447 for (i = 0; i < 4; i++)
2448 if (mask & (1 << i))
2449 {
2450 cache->saved_regs[i].addr = vsp;
2451 vsp += 4;
2452 }
2453 }
2454 else if (insn == 0xb2)
2455 {
2456 ULONGEST offset = 0;
2457 unsigned shift = 0;
2458
2459 do
2460 {
2461 offset |= (*entry & 0x7f) << shift;
2462 shift += 7;
2463 }
2464 while (*entry++ & 0x80);
2465
2466 vsp += 0x204 + (offset << 2);
2467 }
2468 else if (insn == 0xb3)
2469 {
2470 int start = *entry >> 4;
2471 int count = (*entry++) & 0xf;
2472 int i;
2473
2474 /* Only registers D0..D15 are valid here. */
2475 if (start + count >= 16)
2476 return NULL;
2477
2478 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2479 for (i = 0; i <= count; i++)
2480 {
2481 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2482 vsp += 8;
2483 }
2484
2485 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2486 vsp += 4;
2487 }
2488 else if ((insn & 0xf8) == 0xb8)
2489 {
2490 int count = insn & 0x7;
2491 int i;
2492
2493 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2494 for (i = 0; i <= count; i++)
2495 {
2496 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2497 vsp += 8;
2498 }
2499
2500 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2501 vsp += 4;
2502 }
2503 else if (insn == 0xc6)
2504 {
2505 int start = *entry >> 4;
2506 int count = (*entry++) & 0xf;
2507 int i;
2508
2509 /* Only registers WR0..WR15 are valid. */
2510 if (start + count >= 16)
2511 return NULL;
2512
2513 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2514 for (i = 0; i <= count; i++)
2515 {
2516 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2517 vsp += 8;
2518 }
2519 }
2520 else if (insn == 0xc7)
2521 {
2522 int mask = *entry++;
2523 int i;
2524
2525 /* All-zero mask and mask >= 16 is "spare". */
2526 if (mask == 0 || mask >= 16)
2527 return NULL;
2528
2529 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2530 for (i = 0; i < 4; i++)
2531 if (mask & (1 << i))
2532 {
2533 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2534 vsp += 4;
2535 }
2536 }
2537 else if ((insn & 0xf8) == 0xc0)
2538 {
2539 int count = insn & 0x7;
2540 int i;
2541
2542 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2543 for (i = 0; i <= count; i++)
2544 {
2545 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2546 vsp += 8;
2547 }
2548 }
2549 else if (insn == 0xc8)
2550 {
2551 int start = *entry >> 4;
2552 int count = (*entry++) & 0xf;
2553 int i;
2554
2555 /* Only registers D0..D31 are valid. */
2556 if (start + count >= 16)
2557 return NULL;
2558
2559 /* Pop VFP double-precision registers
2560 D[16+start]..D[16+start+count]. */
2561 for (i = 0; i <= count; i++)
2562 {
2563 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2564 vsp += 8;
2565 }
2566 }
2567 else if (insn == 0xc9)
2568 {
2569 int start = *entry >> 4;
2570 int count = (*entry++) & 0xf;
2571 int i;
2572
2573 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2574 for (i = 0; i <= count; i++)
2575 {
2576 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2577 vsp += 8;
2578 }
2579 }
2580 else if ((insn & 0xf8) == 0xd0)
2581 {
2582 int count = insn & 0x7;
2583 int i;
2584
2585 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2586 for (i = 0; i <= count; i++)
2587 {
2588 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2589 vsp += 8;
2590 }
2591 }
2592 else
2593 {
2594 /* Everything else is "spare". */
2595 return NULL;
2596 }
2597 }
2598
2599 /* If we restore SP from a register, assume this was the frame register.
2600 Otherwise just fall back to SP as frame register. */
2601 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2602 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2603 else
2604 cache->framereg = ARM_SP_REGNUM;
2605
2606 /* Determine offset to previous frame. */
2607 cache->framesize
2608 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2609
2610 /* We already got the previous SP. */
2611 cache->prev_sp = vsp;
2612
2613 return cache;
2614}
2615
2616/* Unwinding via ARM exception table entries. Note that the sniffer
2617 already computes a filled-in prologue cache, which is then used
2618 with the same arm_prologue_this_id and arm_prologue_prev_register
2619 routines also used for prologue-parsing based unwinding. */
2620
2621static int
2622arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2623 struct frame_info *this_frame,
2624 void **this_prologue_cache)
2625{
2626 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2627 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2628 CORE_ADDR addr_in_block, exidx_region, func_start;
2629 struct arm_prologue_cache *cache;
2630 gdb_byte *entry;
2631
2632 /* See if we have an ARM exception table entry covering this address. */
2633 addr_in_block = get_frame_address_in_block (this_frame);
2634 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2635 if (!entry)
2636 return 0;
2637
2638 /* The ARM exception table does not describe unwind information
2639 for arbitrary PC values, but is guaranteed to be correct only
2640 at call sites. We have to decide here whether we want to use
2641 ARM exception table information for this frame, or fall back
2642 to using prologue parsing. (Note that if we have DWARF CFI,
2643 this sniffer isn't even called -- CFI is always preferred.)
2644
2645 Before we make this decision, however, we check whether we
2646 actually have *symbol* information for the current frame.
2647 If not, prologue parsing would not work anyway, so we might
2648 as well use the exception table and hope for the best. */
2649 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2650 {
2651 int exc_valid = 0;
2652
2653 /* If the next frame is "normal", we are at a call site in this
2654 frame, so exception information is guaranteed to be valid. */
2655 if (get_next_frame (this_frame)
2656 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2657 exc_valid = 1;
2658
2659 /* We also assume exception information is valid if we're currently
2660 blocked in a system call. The system library is supposed to
d9311bfa
AT
2661 ensure this, so that e.g. pthread cancellation works. */
2662 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2663 {
7913a64c 2664 ULONGEST insn;
416dc9c6 2665
7913a64c
YQ
2666 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2667 2, byte_order_for_code, &insn)
d9311bfa
AT
2668 && (insn & 0xff00) == 0xdf00 /* svc */)
2669 exc_valid = 1;
0e9e9abd 2670 }
d9311bfa
AT
2671 else
2672 {
7913a64c 2673 ULONGEST insn;
416dc9c6 2674
7913a64c
YQ
2675 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2676 4, byte_order_for_code, &insn)
d9311bfa
AT
2677 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2678 exc_valid = 1;
2679 }
2680
0e9e9abd
UW
2681 /* Bail out if we don't know that exception information is valid. */
2682 if (!exc_valid)
2683 return 0;
2684
2685 /* The ARM exception index does not mark the *end* of the region
2686 covered by the entry, and some functions will not have any entry.
2687 To correctly recognize the end of the covered region, the linker
2688 should have inserted dummy records with a CANTUNWIND marker.
2689
2690 Unfortunately, current versions of GNU ld do not reliably do
2691 this, and thus we may have found an incorrect entry above.
2692 As a (temporary) sanity check, we only use the entry if it
2693 lies *within* the bounds of the function. Note that this check
2694 might reject perfectly valid entries that just happen to cover
2695 multiple functions; therefore this check ought to be removed
2696 once the linker is fixed. */
2697 if (func_start > exidx_region)
2698 return 0;
2699 }
2700
2701 /* Decode the list of unwinding instructions into a prologue cache.
2702 Note that this may fail due to e.g. a "refuse to unwind" code. */
2703 cache = arm_exidx_fill_cache (this_frame, entry);
2704 if (!cache)
2705 return 0;
2706
2707 *this_prologue_cache = cache;
2708 return 1;
2709}
2710
2711struct frame_unwind arm_exidx_unwind = {
2712 NORMAL_FRAME,
8fbca658 2713 default_frame_unwind_stop_reason,
0e9e9abd
UW
2714 arm_prologue_this_id,
2715 arm_prologue_prev_register,
2716 NULL,
2717 arm_exidx_unwind_sniffer
2718};
2719
779aa56f
YQ
2720static struct arm_prologue_cache *
2721arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2722{
2723 struct arm_prologue_cache *cache;
779aa56f
YQ
2724 int reg;
2725
2726 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2727 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2728
2729 /* Still rely on the offset calculated from prologue. */
2730 arm_scan_prologue (this_frame, cache);
2731
2732 /* Since we are in epilogue, the SP has been restored. */
2733 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2734
2735 /* Calculate actual addresses of saved registers using offsets
2736 determined by arm_scan_prologue. */
2737 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2738 if (trad_frame_addr_p (cache->saved_regs, reg))
2739 cache->saved_regs[reg].addr += cache->prev_sp;
2740
2741 return cache;
2742}
2743
2744/* Implementation of function hook 'this_id' in
2745 'struct frame_uwnind' for epilogue unwinder. */
2746
2747static void
2748arm_epilogue_frame_this_id (struct frame_info *this_frame,
2749 void **this_cache,
2750 struct frame_id *this_id)
2751{
2752 struct arm_prologue_cache *cache;
2753 CORE_ADDR pc, func;
2754
2755 if (*this_cache == NULL)
2756 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2757 cache = (struct arm_prologue_cache *) *this_cache;
2758
2759 /* Use function start address as part of the frame ID. If we cannot
2760 identify the start address (due to missing symbol information),
2761 fall back to just using the current PC. */
2762 pc = get_frame_pc (this_frame);
2763 func = get_frame_func (this_frame);
fb3f3d25 2764 if (func == 0)
779aa56f
YQ
2765 func = pc;
2766
2767 (*this_id) = frame_id_build (cache->prev_sp, pc);
2768}
2769
2770/* Implementation of function hook 'prev_register' in
2771 'struct frame_uwnind' for epilogue unwinder. */
2772
2773static struct value *
2774arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2775 void **this_cache, int regnum)
2776{
779aa56f
YQ
2777 if (*this_cache == NULL)
2778 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2779
2780 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2781}
2782
2783static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2784 CORE_ADDR pc);
2785static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2786 CORE_ADDR pc);
2787
2788/* Implementation of function hook 'sniffer' in
2789 'struct frame_uwnind' for epilogue unwinder. */
2790
2791static int
2792arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2793 struct frame_info *this_frame,
2794 void **this_prologue_cache)
2795{
2796 if (frame_relative_level (this_frame) == 0)
2797 {
2798 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2799 CORE_ADDR pc = get_frame_pc (this_frame);
2800
2801 if (arm_frame_is_thumb (this_frame))
2802 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2803 else
2804 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2805 }
2806 else
2807 return 0;
2808}
2809
2810/* Frame unwinder from epilogue. */
2811
2812static const struct frame_unwind arm_epilogue_frame_unwind =
2813{
2814 NORMAL_FRAME,
2815 default_frame_unwind_stop_reason,
2816 arm_epilogue_frame_this_id,
2817 arm_epilogue_frame_prev_register,
2818 NULL,
2819 arm_epilogue_frame_sniffer,
2820};
2821
80d8d390
YQ
2822/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2823 trampoline, return the target PC. Otherwise return 0.
2824
2825 void call0a (char c, short s, int i, long l) {}
2826
2827 int main (void)
2828 {
2829 (*pointer_to_call0a) (c, s, i, l);
2830 }
2831
2832 Instead of calling a stub library function _call_via_xx (xx is
2833 the register name), GCC may inline the trampoline in the object
2834 file as below (register r2 has the address of call0a).
2835
2836 .global main
2837 .type main, %function
2838 ...
2839 bl .L1
2840 ...
2841 .size main, .-main
2842
2843 .L1:
2844 bx r2
2845
2846 The trampoline 'bx r2' doesn't belong to main. */
2847
2848static CORE_ADDR
2849arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2850{
2851 /* The heuristics of recognizing such trampoline is that FRAME is
2852 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2853 if (arm_frame_is_thumb (frame))
2854 {
2855 gdb_byte buf[2];
2856
2857 if (target_read_memory (pc, buf, 2) == 0)
2858 {
2859 struct gdbarch *gdbarch = get_frame_arch (frame);
2860 enum bfd_endian byte_order_for_code
2861 = gdbarch_byte_order_for_code (gdbarch);
2862 uint16_t insn
2863 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2864
2865 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2866 {
2867 CORE_ADDR dest
2868 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2869
2870 /* Clear the LSB so that gdb core sets step-resume
2871 breakpoint at the right address. */
2872 return UNMAKE_THUMB_ADDR (dest);
2873 }
2874 }
2875 }
2876
2877 return 0;
2878}
2879
909cf6ea 2880static struct arm_prologue_cache *
a262aec2 2881arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2882{
909cf6ea 2883 struct arm_prologue_cache *cache;
909cf6ea 2884
35d5d4ee 2885 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2886 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2887
a262aec2 2888 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2889
2890 return cache;
2891}
2892
2893/* Our frame ID for a stub frame is the current SP and LR. */
2894
2895static void
a262aec2 2896arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2897 void **this_cache,
2898 struct frame_id *this_id)
2899{
2900 struct arm_prologue_cache *cache;
2901
2902 if (*this_cache == NULL)
a262aec2 2903 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2904 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2905
a262aec2 2906 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2907}
2908
a262aec2
DJ
2909static int
2910arm_stub_unwind_sniffer (const struct frame_unwind *self,
2911 struct frame_info *this_frame,
2912 void **this_prologue_cache)
909cf6ea 2913{
93d42b30 2914 CORE_ADDR addr_in_block;
948f8e3d 2915 gdb_byte dummy[4];
18d18ac8
YQ
2916 CORE_ADDR pc, start_addr;
2917 const char *name;
909cf6ea 2918
a262aec2 2919 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2920 pc = get_frame_pc (this_frame);
3e5d3a5a 2921 if (in_plt_section (addr_in_block)
fc36e839
DE
2922 /* We also use the stub winder if the target memory is unreadable
2923 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2924 || target_read_memory (pc, dummy, 4) != 0)
2925 return 1;
2926
2927 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2928 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2929 return 1;
909cf6ea 2930
a262aec2 2931 return 0;
909cf6ea
DJ
2932}
2933
a262aec2
DJ
2934struct frame_unwind arm_stub_unwind = {
2935 NORMAL_FRAME,
8fbca658 2936 default_frame_unwind_stop_reason,
a262aec2
DJ
2937 arm_stub_this_id,
2938 arm_prologue_prev_register,
2939 NULL,
2940 arm_stub_unwind_sniffer
2941};
2942
2ae28aa9
YQ
2943/* Put here the code to store, into CACHE->saved_regs, the addresses
2944 of the saved registers of frame described by THIS_FRAME. CACHE is
2945 returned. */
2946
2947static struct arm_prologue_cache *
2948arm_m_exception_cache (struct frame_info *this_frame)
2949{
2950 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2951 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2952 struct arm_prologue_cache *cache;
2953 CORE_ADDR unwound_sp;
2954 LONGEST xpsr;
2955
2956 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2957 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2958
2959 unwound_sp = get_frame_register_unsigned (this_frame,
2960 ARM_SP_REGNUM);
2961
2962 /* The hardware saves eight 32-bit words, comprising xPSR,
2963 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2964 "B1.5.6 Exception entry behavior" in
2965 "ARMv7-M Architecture Reference Manual". */
2966 cache->saved_regs[0].addr = unwound_sp;
2967 cache->saved_regs[1].addr = unwound_sp + 4;
2968 cache->saved_regs[2].addr = unwound_sp + 8;
2969 cache->saved_regs[3].addr = unwound_sp + 12;
2970 cache->saved_regs[12].addr = unwound_sp + 16;
2971 cache->saved_regs[14].addr = unwound_sp + 20;
2972 cache->saved_regs[15].addr = unwound_sp + 24;
2973 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2974
2975 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2976 aligner between the top of the 32-byte stack frame and the
2977 previous context's stack pointer. */
2978 cache->prev_sp = unwound_sp + 32;
2979 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2980 && (xpsr & (1 << 9)) != 0)
2981 cache->prev_sp += 4;
2982
2983 return cache;
2984}
2985
2986/* Implementation of function hook 'this_id' in
2987 'struct frame_uwnind'. */
2988
2989static void
2990arm_m_exception_this_id (struct frame_info *this_frame,
2991 void **this_cache,
2992 struct frame_id *this_id)
2993{
2994 struct arm_prologue_cache *cache;
2995
2996 if (*this_cache == NULL)
2997 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2998 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2999
3000 /* Our frame ID for a stub frame is the current SP and LR. */
3001 *this_id = frame_id_build (cache->prev_sp,
3002 get_frame_pc (this_frame));
3003}
3004
3005/* Implementation of function hook 'prev_register' in
3006 'struct frame_uwnind'. */
3007
3008static struct value *
3009arm_m_exception_prev_register (struct frame_info *this_frame,
3010 void **this_cache,
3011 int prev_regnum)
3012{
2ae28aa9
YQ
3013 struct arm_prologue_cache *cache;
3014
3015 if (*this_cache == NULL)
3016 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3017 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3018
3019 /* The value was already reconstructed into PREV_SP. */
3020 if (prev_regnum == ARM_SP_REGNUM)
3021 return frame_unwind_got_constant (this_frame, prev_regnum,
3022 cache->prev_sp);
3023
3024 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3025 prev_regnum);
3026}
3027
3028/* Implementation of function hook 'sniffer' in
3029 'struct frame_uwnind'. */
3030
3031static int
3032arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3033 struct frame_info *this_frame,
3034 void **this_prologue_cache)
3035{
3036 CORE_ADDR this_pc = get_frame_pc (this_frame);
3037
3038 /* No need to check is_m; this sniffer is only registered for
3039 M-profile architectures. */
3040
ca90e760
FH
3041 /* Check if exception frame returns to a magic PC value. */
3042 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3043}
3044
3045/* Frame unwinder for M-profile exceptions. */
3046
3047struct frame_unwind arm_m_exception_unwind =
3048{
3049 SIGTRAMP_FRAME,
3050 default_frame_unwind_stop_reason,
3051 arm_m_exception_this_id,
3052 arm_m_exception_prev_register,
3053 NULL,
3054 arm_m_exception_unwind_sniffer
3055};
3056
24de872b 3057static CORE_ADDR
a262aec2 3058arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3059{
3060 struct arm_prologue_cache *cache;
3061
eb5492fa 3062 if (*this_cache == NULL)
a262aec2 3063 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3064 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3065
4be43953 3066 return cache->prev_sp - cache->framesize;
24de872b
DJ
3067}
3068
eb5492fa
DJ
3069struct frame_base arm_normal_base = {
3070 &arm_prologue_unwind,
3071 arm_normal_frame_base,
3072 arm_normal_frame_base,
3073 arm_normal_frame_base
3074};
3075
b39cc962
DJ
3076static struct value *
3077arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3078 int regnum)
3079{
24568a2c 3080 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3081 CORE_ADDR lr, cpsr;
9779414d 3082 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3083
3084 switch (regnum)
3085 {
3086 case ARM_PC_REGNUM:
3087 /* The PC is normally copied from the return column, which
3088 describes saves of LR. However, that version may have an
3089 extra bit set to indicate Thumb state. The bit is not
3090 part of the PC. */
3091 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3092 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3093 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3094
3095 case ARM_PS_REGNUM:
3096 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3097 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3098 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3099 if (IS_THUMB_ADDR (lr))
9779414d 3100 cpsr |= t_bit;
b39cc962 3101 else
9779414d 3102 cpsr &= ~t_bit;
ca38c58e 3103 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3104
3105 default:
3106 internal_error (__FILE__, __LINE__,
3107 _("Unexpected register %d"), regnum);
3108 }
3109}
3110
3111static void
3112arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3113 struct dwarf2_frame_state_reg *reg,
3114 struct frame_info *this_frame)
3115{
3116 switch (regnum)
3117 {
3118 case ARM_PC_REGNUM:
3119 case ARM_PS_REGNUM:
3120 reg->how = DWARF2_FRAME_REG_FN;
3121 reg->loc.fn = arm_dwarf2_prev_register;
3122 break;
3123 case ARM_SP_REGNUM:
3124 reg->how = DWARF2_FRAME_REG_CFA;
3125 break;
3126 }
3127}
3128
c9cf6e20 3129/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3130
3131static int
c9cf6e20 3132thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3133{
3134 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3135 unsigned int insn, insn2;
3136 int found_return = 0, found_stack_adjust = 0;
3137 CORE_ADDR func_start, func_end;
3138 CORE_ADDR scan_pc;
3139 gdb_byte buf[4];
3140
3141 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3142 return 0;
3143
3144 /* The epilogue is a sequence of instructions along the following lines:
3145
3146 - add stack frame size to SP or FP
3147 - [if frame pointer used] restore SP from FP
3148 - restore registers from SP [may include PC]
3149 - a return-type instruction [if PC wasn't already restored]
3150
3151 In a first pass, we scan forward from the current PC and verify the
3152 instructions we find as compatible with this sequence, ending in a
3153 return instruction.
3154
3155 However, this is not sufficient to distinguish indirect function calls
3156 within a function from indirect tail calls in the epilogue in some cases.
3157 Therefore, if we didn't already find any SP-changing instruction during
3158 forward scan, we add a backward scanning heuristic to ensure we actually
3159 are in the epilogue. */
3160
3161 scan_pc = pc;
3162 while (scan_pc < func_end && !found_return)
3163 {
3164 if (target_read_memory (scan_pc, buf, 2))
3165 break;
3166
3167 scan_pc += 2;
3168 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3169
3170 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3171 found_return = 1;
3172 else if (insn == 0x46f7) /* mov pc, lr */
3173 found_return = 1;
540314bd 3174 else if (thumb_instruction_restores_sp (insn))
4024ca99 3175 {
b7576e5c 3176 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3177 found_return = 1;
3178 }
db24da6d 3179 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3180 {
3181 if (target_read_memory (scan_pc, buf, 2))
3182 break;
3183
3184 scan_pc += 2;
3185 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3186
3187 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3188 {
4024ca99
UW
3189 if (insn2 & 0x8000) /* <registers> include PC. */
3190 found_return = 1;
3191 }
3192 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3193 && (insn2 & 0x0fff) == 0x0b04)
3194 {
4024ca99
UW
3195 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3196 found_return = 1;
3197 }
3198 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3199 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3200 ;
4024ca99
UW
3201 else
3202 break;
3203 }
3204 else
3205 break;
3206 }
3207
3208 if (!found_return)
3209 return 0;
3210
3211 /* Since any instruction in the epilogue sequence, with the possible
3212 exception of return itself, updates the stack pointer, we need to
3213 scan backwards for at most one instruction. Try either a 16-bit or
3214 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3215 too much about false positives. */
4024ca99 3216
6b65d1b6
YQ
3217 if (pc - 4 < func_start)
3218 return 0;
3219 if (target_read_memory (pc - 4, buf, 4))
3220 return 0;
4024ca99 3221
6b65d1b6
YQ
3222 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3223 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3224
3225 if (thumb_instruction_restores_sp (insn2))
3226 found_stack_adjust = 1;
3227 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3228 found_stack_adjust = 1;
3229 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3230 && (insn2 & 0x0fff) == 0x0b04)
3231 found_stack_adjust = 1;
3232 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3233 && (insn2 & 0x0e00) == 0x0a00)
3234 found_stack_adjust = 1;
4024ca99
UW
3235
3236 return found_stack_adjust;
3237}
3238
4024ca99 3239static int
c58b006a 3240arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3241{
3242 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3243 unsigned int insn;
f303bc3e 3244 int found_return;
4024ca99
UW
3245 CORE_ADDR func_start, func_end;
3246
4024ca99
UW
3247 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3248 return 0;
3249
3250 /* We are in the epilogue if the previous instruction was a stack
3251 adjustment and the next instruction is a possible return (bx, mov
3252 pc, or pop). We could have to scan backwards to find the stack
3253 adjustment, or forwards to find the return, but this is a decent
3254 approximation. First scan forwards. */
3255
3256 found_return = 0;
3257 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3258 if (bits (insn, 28, 31) != INST_NV)
3259 {
3260 if ((insn & 0x0ffffff0) == 0x012fff10)
3261 /* BX. */
3262 found_return = 1;
3263 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3264 /* MOV PC. */
3265 found_return = 1;
3266 else if ((insn & 0x0fff0000) == 0x08bd0000
3267 && (insn & 0x0000c000) != 0)
3268 /* POP (LDMIA), including PC or LR. */
3269 found_return = 1;
3270 }
3271
3272 if (!found_return)
3273 return 0;
3274
3275 /* Scan backwards. This is just a heuristic, so do not worry about
3276 false positives from mode changes. */
3277
3278 if (pc < func_start + 4)
3279 return 0;
3280
3281 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3282 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3283 return 1;
3284
3285 return 0;
3286}
3287
c58b006a
YQ
3288/* Implement the stack_frame_destroyed_p gdbarch method. */
3289
3290static int
3291arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3292{
3293 if (arm_pc_is_thumb (gdbarch, pc))
3294 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3295 else
3296 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3297}
4024ca99 3298
2dd604e7
RE
3299/* When arguments must be pushed onto the stack, they go on in reverse
3300 order. The code below implements a FILO (stack) to do this. */
3301
3302struct stack_item
3303{
3304 int len;
3305 struct stack_item *prev;
7c543f7b 3306 gdb_byte *data;
2dd604e7
RE
3307};
3308
3309static struct stack_item *
df3b6708 3310push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3311{
3312 struct stack_item *si;
8d749320 3313 si = XNEW (struct stack_item);
7c543f7b 3314 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3315 si->len = len;
3316 si->prev = prev;
3317 memcpy (si->data, contents, len);
3318 return si;
3319}
3320
3321static struct stack_item *
3322pop_stack_item (struct stack_item *si)
3323{
3324 struct stack_item *dead = si;
3325 si = si->prev;
3326 xfree (dead->data);
3327 xfree (dead);
3328 return si;
3329}
3330
030197b4
AB
3331/* Implement the gdbarch type alignment method, overrides the generic
3332 alignment algorithm for anything that is arm specific. */
2af48f68 3333
030197b4
AB
3334static ULONGEST
3335arm_type_align (gdbarch *gdbarch, struct type *t)
2af48f68 3336{
2af48f68 3337 t = check_typedef (t);
030197b4 3338 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
2af48f68 3339 {
030197b4
AB
3340 /* Use the natural alignment for vector types (the same for
3341 scalar type), but the maximum alignment is 64-bit. */
3342 if (TYPE_LENGTH (t) > 8)
3343 return 8;
c4312b19 3344 else
030197b4 3345 return TYPE_LENGTH (t);
2af48f68 3346 }
030197b4
AB
3347
3348 /* Allow the common code to calculate the alignment. */
3349 return 0;
2af48f68
PB
3350}
3351
90445bd3
DJ
3352/* Possible base types for a candidate for passing and returning in
3353 VFP registers. */
3354
3355enum arm_vfp_cprc_base_type
3356{
3357 VFP_CPRC_UNKNOWN,
3358 VFP_CPRC_SINGLE,
3359 VFP_CPRC_DOUBLE,
3360 VFP_CPRC_VEC64,
3361 VFP_CPRC_VEC128
3362};
3363
3364/* The length of one element of base type B. */
3365
3366static unsigned
3367arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3368{
3369 switch (b)
3370 {
3371 case VFP_CPRC_SINGLE:
3372 return 4;
3373 case VFP_CPRC_DOUBLE:
3374 return 8;
3375 case VFP_CPRC_VEC64:
3376 return 8;
3377 case VFP_CPRC_VEC128:
3378 return 16;
3379 default:
3380 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3381 (int) b);
3382 }
3383}
3384
3385/* The character ('s', 'd' or 'q') for the type of VFP register used
3386 for passing base type B. */
3387
3388static int
3389arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3390{
3391 switch (b)
3392 {
3393 case VFP_CPRC_SINGLE:
3394 return 's';
3395 case VFP_CPRC_DOUBLE:
3396 return 'd';
3397 case VFP_CPRC_VEC64:
3398 return 'd';
3399 case VFP_CPRC_VEC128:
3400 return 'q';
3401 default:
3402 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3403 (int) b);
3404 }
3405}
3406
3407/* Determine whether T may be part of a candidate for passing and
3408 returning in VFP registers, ignoring the limit on the total number
3409 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3410 classification of the first valid component found; if it is not
3411 VFP_CPRC_UNKNOWN, all components must have the same classification
3412 as *BASE_TYPE. If it is found that T contains a type not permitted
3413 for passing and returning in VFP registers, a type differently
3414 classified from *BASE_TYPE, or two types differently classified
3415 from each other, return -1, otherwise return the total number of
3416 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3417 array). Vector types are not currently supported, matching the
3418 generic AAPCS support. */
90445bd3
DJ
3419
3420static int
3421arm_vfp_cprc_sub_candidate (struct type *t,
3422 enum arm_vfp_cprc_base_type *base_type)
3423{
3424 t = check_typedef (t);
3425 switch (TYPE_CODE (t))
3426 {
3427 case TYPE_CODE_FLT:
3428 switch (TYPE_LENGTH (t))
3429 {
3430 case 4:
3431 if (*base_type == VFP_CPRC_UNKNOWN)
3432 *base_type = VFP_CPRC_SINGLE;
3433 else if (*base_type != VFP_CPRC_SINGLE)
3434 return -1;
3435 return 1;
3436
3437 case 8:
3438 if (*base_type == VFP_CPRC_UNKNOWN)
3439 *base_type = VFP_CPRC_DOUBLE;
3440 else if (*base_type != VFP_CPRC_DOUBLE)
3441 return -1;
3442 return 1;
3443
3444 default:
3445 return -1;
3446 }
3447 break;
3448
817e0957
YQ
3449 case TYPE_CODE_COMPLEX:
3450 /* Arguments of complex T where T is one of the types float or
3451 double get treated as if they are implemented as:
3452
3453 struct complexT
3454 {
3455 T real;
3456 T imag;
5f52445b
YQ
3457 };
3458
3459 */
817e0957
YQ
3460 switch (TYPE_LENGTH (t))
3461 {
3462 case 8:
3463 if (*base_type == VFP_CPRC_UNKNOWN)
3464 *base_type = VFP_CPRC_SINGLE;
3465 else if (*base_type != VFP_CPRC_SINGLE)
3466 return -1;
3467 return 2;
3468
3469 case 16:
3470 if (*base_type == VFP_CPRC_UNKNOWN)
3471 *base_type = VFP_CPRC_DOUBLE;
3472 else if (*base_type != VFP_CPRC_DOUBLE)
3473 return -1;
3474 return 2;
3475
3476 default:
3477 return -1;
3478 }
3479 break;
3480
90445bd3
DJ
3481 case TYPE_CODE_ARRAY:
3482 {
c4312b19 3483 if (TYPE_VECTOR (t))
90445bd3 3484 {
c4312b19
YQ
3485 /* A 64-bit or 128-bit containerized vector type are VFP
3486 CPRCs. */
3487 switch (TYPE_LENGTH (t))
3488 {
3489 case 8:
3490 if (*base_type == VFP_CPRC_UNKNOWN)
3491 *base_type = VFP_CPRC_VEC64;
3492 return 1;
3493 case 16:
3494 if (*base_type == VFP_CPRC_UNKNOWN)
3495 *base_type = VFP_CPRC_VEC128;
3496 return 1;
3497 default:
3498 return -1;
3499 }
3500 }
3501 else
3502 {
3503 int count;
3504 unsigned unitlen;
3505
3506 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3507 base_type);
3508 if (count == -1)
3509 return -1;
3510 if (TYPE_LENGTH (t) == 0)
3511 {
3512 gdb_assert (count == 0);
3513 return 0;
3514 }
3515 else if (count == 0)
3516 return -1;
3517 unitlen = arm_vfp_cprc_unit_length (*base_type);
3518 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3519 return TYPE_LENGTH (t) / unitlen;
90445bd3 3520 }
90445bd3
DJ
3521 }
3522 break;
3523
3524 case TYPE_CODE_STRUCT:
3525 {
3526 int count = 0;
3527 unsigned unitlen;
3528 int i;
3529 for (i = 0; i < TYPE_NFIELDS (t); i++)
3530 {
1040b979
YQ
3531 int sub_count = 0;
3532
3533 if (!field_is_static (&TYPE_FIELD (t, i)))
3534 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3535 base_type);
90445bd3
DJ
3536 if (sub_count == -1)
3537 return -1;
3538 count += sub_count;
3539 }
3540 if (TYPE_LENGTH (t) == 0)
3541 {
3542 gdb_assert (count == 0);
3543 return 0;
3544 }
3545 else if (count == 0)
3546 return -1;
3547 unitlen = arm_vfp_cprc_unit_length (*base_type);
3548 if (TYPE_LENGTH (t) != unitlen * count)
3549 return -1;
3550 return count;
3551 }
3552
3553 case TYPE_CODE_UNION:
3554 {
3555 int count = 0;
3556 unsigned unitlen;
3557 int i;
3558 for (i = 0; i < TYPE_NFIELDS (t); i++)
3559 {
3560 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3561 base_type);
3562 if (sub_count == -1)
3563 return -1;
3564 count = (count > sub_count ? count : sub_count);
3565 }
3566 if (TYPE_LENGTH (t) == 0)
3567 {
3568 gdb_assert (count == 0);
3569 return 0;
3570 }
3571 else if (count == 0)
3572 return -1;
3573 unitlen = arm_vfp_cprc_unit_length (*base_type);
3574 if (TYPE_LENGTH (t) != unitlen * count)
3575 return -1;
3576 return count;
3577 }
3578
3579 default:
3580 break;
3581 }
3582
3583 return -1;
3584}
3585
3586/* Determine whether T is a VFP co-processor register candidate (CPRC)
3587 if passed to or returned from a non-variadic function with the VFP
3588 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3589 *BASE_TYPE to the base type for T and *COUNT to the number of
3590 elements of that base type before returning. */
3591
3592static int
3593arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3594 int *count)
3595{
3596 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3597 int c = arm_vfp_cprc_sub_candidate (t, &b);
3598 if (c <= 0 || c > 4)
3599 return 0;
3600 *base_type = b;
3601 *count = c;
3602 return 1;
3603}
3604
3605/* Return 1 if the VFP ABI should be used for passing arguments to and
3606 returning values from a function of type FUNC_TYPE, 0
3607 otherwise. */
3608
3609static int
3610arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3611{
3612 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3613 /* Variadic functions always use the base ABI. Assume that functions
3614 without debug info are not variadic. */
3615 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3616 return 0;
3617 /* The VFP ABI is only supported as a variant of AAPCS. */
3618 if (tdep->arm_abi != ARM_ABI_AAPCS)
3619 return 0;
3620 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3621}
3622
3623/* We currently only support passing parameters in integer registers, which
3624 conforms with GCC's default model, and VFP argument passing following
3625 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3626 we should probably support some of them based on the selected ABI. */
3627
3628static CORE_ADDR
7d9b040b 3629arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a 3630 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
cf84fa6b
AH
3631 struct value **args, CORE_ADDR sp,
3632 function_call_return_method return_method,
6a65450a 3633 CORE_ADDR struct_addr)
2dd604e7 3634{
e17a4113 3635 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3636 int argnum;
3637 int argreg;
3638 int nstack;
3639 struct stack_item *si = NULL;
90445bd3
DJ
3640 int use_vfp_abi;
3641 struct type *ftype;
3642 unsigned vfp_regs_free = (1 << 16) - 1;
3643
3644 /* Determine the type of this function and whether the VFP ABI
3645 applies. */
3646 ftype = check_typedef (value_type (function));
3647 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3648 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3649 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3650
6a65450a
AC
3651 /* Set the return address. For the ARM, the return breakpoint is
3652 always at BP_ADDR. */
9779414d 3653 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3654 bp_addr |= 1;
6a65450a 3655 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3656
3657 /* Walk through the list of args and determine how large a temporary
3658 stack is required. Need to take care here as structs may be
7a9dd1b2 3659 passed on the stack, and we have to push them. */
2dd604e7
RE
3660 nstack = 0;
3661
3662 argreg = ARM_A1_REGNUM;
3663 nstack = 0;
3664
2dd604e7
RE
3665 /* The struct_return pointer occupies the first parameter
3666 passing register. */
cf84fa6b 3667 if (return_method == return_method_struct)
2dd604e7
RE
3668 {
3669 if (arm_debug)
5af949e3 3670 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3671 gdbarch_register_name (gdbarch, argreg),
5af949e3 3672 paddress (gdbarch, struct_addr));
2dd604e7
RE
3673 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3674 argreg++;
3675 }
3676
3677 for (argnum = 0; argnum < nargs; argnum++)
3678 {
3679 int len;
3680 struct type *arg_type;
3681 struct type *target_type;
3682 enum type_code typecode;
8c6363cf 3683 const bfd_byte *val;
2af48f68 3684 int align;
90445bd3
DJ
3685 enum arm_vfp_cprc_base_type vfp_base_type;
3686 int vfp_base_count;
3687 int may_use_core_reg = 1;
2dd604e7 3688
df407dfe 3689 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3690 len = TYPE_LENGTH (arg_type);
3691 target_type = TYPE_TARGET_TYPE (arg_type);
3692 typecode = TYPE_CODE (arg_type);
8c6363cf 3693 val = value_contents (args[argnum]);
2dd604e7 3694
030197b4 3695 align = type_align (arg_type);
2af48f68 3696 /* Round alignment up to a whole number of words. */
f0452268
AH
3697 align = (align + ARM_INT_REGISTER_SIZE - 1)
3698 & ~(ARM_INT_REGISTER_SIZE - 1);
2af48f68
PB
3699 /* Different ABIs have different maximum alignments. */
3700 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3701 {
3702 /* The APCS ABI only requires word alignment. */
f0452268 3703 align = ARM_INT_REGISTER_SIZE;
2af48f68
PB
3704 }
3705 else
3706 {
3707 /* The AAPCS requires at most doubleword alignment. */
f0452268
AH
3708 if (align > ARM_INT_REGISTER_SIZE * 2)
3709 align = ARM_INT_REGISTER_SIZE * 2;
2af48f68
PB
3710 }
3711
90445bd3
DJ
3712 if (use_vfp_abi
3713 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3714 &vfp_base_count))
3715 {
3716 int regno;
3717 int unit_length;
3718 int shift;
3719 unsigned mask;
3720
3721 /* Because this is a CPRC it cannot go in a core register or
3722 cause a core register to be skipped for alignment.
3723 Either it goes in VFP registers and the rest of this loop
3724 iteration is skipped for this argument, or it goes on the
3725 stack (and the stack alignment code is correct for this
3726 case). */
3727 may_use_core_reg = 0;
3728
3729 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3730 shift = unit_length / 4;
3731 mask = (1 << (shift * vfp_base_count)) - 1;
3732 for (regno = 0; regno < 16; regno += shift)
3733 if (((vfp_regs_free >> regno) & mask) == mask)
3734 break;
3735
3736 if (regno < 16)
3737 {
3738 int reg_char;
3739 int reg_scaled;
3740 int i;
3741
3742 vfp_regs_free &= ~(mask << regno);
3743 reg_scaled = regno / shift;
3744 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3745 for (i = 0; i < vfp_base_count; i++)
3746 {
3747 char name_buf[4];
3748 int regnum;
58d6951d
DJ
3749 if (reg_char == 'q')
3750 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3751 val + i * unit_length);
58d6951d
DJ
3752 else
3753 {
8c042590
PM
3754 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3755 reg_char, reg_scaled + i);
58d6951d
DJ
3756 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3757 strlen (name_buf));
b66f5587 3758 regcache->cooked_write (regnum, val + i * unit_length);
58d6951d 3759 }
90445bd3
DJ
3760 }
3761 continue;
3762 }
3763 else
3764 {
3765 /* This CPRC could not go in VFP registers, so all VFP
3766 registers are now marked as used. */
3767 vfp_regs_free = 0;
3768 }
3769 }
3770
2af48f68
PB
3771 /* Push stack padding for dowubleword alignment. */
3772 if (nstack & (align - 1))
3773 {
f0452268
AH
3774 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3775 nstack += ARM_INT_REGISTER_SIZE;
2af48f68
PB
3776 }
3777
3778 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3779 if (may_use_core_reg
3780 && argreg <= ARM_LAST_ARG_REGNUM
f0452268 3781 && align > ARM_INT_REGISTER_SIZE
2af48f68
PB
3782 && argreg & 1)
3783 argreg++;
3784
2dd604e7
RE
3785 /* If the argument is a pointer to a function, and it is a
3786 Thumb function, create a LOCAL copy of the value and set
3787 the THUMB bit in it. */
3788 if (TYPE_CODE_PTR == typecode
3789 && target_type != NULL
f96b8fa0 3790 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3791 {
e17a4113 3792 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3793 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3794 {
224c3ddb 3795 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3796 store_unsigned_integer (copy, len, byte_order,
e17a4113 3797 MAKE_THUMB_ADDR (regval));
8c6363cf 3798 val = copy;
2dd604e7
RE
3799 }
3800 }
3801
3802 /* Copy the argument to general registers or the stack in
3803 register-sized pieces. Large arguments are split between
3804 registers and stack. */
3805 while (len > 0)
3806 {
f0452268
AH
3807 int partial_len = len < ARM_INT_REGISTER_SIZE
3808 ? len : ARM_INT_REGISTER_SIZE;
ef9bd0b8
YQ
3809 CORE_ADDR regval
3810 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3811
90445bd3 3812 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3813 {
3814 /* The argument is being passed in a general purpose
3815 register. */
e17a4113 3816 if (byte_order == BFD_ENDIAN_BIG)
f0452268 3817 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3818 if (arm_debug)
3819 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3820 argnum,
3821 gdbarch_register_name
2af46ca0 3822 (gdbarch, argreg),
f0452268 3823 phex (regval, ARM_INT_REGISTER_SIZE));
2dd604e7
RE
3824 regcache_cooked_write_unsigned (regcache, argreg, regval);
3825 argreg++;
3826 }
3827 else
3828 {
f0452268 3829 gdb_byte buf[ARM_INT_REGISTER_SIZE];
ef9bd0b8
YQ
3830
3831 memset (buf, 0, sizeof (buf));
3832 store_unsigned_integer (buf, partial_len, byte_order, regval);
3833
2dd604e7
RE
3834 /* Push the arguments onto the stack. */
3835 if (arm_debug)
3836 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3837 argnum, nstack);
f0452268
AH
3838 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3839 nstack += ARM_INT_REGISTER_SIZE;
2dd604e7
RE
3840 }
3841
3842 len -= partial_len;
3843 val += partial_len;
3844 }
3845 }
3846 /* If we have an odd number of words to push, then decrement the stack
3847 by one word now, so first stack argument will be dword aligned. */
3848 if (nstack & 4)
3849 sp -= 4;
3850
3851 while (si)
3852 {
3853 sp -= si->len;
3854 write_memory (sp, si->data, si->len);
3855 si = pop_stack_item (si);
3856 }
3857
3858 /* Finally, update teh SP register. */
3859 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3860
3861 return sp;
3862}
3863
f53f0d0b
PB
3864
3865/* Always align the frame to an 8-byte boundary. This is required on
3866 some platforms and harmless on the rest. */
3867
3868static CORE_ADDR
3869arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3870{
3871 /* Align the stack to eight bytes. */
3872 return sp & ~ (CORE_ADDR) 7;
3873}
3874
c906108c 3875static void
12b27276 3876print_fpu_flags (struct ui_file *file, int flags)
c906108c 3877{
c5aa993b 3878 if (flags & (1 << 0))
12b27276 3879 fputs_filtered ("IVO ", file);
c5aa993b 3880 if (flags & (1 << 1))
12b27276 3881 fputs_filtered ("DVZ ", file);
c5aa993b 3882 if (flags & (1 << 2))
12b27276 3883 fputs_filtered ("OFL ", file);
c5aa993b 3884 if (flags & (1 << 3))
12b27276 3885 fputs_filtered ("UFL ", file);
c5aa993b 3886 if (flags & (1 << 4))
12b27276
WN
3887 fputs_filtered ("INX ", file);
3888 fputc_filtered ('\n', file);
c906108c
SS
3889}
3890
5e74b15c
RE
3891/* Print interesting information about the floating point processor
3892 (if present) or emulator. */
34e8f22d 3893static void
d855c300 3894arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3895 struct frame_info *frame, const char *args)
c906108c 3896{
9c9acae0 3897 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3898 int type;
3899
3900 type = (status >> 24) & 127;
edefbb7c 3901 if (status & (1 << 31))
12b27276 3902 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3903 else
12b27276 3904 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3905 /* i18n: [floating point unit] mask */
12b27276
WN
3906 fputs_filtered (_("mask: "), file);
3907 print_fpu_flags (file, status >> 16);
edefbb7c 3908 /* i18n: [floating point unit] flags */
12b27276
WN
3909 fputs_filtered (_("flags: "), file);
3910 print_fpu_flags (file, status);
c906108c
SS
3911}
3912
27067745
UW
3913/* Construct the ARM extended floating point type. */
3914static struct type *
3915arm_ext_type (struct gdbarch *gdbarch)
3916{
3917 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3918
3919 if (!tdep->arm_ext_type)
3920 tdep->arm_ext_type
e9bb382b 3921 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3922 floatformats_arm_ext);
3923
3924 return tdep->arm_ext_type;
3925}
3926
58d6951d
DJ
3927static struct type *
3928arm_neon_double_type (struct gdbarch *gdbarch)
3929{
3930 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3931
3932 if (tdep->neon_double_type == NULL)
3933 {
3934 struct type *t, *elem;
3935
3936 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3937 TYPE_CODE_UNION);
3938 elem = builtin_type (gdbarch)->builtin_uint8;
3939 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3940 elem = builtin_type (gdbarch)->builtin_uint16;
3941 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3942 elem = builtin_type (gdbarch)->builtin_uint32;
3943 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3944 elem = builtin_type (gdbarch)->builtin_uint64;
3945 append_composite_type_field (t, "u64", elem);
3946 elem = builtin_type (gdbarch)->builtin_float;
3947 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3948 elem = builtin_type (gdbarch)->builtin_double;
3949 append_composite_type_field (t, "f64", elem);
3950
3951 TYPE_VECTOR (t) = 1;
3952 TYPE_NAME (t) = "neon_d";
3953 tdep->neon_double_type = t;
3954 }
3955
3956 return tdep->neon_double_type;
3957}
3958
3959/* FIXME: The vector types are not correctly ordered on big-endian
3960 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3961 bits of d0 - regardless of what unit size is being held in d0. So
3962 the offset of the first uint8 in d0 is 7, but the offset of the
3963 first float is 4. This code works as-is for little-endian
3964 targets. */
3965
3966static struct type *
3967arm_neon_quad_type (struct gdbarch *gdbarch)
3968{
3969 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3970
3971 if (tdep->neon_quad_type == NULL)
3972 {
3973 struct type *t, *elem;
3974
3975 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3976 TYPE_CODE_UNION);
3977 elem = builtin_type (gdbarch)->builtin_uint8;
3978 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3979 elem = builtin_type (gdbarch)->builtin_uint16;
3980 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3981 elem = builtin_type (gdbarch)->builtin_uint32;
3982 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3983 elem = builtin_type (gdbarch)->builtin_uint64;
3984 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3985 elem = builtin_type (gdbarch)->builtin_float;
3986 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3987 elem = builtin_type (gdbarch)->builtin_double;
3988 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3989
3990 TYPE_VECTOR (t) = 1;
3991 TYPE_NAME (t) = "neon_q";
3992 tdep->neon_quad_type = t;
3993 }
3994
3995 return tdep->neon_quad_type;
3996}
3997
34e8f22d
RE
3998/* Return the GDB type object for the "standard" data type of data in
3999 register N. */
4000
4001static struct type *
7a5ea0d4 4002arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4003{
58d6951d
DJ
4004 int num_regs = gdbarch_num_regs (gdbarch);
4005
4006 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4007 && regnum >= num_regs && regnum < num_regs + 32)
4008 return builtin_type (gdbarch)->builtin_float;
4009
4010 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4011 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4012 return arm_neon_quad_type (gdbarch);
4013
4014 /* If the target description has register information, we are only
4015 in this function so that we can override the types of
4016 double-precision registers for NEON. */
4017 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4018 {
4019 struct type *t = tdesc_register_type (gdbarch, regnum);
4020
4021 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4022 && TYPE_CODE (t) == TYPE_CODE_FLT
4023 && gdbarch_tdep (gdbarch)->have_neon)
4024 return arm_neon_double_type (gdbarch);
4025 else
4026 return t;
4027 }
4028
34e8f22d 4029 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4030 {
4031 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4032 return builtin_type (gdbarch)->builtin_void;
4033
4034 return arm_ext_type (gdbarch);
4035 }
e4c16157 4036 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4037 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4038 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4039 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4040 else if (regnum >= ARRAY_SIZE (arm_register_names))
4041 /* These registers are only supported on targets which supply
4042 an XML description. */
df4df182 4043 return builtin_type (gdbarch)->builtin_int0;
032758dc 4044 else
df4df182 4045 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4046}
4047
ff6f572f
DJ
4048/* Map a DWARF register REGNUM onto the appropriate GDB register
4049 number. */
4050
4051static int
d3f73121 4052arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4053{
4054 /* Core integer regs. */
4055 if (reg >= 0 && reg <= 15)
4056 return reg;
4057
4058 /* Legacy FPA encoding. These were once used in a way which
4059 overlapped with VFP register numbering, so their use is
4060 discouraged, but GDB doesn't support the ARM toolchain
4061 which used them for VFP. */
4062 if (reg >= 16 && reg <= 23)
4063 return ARM_F0_REGNUM + reg - 16;
4064
4065 /* New assignments for the FPA registers. */
4066 if (reg >= 96 && reg <= 103)
4067 return ARM_F0_REGNUM + reg - 96;
4068
4069 /* WMMX register assignments. */
4070 if (reg >= 104 && reg <= 111)
4071 return ARM_WCGR0_REGNUM + reg - 104;
4072
4073 if (reg >= 112 && reg <= 127)
4074 return ARM_WR0_REGNUM + reg - 112;
4075
4076 if (reg >= 192 && reg <= 199)
4077 return ARM_WC0_REGNUM + reg - 192;
4078
58d6951d
DJ
4079 /* VFP v2 registers. A double precision value is actually
4080 in d1 rather than s2, but the ABI only defines numbering
4081 for the single precision registers. This will "just work"
4082 in GDB for little endian targets (we'll read eight bytes,
4083 starting in s0 and then progressing to s1), but will be
4084 reversed on big endian targets with VFP. This won't
4085 be a problem for the new Neon quad registers; you're supposed
4086 to use DW_OP_piece for those. */
4087 if (reg >= 64 && reg <= 95)
4088 {
4089 char name_buf[4];
4090
8c042590 4091 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4092 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4093 strlen (name_buf));
4094 }
4095
4096 /* VFP v3 / Neon registers. This range is also used for VFP v2
4097 registers, except that it now describes d0 instead of s0. */
4098 if (reg >= 256 && reg <= 287)
4099 {
4100 char name_buf[4];
4101
8c042590 4102 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4103 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4104 strlen (name_buf));
4105 }
4106
ff6f572f
DJ
4107 return -1;
4108}
4109
26216b98
AC
4110/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4111static int
e7faf938 4112arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4113{
4114 int reg = regnum;
e7faf938 4115 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4116
ff6f572f
DJ
4117 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4118 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4119
4120 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4121 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4122
4123 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4124 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4125
26216b98
AC
4126 if (reg < NUM_GREGS)
4127 return SIM_ARM_R0_REGNUM + reg;
4128 reg -= NUM_GREGS;
4129
4130 if (reg < NUM_FREGS)
4131 return SIM_ARM_FP0_REGNUM + reg;
4132 reg -= NUM_FREGS;
4133
4134 if (reg < NUM_SREGS)
4135 return SIM_ARM_FPS_REGNUM + reg;
4136 reg -= NUM_SREGS;
4137
edefbb7c 4138 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4139}
34e8f22d 4140
d9311bfa
AT
4141/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4142 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4143 NULL if an error occurs. BUF is freed. */
c906108c 4144
d9311bfa
AT
4145static gdb_byte *
4146extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4147 int old_len, int new_len)
4148{
4149 gdb_byte *new_buf;
4150 int bytes_to_read = new_len - old_len;
c906108c 4151
d9311bfa
AT
4152 new_buf = (gdb_byte *) xmalloc (new_len);
4153 memcpy (new_buf + bytes_to_read, buf, old_len);
4154 xfree (buf);
198cd59d 4155 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
d9311bfa
AT
4156 {
4157 xfree (new_buf);
4158 return NULL;
c906108c 4159 }
d9311bfa 4160 return new_buf;
c906108c
SS
4161}
4162
d9311bfa
AT
4163/* An IT block is at most the 2-byte IT instruction followed by
4164 four 4-byte instructions. The furthest back we must search to
4165 find an IT block that affects the current instruction is thus
4166 2 + 3 * 4 == 14 bytes. */
4167#define MAX_IT_BLOCK_PREFIX 14
177321bd 4168
d9311bfa
AT
4169/* Use a quick scan if there are more than this many bytes of
4170 code. */
4171#define IT_SCAN_THRESHOLD 32
177321bd 4172
d9311bfa
AT
4173/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4174 A breakpoint in an IT block may not be hit, depending on the
4175 condition flags. */
ad527d2e 4176static CORE_ADDR
d9311bfa 4177arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4178{
d9311bfa
AT
4179 gdb_byte *buf;
4180 char map_type;
4181 CORE_ADDR boundary, func_start;
4182 int buf_len;
4183 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4184 int i, any, last_it, last_it_count;
177321bd 4185
d9311bfa
AT
4186 /* If we are using BKPT breakpoints, none of this is necessary. */
4187 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4188 return bpaddr;
177321bd 4189
d9311bfa
AT
4190 /* ARM mode does not have this problem. */
4191 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4192 return bpaddr;
177321bd 4193
d9311bfa
AT
4194 /* We are setting a breakpoint in Thumb code that could potentially
4195 contain an IT block. The first step is to find how much Thumb
4196 code there is; we do not need to read outside of known Thumb
4197 sequences. */
4198 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4199 if (map_type == 0)
4200 /* Thumb-2 code must have mapping symbols to have a chance. */
4201 return bpaddr;
9dca5578 4202
d9311bfa 4203 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4204
d9311bfa
AT
4205 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4206 && func_start > boundary)
4207 boundary = func_start;
9dca5578 4208
d9311bfa
AT
4209 /* Search for a candidate IT instruction. We have to do some fancy
4210 footwork to distinguish a real IT instruction from the second
4211 half of a 32-bit instruction, but there is no need for that if
4212 there's no candidate. */
325fac50 4213 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4214 if (buf_len == 0)
4215 /* No room for an IT instruction. */
4216 return bpaddr;
c906108c 4217
d9311bfa 4218 buf = (gdb_byte *) xmalloc (buf_len);
198cd59d 4219 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
d9311bfa
AT
4220 return bpaddr;
4221 any = 0;
4222 for (i = 0; i < buf_len; i += 2)
c906108c 4223 {
d9311bfa
AT
4224 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4225 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4226 {
d9311bfa
AT
4227 any = 1;
4228 break;
25b41d01 4229 }
c906108c 4230 }
d9311bfa
AT
4231
4232 if (any == 0)
c906108c 4233 {
d9311bfa
AT
4234 xfree (buf);
4235 return bpaddr;
f9d67f43
DJ
4236 }
4237
4238 /* OK, the code bytes before this instruction contain at least one
4239 halfword which resembles an IT instruction. We know that it's
4240 Thumb code, but there are still two possibilities. Either the
4241 halfword really is an IT instruction, or it is the second half of
4242 a 32-bit Thumb instruction. The only way we can tell is to
4243 scan forwards from a known instruction boundary. */
4244 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4245 {
4246 int definite;
4247
4248 /* There's a lot of code before this instruction. Start with an
4249 optimistic search; it's easy to recognize halfwords that can
4250 not be the start of a 32-bit instruction, and use that to
4251 lock on to the instruction boundaries. */
4252 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4253 if (buf == NULL)
4254 return bpaddr;
4255 buf_len = IT_SCAN_THRESHOLD;
4256
4257 definite = 0;
4258 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4259 {
4260 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4261 if (thumb_insn_size (inst1) == 2)
4262 {
4263 definite = 1;
4264 break;
4265 }
4266 }
4267
4268 /* At this point, if DEFINITE, BUF[I] is the first place we
4269 are sure that we know the instruction boundaries, and it is far
4270 enough from BPADDR that we could not miss an IT instruction
4271 affecting BPADDR. If ! DEFINITE, give up - start from a
4272 known boundary. */
4273 if (! definite)
4274 {
0963b4bd
MS
4275 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4276 bpaddr - boundary);
f9d67f43
DJ
4277 if (buf == NULL)
4278 return bpaddr;
4279 buf_len = bpaddr - boundary;
4280 i = 0;
4281 }
4282 }
4283 else
4284 {
4285 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4286 if (buf == NULL)
4287 return bpaddr;
4288 buf_len = bpaddr - boundary;
4289 i = 0;
4290 }
4291
4292 /* Scan forwards. Find the last IT instruction before BPADDR. */
4293 last_it = -1;
4294 last_it_count = 0;
4295 while (i < buf_len)
4296 {
4297 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4298 last_it_count--;
4299 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4300 {
4301 last_it = i;
4302 if (inst1 & 0x0001)
4303 last_it_count = 4;
4304 else if (inst1 & 0x0002)
4305 last_it_count = 3;
4306 else if (inst1 & 0x0004)
4307 last_it_count = 2;
4308 else
4309 last_it_count = 1;
4310 }
4311 i += thumb_insn_size (inst1);
4312 }
4313
4314 xfree (buf);
4315
4316 if (last_it == -1)
4317 /* There wasn't really an IT instruction after all. */
4318 return bpaddr;
4319
4320 if (last_it_count < 1)
4321 /* It was too far away. */
4322 return bpaddr;
4323
4324 /* This really is a trouble spot. Move the breakpoint to the IT
4325 instruction. */
4326 return bpaddr - buf_len + last_it;
4327}
4328
cca44b1b 4329/* ARM displaced stepping support.
c906108c 4330
cca44b1b 4331 Generally ARM displaced stepping works as follows:
c906108c 4332
cca44b1b 4333 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4334 arm_process_displaced_insn. Depending on the type of instruction, it is
4335 then copied to a scratch location, possibly in a modified form. The
4336 copy_* set of functions performs such modification, as necessary. A
4337 breakpoint is placed after the modified instruction in the scratch space
4338 to return control to GDB. Note in particular that instructions which
4339 modify the PC will no longer do so after modification.
c5aa993b 4340
cca44b1b
JB
4341 2. The instruction is single-stepped, by setting the PC to the scratch
4342 location address, and resuming. Control returns to GDB when the
4343 breakpoint is hit.
c5aa993b 4344
cca44b1b
JB
4345 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4346 function used for the current instruction. This function's job is to
4347 put the CPU/memory state back to what it would have been if the
4348 instruction had been executed unmodified in its original location. */
c5aa993b 4349
cca44b1b
JB
4350/* NOP instruction (mov r0, r0). */
4351#define ARM_NOP 0xe1a00000
34518530 4352#define THUMB_NOP 0x4600
cca44b1b
JB
4353
4354/* Helper for register reads for displaced stepping. In particular, this
4355 returns the PC as it would be seen by the instruction at its original
4356 location. */
4357
4358ULONGEST
cfba9872 4359displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
36073a92 4360 int regno)
cca44b1b
JB
4361{
4362 ULONGEST ret;
36073a92 4363 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4364
bf9f652a 4365 if (regno == ARM_PC_REGNUM)
cca44b1b 4366 {
4db71c0b
YQ
4367 /* Compute pipeline offset:
4368 - When executing an ARM instruction, PC reads as the address of the
4369 current instruction plus 8.
4370 - When executing a Thumb instruction, PC reads as the address of the
4371 current instruction plus 4. */
4372
36073a92 4373 if (!dsc->is_thumb)
4db71c0b
YQ
4374 from += 8;
4375 else
4376 from += 4;
4377
cca44b1b
JB
4378 if (debug_displaced)
4379 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4380 (unsigned long) from);
4381 return (ULONGEST) from;
cca44b1b 4382 }
c906108c 4383 else
cca44b1b
JB
4384 {
4385 regcache_cooked_read_unsigned (regs, regno, &ret);
4386 if (debug_displaced)
4387 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4388 regno, (unsigned long) ret);
4389 return ret;
4390 }
c906108c
SS
4391}
4392
cca44b1b
JB
4393static int
4394displaced_in_arm_mode (struct regcache *regs)
4395{
4396 ULONGEST ps;
ac7936df 4397 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
66e810cd 4398
cca44b1b 4399 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4400
9779414d 4401 return (ps & t_bit) == 0;
cca44b1b 4402}
66e810cd 4403
cca44b1b 4404/* Write to the PC as from a branch instruction. */
c906108c 4405
cca44b1b 4406static void
cfba9872 4407branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
36073a92 4408 ULONGEST val)
c906108c 4409{
36073a92 4410 if (!dsc->is_thumb)
cca44b1b
JB
4411 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4412 architecture versions < 6. */
0963b4bd
MS
4413 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4414 val & ~(ULONGEST) 0x3);
cca44b1b 4415 else
0963b4bd
MS
4416 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4417 val & ~(ULONGEST) 0x1);
cca44b1b 4418}
66e810cd 4419
cca44b1b
JB
4420/* Write to the PC as from a branch-exchange instruction. */
4421
4422static void
4423bx_write_pc (struct regcache *regs, ULONGEST val)
4424{
4425 ULONGEST ps;
ac7936df 4426 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
cca44b1b
JB
4427
4428 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4429
4430 if ((val & 1) == 1)
c906108c 4431 {
9779414d 4432 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4433 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4434 }
4435 else if ((val & 2) == 0)
4436 {
9779414d 4437 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4438 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4439 }
4440 else
4441 {
cca44b1b
JB
4442 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4443 mode, align dest to 4 bytes). */
4444 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4445 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4446 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4447 }
4448}
ed9a39eb 4449
cca44b1b 4450/* Write to the PC as if from a load instruction. */
ed9a39eb 4451
34e8f22d 4452static void
cfba9872 4453load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
36073a92 4454 ULONGEST val)
ed9a39eb 4455{
cca44b1b
JB
4456 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4457 bx_write_pc (regs, val);
4458 else
36073a92 4459 branch_write_pc (regs, dsc, val);
cca44b1b 4460}
be8626e0 4461
cca44b1b
JB
4462/* Write to the PC as if from an ALU instruction. */
4463
4464static void
cfba9872 4465alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
36073a92 4466 ULONGEST val)
cca44b1b 4467{
36073a92 4468 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4469 bx_write_pc (regs, val);
4470 else
36073a92 4471 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4472}
4473
4474/* Helper for writing to registers for displaced stepping. Writing to the PC
4475 has a varying effects depending on the instruction which does the write:
4476 this is controlled by the WRITE_PC argument. */
4477
4478void
cfba9872 4479displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
cca44b1b
JB
4480 int regno, ULONGEST val, enum pc_write_style write_pc)
4481{
bf9f652a 4482 if (regno == ARM_PC_REGNUM)
08216dd7 4483 {
cca44b1b
JB
4484 if (debug_displaced)
4485 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4486 (unsigned long) val);
4487 switch (write_pc)
08216dd7 4488 {
cca44b1b 4489 case BRANCH_WRITE_PC:
36073a92 4490 branch_write_pc (regs, dsc, val);
08216dd7
RE
4491 break;
4492
cca44b1b
JB
4493 case BX_WRITE_PC:
4494 bx_write_pc (regs, val);
4495 break;
4496
4497 case LOAD_WRITE_PC:
36073a92 4498 load_write_pc (regs, dsc, val);
cca44b1b
JB
4499 break;
4500
4501 case ALU_WRITE_PC:
36073a92 4502 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4503 break;
4504
4505 case CANNOT_WRITE_PC:
4506 warning (_("Instruction wrote to PC in an unexpected way when "
4507 "single-stepping"));
08216dd7
RE
4508 break;
4509
4510 default:
97b9747c
JB
4511 internal_error (__FILE__, __LINE__,
4512 _("Invalid argument to displaced_write_reg"));
08216dd7 4513 }
b508a996 4514
cca44b1b 4515 dsc->wrote_to_pc = 1;
b508a996 4516 }
ed9a39eb 4517 else
b508a996 4518 {
cca44b1b
JB
4519 if (debug_displaced)
4520 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4521 regno, (unsigned long) val);
4522 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4523 }
34e8f22d
RE
4524}
4525
cca44b1b
JB
4526/* This function is used to concisely determine if an instruction INSN
4527 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4528 corresponding fields of BITMASK set to 0b1111. The function
4529 returns return 1 if any of these fields in INSN reference the PC
4530 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4531
4532static int
cca44b1b 4533insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4534{
cca44b1b 4535 uint32_t lowbit = 1;
67255d04 4536
cca44b1b
JB
4537 while (bitmask != 0)
4538 {
4539 uint32_t mask;
44e1a9eb 4540
cca44b1b
JB
4541 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4542 ;
67255d04 4543
cca44b1b
JB
4544 if (!lowbit)
4545 break;
67255d04 4546
cca44b1b 4547 mask = lowbit * 0xf;
67255d04 4548
cca44b1b
JB
4549 if ((insn & mask) == mask)
4550 return 1;
4551
4552 bitmask &= ~mask;
67255d04
RE
4553 }
4554
cca44b1b
JB
4555 return 0;
4556}
2af48f68 4557
cca44b1b
JB
4558/* The simplest copy function. Many instructions have the same effect no
4559 matter what address they are executed at: in those cases, use this. */
67255d04 4560
cca44b1b 4561static int
7ff120b4 4562arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
cfba9872 4563 const char *iname, arm_displaced_step_closure *dsc)
cca44b1b
JB
4564{
4565 if (debug_displaced)
4566 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4567 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4568 iname);
67255d04 4569
cca44b1b 4570 dsc->modinsn[0] = insn;
67255d04 4571
cca44b1b
JB
4572 return 0;
4573}
4574
34518530
YQ
4575static int
4576thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4577 uint16_t insn2, const char *iname,
cfba9872 4578 arm_displaced_step_closure *dsc)
34518530
YQ
4579{
4580 if (debug_displaced)
4581 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4582 "opcode/class '%s' unmodified\n", insn1, insn2,
4583 iname);
4584
4585 dsc->modinsn[0] = insn1;
4586 dsc->modinsn[1] = insn2;
4587 dsc->numinsns = 2;
4588
4589 return 0;
4590}
4591
4592/* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4593 modification. */
4594static int
615234c1 4595thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530 4596 const char *iname,
cfba9872 4597 arm_displaced_step_closure *dsc)
34518530
YQ
4598{
4599 if (debug_displaced)
4600 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4601 "opcode/class '%s' unmodified\n", insn,
4602 iname);
4603
4604 dsc->modinsn[0] = insn;
4605
4606 return 0;
4607}
4608
cca44b1b
JB
4609/* Preload instructions with immediate offset. */
4610
4611static void
6e39997a 4612cleanup_preload (struct gdbarch *gdbarch,
cfba9872 4613 struct regcache *regs, arm_displaced_step_closure *dsc)
cca44b1b
JB
4614{
4615 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4616 if (!dsc->u.preload.immed)
4617 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4618}
4619
7ff120b4
YQ
4620static void
4621install_preload (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 4622 arm_displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4623{
cca44b1b 4624 ULONGEST rn_val;
cca44b1b
JB
4625 /* Preload instructions:
4626
4627 {pli/pld} [rn, #+/-imm]
4628 ->
4629 {pli/pld} [r0, #+/-imm]. */
4630
36073a92
YQ
4631 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4632 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4633 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4634 dsc->u.preload.immed = 1;
4635
cca44b1b 4636 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4637}
4638
cca44b1b 4639static int
7ff120b4 4640arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cfba9872 4641 arm_displaced_step_closure *dsc)
cca44b1b
JB
4642{
4643 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4644
7ff120b4
YQ
4645 if (!insn_references_pc (insn, 0x000f0000ul))
4646 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4647
4648 if (debug_displaced)
4649 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4650 (unsigned long) insn);
4651
7ff120b4
YQ
4652 dsc->modinsn[0] = insn & 0xfff0ffff;
4653
4654 install_preload (gdbarch, regs, dsc, rn);
4655
4656 return 0;
4657}
4658
34518530
YQ
4659static int
4660thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
cfba9872 4661 struct regcache *regs, arm_displaced_step_closure *dsc)
34518530
YQ
4662{
4663 unsigned int rn = bits (insn1, 0, 3);
4664 unsigned int u_bit = bit (insn1, 7);
4665 int imm12 = bits (insn2, 0, 11);
4666 ULONGEST pc_val;
4667
4668 if (rn != ARM_PC_REGNUM)
4669 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4670
4671 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4672 PLD (literal) Encoding T1. */
4673 if (debug_displaced)
4674 fprintf_unfiltered (gdb_stdlog,
4675 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4676 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4677 imm12);
4678
4679 if (!u_bit)
4680 imm12 = -1 * imm12;
4681
4682 /* Rewrite instruction {pli/pld} PC imm12 into:
4683 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4684
4685 {pli/pld} [r0, r1]
4686
4687 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4688
4689 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4690 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4691
4692 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4693
4694 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4695 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4696 dsc->u.preload.immed = 0;
4697
4698 /* {pli/pld} [r0, r1] */
4699 dsc->modinsn[0] = insn1 & 0xfff0;
4700 dsc->modinsn[1] = 0xf001;
4701 dsc->numinsns = 2;
4702
4703 dsc->cleanup = &cleanup_preload;
4704 return 0;
4705}
4706
7ff120b4
YQ
4707/* Preload instructions with register offset. */
4708
4709static void
4710install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 4711 arm_displaced_step_closure *dsc, unsigned int rn,
7ff120b4
YQ
4712 unsigned int rm)
4713{
4714 ULONGEST rn_val, rm_val;
4715
cca44b1b
JB
4716 /* Preload register-offset instructions:
4717
4718 {pli/pld} [rn, rm {, shift}]
4719 ->
4720 {pli/pld} [r0, r1 {, shift}]. */
4721
36073a92
YQ
4722 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4723 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4724 rn_val = displaced_read_reg (regs, dsc, rn);
4725 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4726 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4727 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4728 dsc->u.preload.immed = 0;
4729
cca44b1b 4730 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4731}
4732
4733static int
4734arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4735 struct regcache *regs,
cfba9872 4736 arm_displaced_step_closure *dsc)
7ff120b4
YQ
4737{
4738 unsigned int rn = bits (insn, 16, 19);
4739 unsigned int rm = bits (insn, 0, 3);
4740
4741
4742 if (!insn_references_pc (insn, 0x000f000ful))
4743 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4744
4745 if (debug_displaced)
4746 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4747 (unsigned long) insn);
4748
4749 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4750
7ff120b4 4751 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4752 return 0;
4753}
4754
4755/* Copy/cleanup coprocessor load and store instructions. */
4756
4757static void
6e39997a 4758cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b 4759 struct regcache *regs,
cfba9872 4760 arm_displaced_step_closure *dsc)
cca44b1b 4761{
36073a92 4762 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4763
4764 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4765
4766 if (dsc->u.ldst.writeback)
4767 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4768}
4769
7ff120b4
YQ
4770static void
4771install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 4772 arm_displaced_step_closure *dsc,
7ff120b4 4773 int writeback, unsigned int rn)
cca44b1b 4774{
cca44b1b 4775 ULONGEST rn_val;
cca44b1b 4776
cca44b1b
JB
4777 /* Coprocessor load/store instructions:
4778
4779 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4780 ->
4781 {stc/stc2} [r0, #+/-imm].
4782
4783 ldc/ldc2 are handled identically. */
4784
36073a92
YQ
4785 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4786 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4787 /* PC should be 4-byte aligned. */
4788 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4789 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4790
7ff120b4 4791 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4792 dsc->u.ldst.rn = rn;
4793
7ff120b4
YQ
4794 dsc->cleanup = &cleanup_copro_load_store;
4795}
4796
4797static int
4798arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4799 struct regcache *regs,
cfba9872 4800 arm_displaced_step_closure *dsc)
7ff120b4
YQ
4801{
4802 unsigned int rn = bits (insn, 16, 19);
4803
4804 if (!insn_references_pc (insn, 0x000f0000ul))
4805 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4806
4807 if (debug_displaced)
4808 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4809 "load/store insn %.8lx\n", (unsigned long) insn);
4810
cca44b1b
JB
4811 dsc->modinsn[0] = insn & 0xfff0ffff;
4812
7ff120b4 4813 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4814
4815 return 0;
4816}
4817
34518530
YQ
4818static int
4819thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4820 uint16_t insn2, struct regcache *regs,
cfba9872 4821 arm_displaced_step_closure *dsc)
34518530
YQ
4822{
4823 unsigned int rn = bits (insn1, 0, 3);
4824
4825 if (rn != ARM_PC_REGNUM)
4826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4827 "copro load/store", dsc);
4828
4829 if (debug_displaced)
4830 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4831 "load/store insn %.4x%.4x\n", insn1, insn2);
4832
4833 dsc->modinsn[0] = insn1 & 0xfff0;
4834 dsc->modinsn[1] = insn2;
4835 dsc->numinsns = 2;
4836
4837 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4838 doesn't support writeback, so pass 0. */
4839 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4840
4841 return 0;
4842}
4843
cca44b1b
JB
4844/* Clean up branch instructions (actually perform the branch, by setting
4845 PC). */
4846
4847static void
6e39997a 4848cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 4849 arm_displaced_step_closure *dsc)
cca44b1b 4850{
36073a92 4851 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4852 int branch_taken = condition_true (dsc->u.branch.cond, status);
4853 enum pc_write_style write_pc = dsc->u.branch.exchange
4854 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4855
4856 if (!branch_taken)
4857 return;
4858
4859 if (dsc->u.branch.link)
4860 {
8c8dba6d
YQ
4861 /* The value of LR should be the next insn of current one. In order
4862 not to confuse logic hanlding later insn `bx lr', if current insn mode
4863 is Thumb, the bit 0 of LR value should be set to 1. */
4864 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4865
4866 if (dsc->is_thumb)
4867 next_insn_addr |= 0x1;
4868
4869 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4870 CANNOT_WRITE_PC);
cca44b1b
JB
4871 }
4872
bf9f652a 4873 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4874}
4875
4876/* Copy B/BL/BLX instructions with immediate destinations. */
4877
7ff120b4
YQ
4878static void
4879install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 4880 arm_displaced_step_closure *dsc,
7ff120b4
YQ
4881 unsigned int cond, int exchange, int link, long offset)
4882{
4883 /* Implement "BL<cond> <label>" as:
4884
4885 Preparation: cond <- instruction condition
4886 Insn: mov r0, r0 (nop)
4887 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4888
4889 B<cond> similar, but don't set r14 in cleanup. */
4890
4891 dsc->u.branch.cond = cond;
4892 dsc->u.branch.link = link;
4893 dsc->u.branch.exchange = exchange;
4894
2b16b2e3
YQ
4895 dsc->u.branch.dest = dsc->insn_addr;
4896 if (link && exchange)
4897 /* For BLX, offset is computed from the Align (PC, 4). */
4898 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4899
7ff120b4 4900 if (dsc->is_thumb)
2b16b2e3 4901 dsc->u.branch.dest += 4 + offset;
7ff120b4 4902 else
2b16b2e3 4903 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
4904
4905 dsc->cleanup = &cleanup_branch;
4906}
cca44b1b 4907static int
7ff120b4 4908arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
cfba9872 4909 struct regcache *regs, arm_displaced_step_closure *dsc)
cca44b1b
JB
4910{
4911 unsigned int cond = bits (insn, 28, 31);
4912 int exchange = (cond == 0xf);
4913 int link = exchange || bit (insn, 24);
cca44b1b
JB
4914 long offset;
4915
4916 if (debug_displaced)
4917 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4918 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4919 (unsigned long) insn);
cca44b1b
JB
4920 if (exchange)
4921 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4922 then arrange the switch into Thumb mode. */
4923 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4924 else
4925 offset = bits (insn, 0, 23) << 2;
4926
4927 if (bit (offset, 25))
4928 offset = offset | ~0x3ffffff;
4929
cca44b1b
JB
4930 dsc->modinsn[0] = ARM_NOP;
4931
7ff120b4 4932 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
4933 return 0;
4934}
4935
34518530
YQ
4936static int
4937thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4938 uint16_t insn2, struct regcache *regs,
cfba9872 4939 arm_displaced_step_closure *dsc)
34518530
YQ
4940{
4941 int link = bit (insn2, 14);
4942 int exchange = link && !bit (insn2, 12);
4943 int cond = INST_AL;
4944 long offset = 0;
4945 int j1 = bit (insn2, 13);
4946 int j2 = bit (insn2, 11);
4947 int s = sbits (insn1, 10, 10);
4948 int i1 = !(j1 ^ bit (insn1, 10));
4949 int i2 = !(j2 ^ bit (insn1, 10));
4950
4951 if (!link && !exchange) /* B */
4952 {
4953 offset = (bits (insn2, 0, 10) << 1);
4954 if (bit (insn2, 12)) /* Encoding T4 */
4955 {
4956 offset |= (bits (insn1, 0, 9) << 12)
4957 | (i2 << 22)
4958 | (i1 << 23)
4959 | (s << 24);
4960 cond = INST_AL;
4961 }
4962 else /* Encoding T3 */
4963 {
4964 offset |= (bits (insn1, 0, 5) << 12)
4965 | (j1 << 18)
4966 | (j2 << 19)
4967 | (s << 20);
4968 cond = bits (insn1, 6, 9);
4969 }
4970 }
4971 else
4972 {
4973 offset = (bits (insn1, 0, 9) << 12);
4974 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4975 offset |= exchange ?
4976 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4977 }
4978
4979 if (debug_displaced)
4980 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4981 "%.4x %.4x with offset %.8lx\n",
4982 link ? (exchange) ? "blx" : "bl" : "b",
4983 insn1, insn2, offset);
4984
4985 dsc->modinsn[0] = THUMB_NOP;
4986
4987 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4988 return 0;
4989}
4990
4991/* Copy B Thumb instructions. */
4992static int
615234c1 4993thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
cfba9872 4994 arm_displaced_step_closure *dsc)
34518530
YQ
4995{
4996 unsigned int cond = 0;
4997 int offset = 0;
4998 unsigned short bit_12_15 = bits (insn, 12, 15);
4999 CORE_ADDR from = dsc->insn_addr;
5000
5001 if (bit_12_15 == 0xd)
5002 {
5003 /* offset = SignExtend (imm8:0, 32) */
5004 offset = sbits ((insn << 1), 0, 8);
5005 cond = bits (insn, 8, 11);
5006 }
5007 else if (bit_12_15 == 0xe) /* Encoding T2 */
5008 {
5009 offset = sbits ((insn << 1), 0, 11);
5010 cond = INST_AL;
5011 }
5012
5013 if (debug_displaced)
5014 fprintf_unfiltered (gdb_stdlog,
5015 "displaced: copying b immediate insn %.4x "
5016 "with offset %d\n", insn, offset);
5017
5018 dsc->u.branch.cond = cond;
5019 dsc->u.branch.link = 0;
5020 dsc->u.branch.exchange = 0;
5021 dsc->u.branch.dest = from + 4 + offset;
5022
5023 dsc->modinsn[0] = THUMB_NOP;
5024
5025 dsc->cleanup = &cleanup_branch;
5026
5027 return 0;
5028}
5029
cca44b1b
JB
5030/* Copy BX/BLX with register-specified destinations. */
5031
7ff120b4
YQ
5032static void
5033install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 5034 arm_displaced_step_closure *dsc, int link,
7ff120b4 5035 unsigned int cond, unsigned int rm)
cca44b1b 5036{
cca44b1b
JB
5037 /* Implement {BX,BLX}<cond> <reg>" as:
5038
5039 Preparation: cond <- instruction condition
5040 Insn: mov r0, r0 (nop)
5041 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5042
5043 Don't set r14 in cleanup for BX. */
5044
36073a92 5045 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5046
5047 dsc->u.branch.cond = cond;
5048 dsc->u.branch.link = link;
cca44b1b 5049
7ff120b4 5050 dsc->u.branch.exchange = 1;
cca44b1b
JB
5051
5052 dsc->cleanup = &cleanup_branch;
7ff120b4 5053}
cca44b1b 5054
7ff120b4
YQ
5055static int
5056arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
cfba9872 5057 struct regcache *regs, arm_displaced_step_closure *dsc)
7ff120b4
YQ
5058{
5059 unsigned int cond = bits (insn, 28, 31);
5060 /* BX: x12xxx1x
5061 BLX: x12xxx3x. */
5062 int link = bit (insn, 5);
5063 unsigned int rm = bits (insn, 0, 3);
5064
5065 if (debug_displaced)
5066 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5067 (unsigned long) insn);
5068
5069 dsc->modinsn[0] = ARM_NOP;
5070
5071 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5072 return 0;
5073}
5074
34518530
YQ
5075static int
5076thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5077 struct regcache *regs,
cfba9872 5078 arm_displaced_step_closure *dsc)
34518530
YQ
5079{
5080 int link = bit (insn, 7);
5081 unsigned int rm = bits (insn, 3, 6);
5082
5083 if (debug_displaced)
5084 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5085 (unsigned short) insn);
5086
5087 dsc->modinsn[0] = THUMB_NOP;
5088
5089 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5090
5091 return 0;
5092}
5093
5094
0963b4bd 5095/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5096
5097static void
6e39997a 5098cleanup_alu_imm (struct gdbarch *gdbarch,
cfba9872 5099 struct regcache *regs, arm_displaced_step_closure *dsc)
cca44b1b 5100{
36073a92 5101 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5102 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5103 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5104 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5105}
5106
5107static int
7ff120b4 5108arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cfba9872 5109 arm_displaced_step_closure *dsc)
cca44b1b
JB
5110{
5111 unsigned int rn = bits (insn, 16, 19);
5112 unsigned int rd = bits (insn, 12, 15);
5113 unsigned int op = bits (insn, 21, 24);
5114 int is_mov = (op == 0xd);
5115 ULONGEST rd_val, rn_val;
cca44b1b
JB
5116
5117 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5118 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5119
5120 if (debug_displaced)
5121 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5122 "%.8lx\n", is_mov ? "move" : "ALU",
5123 (unsigned long) insn);
5124
5125 /* Instruction is of form:
5126
5127 <op><cond> rd, [rn,] #imm
5128
5129 Rewrite as:
5130
5131 Preparation: tmp1, tmp2 <- r0, r1;
5132 r0, r1 <- rd, rn
5133 Insn: <op><cond> r0, r1, #imm
5134 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5135 */
5136
36073a92
YQ
5137 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5138 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5139 rn_val = displaced_read_reg (regs, dsc, rn);
5140 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5141 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5142 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5143 dsc->rd = rd;
5144
5145 if (is_mov)
5146 dsc->modinsn[0] = insn & 0xfff00fff;
5147 else
5148 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5149
5150 dsc->cleanup = &cleanup_alu_imm;
5151
5152 return 0;
5153}
5154
34518530
YQ
5155static int
5156thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5157 uint16_t insn2, struct regcache *regs,
cfba9872 5158 arm_displaced_step_closure *dsc)
34518530
YQ
5159{
5160 unsigned int op = bits (insn1, 5, 8);
5161 unsigned int rn, rm, rd;
5162 ULONGEST rd_val, rn_val;
5163
5164 rn = bits (insn1, 0, 3); /* Rn */
5165 rm = bits (insn2, 0, 3); /* Rm */
5166 rd = bits (insn2, 8, 11); /* Rd */
5167
5168 /* This routine is only called for instruction MOV. */
5169 gdb_assert (op == 0x2 && rn == 0xf);
5170
5171 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5172 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5173
5174 if (debug_displaced)
5175 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5176 "ALU", insn1, insn2);
5177
5178 /* Instruction is of form:
5179
5180 <op><cond> rd, [rn,] #imm
5181
5182 Rewrite as:
5183
5184 Preparation: tmp1, tmp2 <- r0, r1;
5185 r0, r1 <- rd, rn
5186 Insn: <op><cond> r0, r1, #imm
5187 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5188 */
5189
5190 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5191 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5192 rn_val = displaced_read_reg (regs, dsc, rn);
5193 rd_val = displaced_read_reg (regs, dsc, rd);
5194 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5195 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5196 dsc->rd = rd;
5197
5198 dsc->modinsn[0] = insn1;
5199 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5200 dsc->numinsns = 2;
5201
5202 dsc->cleanup = &cleanup_alu_imm;
5203
5204 return 0;
5205}
5206
cca44b1b
JB
5207/* Copy/cleanup arithmetic/logic insns with register RHS. */
5208
5209static void
6e39997a 5210cleanup_alu_reg (struct gdbarch *gdbarch,
cfba9872 5211 struct regcache *regs, arm_displaced_step_closure *dsc)
cca44b1b
JB
5212{
5213 ULONGEST rd_val;
5214 int i;
5215
36073a92 5216 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5217
5218 for (i = 0; i < 3; i++)
5219 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5220
5221 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5222}
5223
7ff120b4
YQ
5224static void
5225install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 5226 arm_displaced_step_closure *dsc,
7ff120b4 5227 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5228{
cca44b1b 5229 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5230
cca44b1b
JB
5231 /* Instruction is of form:
5232
5233 <op><cond> rd, [rn,] rm [, <shift>]
5234
5235 Rewrite as:
5236
5237 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5238 r0, r1, r2 <- rd, rn, rm
ef713951 5239 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5240 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5241 */
5242
36073a92
YQ
5243 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5244 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5245 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5246 rd_val = displaced_read_reg (regs, dsc, rd);
5247 rn_val = displaced_read_reg (regs, dsc, rn);
5248 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5249 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5250 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5251 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5252 dsc->rd = rd;
5253
7ff120b4
YQ
5254 dsc->cleanup = &cleanup_alu_reg;
5255}
5256
5257static int
5258arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cfba9872 5259 arm_displaced_step_closure *dsc)
7ff120b4
YQ
5260{
5261 unsigned int op = bits (insn, 21, 24);
5262 int is_mov = (op == 0xd);
5263
5264 if (!insn_references_pc (insn, 0x000ff00ful))
5265 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5266
5267 if (debug_displaced)
5268 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5269 is_mov ? "move" : "ALU", (unsigned long) insn);
5270
cca44b1b
JB
5271 if (is_mov)
5272 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5273 else
5274 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5275
7ff120b4
YQ
5276 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5277 bits (insn, 0, 3));
cca44b1b
JB
5278 return 0;
5279}
5280
34518530
YQ
5281static int
5282thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5283 struct regcache *regs,
cfba9872 5284 arm_displaced_step_closure *dsc)
34518530 5285{
ef713951 5286 unsigned rm, rd;
34518530 5287
ef713951
YQ
5288 rm = bits (insn, 3, 6);
5289 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5290
ef713951 5291 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5292 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5293
5294 if (debug_displaced)
ef713951
YQ
5295 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5296 (unsigned short) insn);
34518530 5297
ef713951 5298 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5299
ef713951 5300 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5301
5302 return 0;
5303}
5304
cca44b1b
JB
5305/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5306
5307static void
6e39997a 5308cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b 5309 struct regcache *regs,
cfba9872 5310 arm_displaced_step_closure *dsc)
cca44b1b 5311{
36073a92 5312 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5313 int i;
5314
5315 for (i = 0; i < 4; i++)
5316 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5317
5318 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5319}
5320
7ff120b4
YQ
5321static void
5322install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 5323 arm_displaced_step_closure *dsc,
7ff120b4
YQ
5324 unsigned int rd, unsigned int rn, unsigned int rm,
5325 unsigned rs)
cca44b1b 5326{
7ff120b4 5327 int i;
cca44b1b 5328 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5329
cca44b1b
JB
5330 /* Instruction is of form:
5331
5332 <op><cond> rd, [rn,] rm, <shift> rs
5333
5334 Rewrite as:
5335
5336 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5337 r0, r1, r2, r3 <- rd, rn, rm, rs
5338 Insn: <op><cond> r0, r1, r2, <shift> r3
5339 Cleanup: tmp5 <- r0
5340 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5341 rd <- tmp5
5342 */
5343
5344 for (i = 0; i < 4; i++)
36073a92 5345 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5346
36073a92
YQ
5347 rd_val = displaced_read_reg (regs, dsc, rd);
5348 rn_val = displaced_read_reg (regs, dsc, rn);
5349 rm_val = displaced_read_reg (regs, dsc, rm);
5350 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5351 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5352 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5353 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5354 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5355 dsc->rd = rd;
7ff120b4
YQ
5356 dsc->cleanup = &cleanup_alu_shifted_reg;
5357}
5358
5359static int
5360arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5361 struct regcache *regs,
cfba9872 5362 arm_displaced_step_closure *dsc)
7ff120b4
YQ
5363{
5364 unsigned int op = bits (insn, 21, 24);
5365 int is_mov = (op == 0xd);
5366 unsigned int rd, rn, rm, rs;
5367
5368 if (!insn_references_pc (insn, 0x000fff0ful))
5369 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5370
5371 if (debug_displaced)
5372 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5373 "%.8lx\n", is_mov ? "move" : "ALU",
5374 (unsigned long) insn);
5375
5376 rn = bits (insn, 16, 19);
5377 rm = bits (insn, 0, 3);
5378 rs = bits (insn, 8, 11);
5379 rd = bits (insn, 12, 15);
cca44b1b
JB
5380
5381 if (is_mov)
5382 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5383 else
5384 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5385
7ff120b4 5386 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5387
5388 return 0;
5389}
5390
5391/* Clean up load instructions. */
5392
5393static void
6e39997a 5394cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 5395 arm_displaced_step_closure *dsc)
cca44b1b
JB
5396{
5397 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5398
36073a92 5399 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5400 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5401 rt_val2 = displaced_read_reg (regs, dsc, 1);
5402 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5403
5404 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5405 if (dsc->u.ldst.xfersize > 4)
5406 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5407 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5408 if (!dsc->u.ldst.immed)
5409 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5410
5411 /* Handle register writeback. */
5412 if (dsc->u.ldst.writeback)
5413 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5414 /* Put result in right place. */
5415 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5416 if (dsc->u.ldst.xfersize == 8)
5417 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5418}
5419
5420/* Clean up store instructions. */
5421
5422static void
6e39997a 5423cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 5424 arm_displaced_step_closure *dsc)
cca44b1b 5425{
36073a92 5426 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5427
5428 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5429 if (dsc->u.ldst.xfersize > 4)
5430 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5431 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5432 if (!dsc->u.ldst.immed)
5433 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5434 if (!dsc->u.ldst.restore_r4)
5435 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5436
5437 /* Writeback. */
5438 if (dsc->u.ldst.writeback)
5439 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5440}
5441
5442/* Copy "extra" load/store instructions. These are halfword/doubleword
5443 transfers, which have a different encoding to byte/word transfers. */
5444
5445static int
550dc4e2 5446arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
cfba9872 5447 struct regcache *regs, arm_displaced_step_closure *dsc)
cca44b1b
JB
5448{
5449 unsigned int op1 = bits (insn, 20, 24);
5450 unsigned int op2 = bits (insn, 5, 6);
5451 unsigned int rt = bits (insn, 12, 15);
5452 unsigned int rn = bits (insn, 16, 19);
5453 unsigned int rm = bits (insn, 0, 3);
5454 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5455 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5456 int immed = (op1 & 0x4) != 0;
5457 int opcode;
5458 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5459
5460 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5461 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5462
5463 if (debug_displaced)
5464 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5465 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5466 (unsigned long) insn);
5467
5468 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5469
5470 if (opcode < 0)
5471 internal_error (__FILE__, __LINE__,
5472 _("copy_extra_ld_st: instruction decode error"));
5473
36073a92
YQ
5474 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5475 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5476 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5477 if (!immed)
36073a92 5478 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5479
36073a92 5480 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5481 if (bytesize[opcode] == 8)
36073a92
YQ
5482 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5483 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5484 if (!immed)
36073a92 5485 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5486
5487 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5488 if (bytesize[opcode] == 8)
5489 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5490 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5491 if (!immed)
5492 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5493
5494 dsc->rd = rt;
5495 dsc->u.ldst.xfersize = bytesize[opcode];
5496 dsc->u.ldst.rn = rn;
5497 dsc->u.ldst.immed = immed;
5498 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5499 dsc->u.ldst.restore_r4 = 0;
5500
5501 if (immed)
5502 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5503 ->
5504 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5505 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5506 else
5507 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5508 ->
5509 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5510 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5511
5512 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5513
5514 return 0;
5515}
5516
0f6f04ba 5517/* Copy byte/half word/word loads and stores. */
cca44b1b 5518
7ff120b4 5519static void
0f6f04ba 5520install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 5521 arm_displaced_step_closure *dsc, int load,
0f6f04ba
YQ
5522 int immed, int writeback, int size, int usermode,
5523 int rt, int rm, int rn)
cca44b1b 5524{
cca44b1b 5525 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5526
36073a92
YQ
5527 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5528 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5529 if (!immed)
36073a92 5530 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5531 if (!load)
36073a92 5532 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5533
36073a92
YQ
5534 rt_val = displaced_read_reg (regs, dsc, rt);
5535 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5536 if (!immed)
36073a92 5537 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5538
5539 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5540 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5541 if (!immed)
5542 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5543 dsc->rd = rt;
0f6f04ba 5544 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5545 dsc->u.ldst.rn = rn;
5546 dsc->u.ldst.immed = immed;
7ff120b4 5547 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5548
5549 /* To write PC we can do:
5550
494e194e
YQ
5551 Before this sequence of instructions:
5552 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5553 r2 is the Rn value got from dispalced_read_reg.
5554
5555 Insn1: push {pc} Write address of STR instruction + offset on stack
5556 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5557 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5558 = addr(Insn1) + offset - addr(Insn3) - 8
5559 = offset - 16
5560 Insn4: add r4, r4, #8 r4 = offset - 8
5561 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5562 = from + offset
5563 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5564
5565 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5566 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5567 of this can be found in Section "Saving from r15" in
5568 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5569
7ff120b4
YQ
5570 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5571}
5572
34518530
YQ
5573
5574static int
5575thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5576 uint16_t insn2, struct regcache *regs,
cfba9872 5577 arm_displaced_step_closure *dsc, int size)
34518530
YQ
5578{
5579 unsigned int u_bit = bit (insn1, 7);
5580 unsigned int rt = bits (insn2, 12, 15);
5581 int imm12 = bits (insn2, 0, 11);
5582 ULONGEST pc_val;
5583
5584 if (debug_displaced)
5585 fprintf_unfiltered (gdb_stdlog,
5586 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5587 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5588 imm12);
5589
5590 if (!u_bit)
5591 imm12 = -1 * imm12;
5592
5593 /* Rewrite instruction LDR Rt imm12 into:
5594
5595 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5596
5597 LDR R0, R2, R3,
5598
5599 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5600
5601
5602 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5603 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5604 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5605
5606 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5607
5608 pc_val = pc_val & 0xfffffffc;
5609
5610 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5611 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5612
5613 dsc->rd = rt;
5614
5615 dsc->u.ldst.xfersize = size;
5616 dsc->u.ldst.immed = 0;
5617 dsc->u.ldst.writeback = 0;
5618 dsc->u.ldst.restore_r4 = 0;
5619
5620 /* LDR R0, R2, R3 */
5621 dsc->modinsn[0] = 0xf852;
5622 dsc->modinsn[1] = 0x3;
5623 dsc->numinsns = 2;
5624
5625 dsc->cleanup = &cleanup_load;
5626
5627 return 0;
5628}
5629
5630static int
5631thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5632 uint16_t insn2, struct regcache *regs,
cfba9872 5633 arm_displaced_step_closure *dsc,
34518530
YQ
5634 int writeback, int immed)
5635{
5636 unsigned int rt = bits (insn2, 12, 15);
5637 unsigned int rn = bits (insn1, 0, 3);
5638 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5639 /* In LDR (register), there is also a register Rm, which is not allowed to
5640 be PC, so we don't have to check it. */
5641
5642 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5643 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5644 dsc);
5645
5646 if (debug_displaced)
5647 fprintf_unfiltered (gdb_stdlog,
5648 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5649 rt, rn, insn1, insn2);
5650
5651 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5652 0, rt, rm, rn);
5653
5654 dsc->u.ldst.restore_r4 = 0;
5655
5656 if (immed)
5657 /* ldr[b]<cond> rt, [rn, #imm], etc.
5658 ->
5659 ldr[b]<cond> r0, [r2, #imm]. */
5660 {
5661 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5662 dsc->modinsn[1] = insn2 & 0x0fff;
5663 }
5664 else
5665 /* ldr[b]<cond> rt, [rn, rm], etc.
5666 ->
5667 ldr[b]<cond> r0, [r2, r3]. */
5668 {
5669 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5670 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5671 }
5672
5673 dsc->numinsns = 2;
5674
5675 return 0;
5676}
5677
5678
7ff120b4
YQ
5679static int
5680arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5681 struct regcache *regs,
cfba9872 5682 arm_displaced_step_closure *dsc,
0f6f04ba 5683 int load, int size, int usermode)
7ff120b4
YQ
5684{
5685 int immed = !bit (insn, 25);
5686 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5687 unsigned int rt = bits (insn, 12, 15);
5688 unsigned int rn = bits (insn, 16, 19);
5689 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5690
5691 if (!insn_references_pc (insn, 0x000ff00ful))
5692 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5693
5694 if (debug_displaced)
5695 fprintf_unfiltered (gdb_stdlog,
5696 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5697 load ? (size == 1 ? "ldrb" : "ldr")
5698 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5699 rt, rn,
5700 (unsigned long) insn);
5701
0f6f04ba
YQ
5702 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5703 usermode, rt, rm, rn);
7ff120b4 5704
bf9f652a 5705 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5706 {
5707 dsc->u.ldst.restore_r4 = 0;
5708
5709 if (immed)
5710 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5711 ->
5712 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5713 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5714 else
5715 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5716 ->
5717 {ldr,str}[b]<cond> r0, [r2, r3]. */
5718 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5719 }
5720 else
5721 {
5722 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5723 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5724 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5725 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5726 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5727 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5728 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5729
5730 /* As above. */
5731 if (immed)
5732 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5733 else
5734 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5735
cca44b1b
JB
5736 dsc->numinsns = 6;
5737 }
5738
5739 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5740
5741 return 0;
5742}
5743
5744/* Cleanup LDM instructions with fully-populated register list. This is an
5745 unfortunate corner case: it's impossible to implement correctly by modifying
5746 the instruction. The issue is as follows: we have an instruction,
5747
5748 ldm rN, {r0-r15}
5749
5750 which we must rewrite to avoid loading PC. A possible solution would be to
5751 do the load in two halves, something like (with suitable cleanup
5752 afterwards):
5753
5754 mov r8, rN
5755 ldm[id][ab] r8!, {r0-r7}
5756 str r7, <temp>
5757 ldm[id][ab] r8, {r7-r14}
5758 <bkpt>
5759
5760 but at present there's no suitable place for <temp>, since the scratch space
5761 is overwritten before the cleanup routine is called. For now, we simply
5762 emulate the instruction. */
5763
5764static void
5765cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 5766 arm_displaced_step_closure *dsc)
cca44b1b 5767{
cca44b1b
JB
5768 int inc = dsc->u.block.increment;
5769 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5770 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5771 uint32_t regmask = dsc->u.block.regmask;
5772 int regno = inc ? 0 : 15;
5773 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5774 int exception_return = dsc->u.block.load && dsc->u.block.user
5775 && (regmask & 0x8000) != 0;
36073a92 5776 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5777 int do_transfer = condition_true (dsc->u.block.cond, status);
5778 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5779
5780 if (!do_transfer)
5781 return;
5782
5783 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5784 sensible we can do here. Complain loudly. */
5785 if (exception_return)
5786 error (_("Cannot single-step exception return"));
5787
5788 /* We don't handle any stores here for now. */
5789 gdb_assert (dsc->u.block.load != 0);
5790
5791 if (debug_displaced)
5792 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5793 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5794 dsc->u.block.increment ? "inc" : "dec",
5795 dsc->u.block.before ? "before" : "after");
5796
5797 while (regmask)
5798 {
5799 uint32_t memword;
5800
5801 if (inc)
bf9f652a 5802 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5803 regno++;
5804 else
5805 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5806 regno--;
5807
5808 xfer_addr += bump_before;
5809
5810 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5811 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5812
5813 xfer_addr += bump_after;
5814
5815 regmask &= ~(1 << regno);
5816 }
5817
5818 if (dsc->u.block.writeback)
5819 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5820 CANNOT_WRITE_PC);
5821}
5822
5823/* Clean up an STM which included the PC in the register list. */
5824
5825static void
5826cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 5827 arm_displaced_step_closure *dsc)
cca44b1b 5828{
36073a92 5829 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5830 int store_executed = condition_true (dsc->u.block.cond, status);
5831 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5832 CORE_ADDR stm_insn_addr;
5833 uint32_t pc_val;
5834 long offset;
5835 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5836
5837 /* If condition code fails, there's nothing else to do. */
5838 if (!store_executed)
5839 return;
5840
5841 if (dsc->u.block.increment)
5842 {
5843 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5844
5845 if (dsc->u.block.before)
5846 pc_stored_at += 4;
5847 }
5848 else
5849 {
5850 pc_stored_at = dsc->u.block.xfer_addr;
5851
5852 if (dsc->u.block.before)
5853 pc_stored_at -= 4;
5854 }
5855
5856 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5857 stm_insn_addr = dsc->scratch_base;
5858 offset = pc_val - stm_insn_addr;
5859
5860 if (debug_displaced)
5861 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5862 "STM instruction\n", offset);
5863
5864 /* Rewrite the stored PC to the proper value for the non-displaced original
5865 instruction. */
5866 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5867 dsc->insn_addr + offset);
5868}
5869
5870/* Clean up an LDM which includes the PC in the register list. We clumped all
5871 the registers in the transferred list into a contiguous range r0...rX (to
5872 avoid loading PC directly and losing control of the debugged program), so we
5873 must undo that here. */
5874
5875static void
6e39997a 5876cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b 5877 struct regcache *regs,
cfba9872 5878 arm_displaced_step_closure *dsc)
cca44b1b 5879{
36073a92 5880 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5881 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5882 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5883 unsigned int regs_loaded = bitcount (mask);
5884 unsigned int num_to_shuffle = regs_loaded, clobbered;
5885
5886 /* The method employed here will fail if the register list is fully populated
5887 (we need to avoid loading PC directly). */
5888 gdb_assert (num_to_shuffle < 16);
5889
5890 if (!load_executed)
5891 return;
5892
5893 clobbered = (1 << num_to_shuffle) - 1;
5894
5895 while (num_to_shuffle > 0)
5896 {
5897 if ((mask & (1 << write_reg)) != 0)
5898 {
5899 unsigned int read_reg = num_to_shuffle - 1;
5900
5901 if (read_reg != write_reg)
5902 {
36073a92 5903 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
5904 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5905 if (debug_displaced)
5906 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5907 "loaded register r%d to r%d\n"), read_reg,
5908 write_reg);
5909 }
5910 else if (debug_displaced)
5911 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5912 "r%d already in the right place\n"),
5913 write_reg);
5914
5915 clobbered &= ~(1 << write_reg);
5916
5917 num_to_shuffle--;
5918 }
5919
5920 write_reg--;
5921 }
5922
5923 /* Restore any registers we scribbled over. */
5924 for (write_reg = 0; clobbered != 0; write_reg++)
5925 {
5926 if ((clobbered & (1 << write_reg)) != 0)
5927 {
5928 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5929 CANNOT_WRITE_PC);
5930 if (debug_displaced)
5931 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5932 "clobbered register r%d\n"), write_reg);
5933 clobbered &= ~(1 << write_reg);
5934 }
5935 }
5936
5937 /* Perform register writeback manually. */
5938 if (dsc->u.block.writeback)
5939 {
5940 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5941
5942 if (dsc->u.block.increment)
5943 new_rn_val += regs_loaded * 4;
5944 else
5945 new_rn_val -= regs_loaded * 4;
5946
5947 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5948 CANNOT_WRITE_PC);
5949 }
5950}
5951
5952/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5953 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5954
5955static int
7ff120b4
YQ
5956arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5957 struct regcache *regs,
cfba9872 5958 arm_displaced_step_closure *dsc)
cca44b1b
JB
5959{
5960 int load = bit (insn, 20);
5961 int user = bit (insn, 22);
5962 int increment = bit (insn, 23);
5963 int before = bit (insn, 24);
5964 int writeback = bit (insn, 21);
5965 int rn = bits (insn, 16, 19);
cca44b1b 5966
0963b4bd
MS
5967 /* Block transfers which don't mention PC can be run directly
5968 out-of-line. */
bf9f652a 5969 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 5970 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 5971
bf9f652a 5972 if (rn == ARM_PC_REGNUM)
cca44b1b 5973 {
0963b4bd
MS
5974 warning (_("displaced: Unpredictable LDM or STM with "
5975 "base register r15"));
7ff120b4 5976 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
5977 }
5978
5979 if (debug_displaced)
5980 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5981 "%.8lx\n", (unsigned long) insn);
5982
36073a92 5983 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
5984 dsc->u.block.rn = rn;
5985
5986 dsc->u.block.load = load;
5987 dsc->u.block.user = user;
5988 dsc->u.block.increment = increment;
5989 dsc->u.block.before = before;
5990 dsc->u.block.writeback = writeback;
5991 dsc->u.block.cond = bits (insn, 28, 31);
5992
5993 dsc->u.block.regmask = insn & 0xffff;
5994
5995 if (load)
5996 {
5997 if ((insn & 0xffff) == 0xffff)
5998 {
5999 /* LDM with a fully-populated register list. This case is
6000 particularly tricky. Implement for now by fully emulating the
6001 instruction (which might not behave perfectly in all cases, but
6002 these instructions should be rare enough for that not to matter
6003 too much). */
6004 dsc->modinsn[0] = ARM_NOP;
6005
6006 dsc->cleanup = &cleanup_block_load_all;
6007 }
6008 else
6009 {
6010 /* LDM of a list of registers which includes PC. Implement by
6011 rewriting the list of registers to be transferred into a
6012 contiguous chunk r0...rX before doing the transfer, then shuffling
6013 registers into the correct places in the cleanup routine. */
6014 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6015 unsigned int num_in_list = bitcount (regmask), new_regmask;
6016 unsigned int i;
cca44b1b
JB
6017
6018 for (i = 0; i < num_in_list; i++)
36073a92 6019 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6020
6021 /* Writeback makes things complicated. We need to avoid clobbering
6022 the base register with one of the registers in our modified
6023 register list, but just using a different register can't work in
6024 all cases, e.g.:
6025
6026 ldm r14!, {r0-r13,pc}
6027
6028 which would need to be rewritten as:
6029
6030 ldm rN!, {r0-r14}
6031
6032 but that can't work, because there's no free register for N.
6033
6034 Solve this by turning off the writeback bit, and emulating
6035 writeback manually in the cleanup routine. */
6036
6037 if (writeback)
6038 insn &= ~(1 << 21);
6039
6040 new_regmask = (1 << num_in_list) - 1;
6041
6042 if (debug_displaced)
6043 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6044 "{..., pc}: original reg list %.4x, modified "
6045 "list %.4x\n"), rn, writeback ? "!" : "",
6046 (int) insn & 0xffff, new_regmask);
6047
6048 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6049
6050 dsc->cleanup = &cleanup_block_load_pc;
6051 }
6052 }
6053 else
6054 {
6055 /* STM of a list of registers which includes PC. Run the instruction
6056 as-is, but out of line: this will store the wrong value for the PC,
6057 so we must manually fix up the memory in the cleanup routine.
6058 Doing things this way has the advantage that we can auto-detect
6059 the offset of the PC write (which is architecture-dependent) in
6060 the cleanup routine. */
6061 dsc->modinsn[0] = insn;
6062
6063 dsc->cleanup = &cleanup_block_store_pc;
6064 }
6065
6066 return 0;
6067}
6068
34518530
YQ
6069static int
6070thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6071 struct regcache *regs,
cfba9872 6072 arm_displaced_step_closure *dsc)
cca44b1b 6073{
34518530
YQ
6074 int rn = bits (insn1, 0, 3);
6075 int load = bit (insn1, 4);
6076 int writeback = bit (insn1, 5);
cca44b1b 6077
34518530
YQ
6078 /* Block transfers which don't mention PC can be run directly
6079 out-of-line. */
6080 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6081 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6082
34518530
YQ
6083 if (rn == ARM_PC_REGNUM)
6084 {
6085 warning (_("displaced: Unpredictable LDM or STM with "
6086 "base register r15"));
6087 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6088 "unpredictable ldm/stm", dsc);
6089 }
cca44b1b
JB
6090
6091 if (debug_displaced)
34518530
YQ
6092 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6093 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6094
34518530
YQ
6095 /* Clear bit 13, since it should be always zero. */
6096 dsc->u.block.regmask = (insn2 & 0xdfff);
6097 dsc->u.block.rn = rn;
cca44b1b 6098
34518530
YQ
6099 dsc->u.block.load = load;
6100 dsc->u.block.user = 0;
6101 dsc->u.block.increment = bit (insn1, 7);
6102 dsc->u.block.before = bit (insn1, 8);
6103 dsc->u.block.writeback = writeback;
6104 dsc->u.block.cond = INST_AL;
6105 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6106
34518530
YQ
6107 if (load)
6108 {
6109 if (dsc->u.block.regmask == 0xffff)
6110 {
6111 /* This branch is impossible to happen. */
6112 gdb_assert (0);
6113 }
6114 else
6115 {
6116 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6117 unsigned int num_in_list = bitcount (regmask), new_regmask;
6118 unsigned int i;
34518530
YQ
6119
6120 for (i = 0; i < num_in_list; i++)
6121 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6122
6123 if (writeback)
6124 insn1 &= ~(1 << 5);
6125
6126 new_regmask = (1 << num_in_list) - 1;
6127
6128 if (debug_displaced)
6129 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6130 "{..., pc}: original reg list %.4x, modified "
6131 "list %.4x\n"), rn, writeback ? "!" : "",
6132 (int) dsc->u.block.regmask, new_regmask);
6133
6134 dsc->modinsn[0] = insn1;
6135 dsc->modinsn[1] = (new_regmask & 0xffff);
6136 dsc->numinsns = 2;
6137
6138 dsc->cleanup = &cleanup_block_load_pc;
6139 }
6140 }
6141 else
6142 {
6143 dsc->modinsn[0] = insn1;
6144 dsc->modinsn[1] = insn2;
6145 dsc->numinsns = 2;
6146 dsc->cleanup = &cleanup_block_store_pc;
6147 }
6148 return 0;
6149}
6150
d9311bfa
AT
6151/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6152 This is used to avoid a dependency on BFD's bfd_endian enum. */
6153
6154ULONGEST
6155arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6156 int byte_order)
6157{
5f2dfcfd
AT
6158 return read_memory_unsigned_integer (memaddr, len,
6159 (enum bfd_endian) byte_order);
d9311bfa
AT
6160}
6161
6162/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6163
6164CORE_ADDR
6165arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6166 CORE_ADDR val)
6167{
ac7936df 6168 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
d9311bfa
AT
6169}
6170
6171/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6172
e7cf25a8 6173static CORE_ADDR
553cb527 6174arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6175{
d9311bfa
AT
6176 return 0;
6177}
6178
6179/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6180
6181int
6182arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6183{
6184 return arm_is_thumb (self->regcache);
6185}
6186
6187/* single_step() is called just before we want to resume the inferior,
6188 if we want to single-step it but there is no hardware or kernel
6189 single-step support. We find the target of the coming instructions
6190 and breakpoint them. */
6191
a0ff9e1a 6192std::vector<CORE_ADDR>
f5ea389a 6193arm_software_single_step (struct regcache *regcache)
d9311bfa 6194{
ac7936df 6195 struct gdbarch *gdbarch = regcache->arch ();
d9311bfa 6196 struct arm_get_next_pcs next_pcs_ctx;
d9311bfa
AT
6197
6198 arm_get_next_pcs_ctor (&next_pcs_ctx,
6199 &arm_get_next_pcs_ops,
6200 gdbarch_byte_order (gdbarch),
6201 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6202 0,
d9311bfa
AT
6203 regcache);
6204
a0ff9e1a 6205 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa 6206
a0ff9e1a
SM
6207 for (CORE_ADDR &pc_ref : next_pcs)
6208 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
d9311bfa 6209
93f9a11f 6210 return next_pcs;
d9311bfa
AT
6211}
6212
34518530
YQ
6213/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6214 for Linux, where some SVC instructions must be treated specially. */
6215
6216static void
6217cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 6218 arm_displaced_step_closure *dsc)
34518530
YQ
6219{
6220 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6221
6222 if (debug_displaced)
6223 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6224 "%.8lx\n", (unsigned long) resume_addr);
6225
6226 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6227}
6228
6229
6230/* Common copy routine for svc instruciton. */
6231
6232static int
6233install_svc (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 6234 arm_displaced_step_closure *dsc)
34518530
YQ
6235{
6236 /* Preparation: none.
6237 Insn: unmodified svc.
6238 Cleanup: pc <- insn_addr + insn_size. */
6239
6240 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6241 instruction. */
6242 dsc->wrote_to_pc = 1;
6243
6244 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6245 if (dsc->u.svc.copy_svc_os)
6246 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6247 else
6248 {
6249 dsc->cleanup = &cleanup_svc;
6250 return 0;
6251 }
34518530
YQ
6252}
6253
6254static int
6255arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
cfba9872 6256 struct regcache *regs, arm_displaced_step_closure *dsc)
34518530
YQ
6257{
6258
6259 if (debug_displaced)
6260 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6261 (unsigned long) insn);
6262
6263 dsc->modinsn[0] = insn;
6264
6265 return install_svc (gdbarch, regs, dsc);
6266}
6267
6268static int
6269thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
cfba9872 6270 struct regcache *regs, arm_displaced_step_closure *dsc)
34518530
YQ
6271{
6272
6273 if (debug_displaced)
6274 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6275 insn);
bd18283a 6276
34518530
YQ
6277 dsc->modinsn[0] = insn;
6278
6279 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6280}
6281
6282/* Copy undefined instructions. */
6283
6284static int
7ff120b4 6285arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
cfba9872 6286 arm_displaced_step_closure *dsc)
cca44b1b
JB
6287{
6288 if (debug_displaced)
0963b4bd
MS
6289 fprintf_unfiltered (gdb_stdlog,
6290 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6291 (unsigned long) insn);
6292
6293 dsc->modinsn[0] = insn;
6294
6295 return 0;
6296}
6297
34518530
YQ
6298static int
6299thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
cfba9872 6300 arm_displaced_step_closure *dsc)
34518530
YQ
6301{
6302
6303 if (debug_displaced)
6304 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6305 "%.4x %.4x\n", (unsigned short) insn1,
6306 (unsigned short) insn2);
6307
6308 dsc->modinsn[0] = insn1;
6309 dsc->modinsn[1] = insn2;
6310 dsc->numinsns = 2;
6311
6312 return 0;
6313}
6314
cca44b1b
JB
6315/* Copy unpredictable instructions. */
6316
6317static int
7ff120b4 6318arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
cfba9872 6319 arm_displaced_step_closure *dsc)
cca44b1b
JB
6320{
6321 if (debug_displaced)
6322 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6323 "%.8lx\n", (unsigned long) insn);
6324
6325 dsc->modinsn[0] = insn;
6326
6327 return 0;
6328}
6329
6330/* The decode_* functions are instruction decoding helpers. They mostly follow
6331 the presentation in the ARM ARM. */
6332
6333static int
7ff120b4
YQ
6334arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6335 struct regcache *regs,
cfba9872 6336 arm_displaced_step_closure *dsc)
cca44b1b
JB
6337{
6338 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6339 unsigned int rn = bits (insn, 16, 19);
6340
2f924de6 6341 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7ff120b4 6342 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
2f924de6 6343 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7ff120b4 6344 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6345 else if ((op1 & 0x60) == 0x20)
7ff120b4 6346 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6347 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6348 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6349 dsc);
cca44b1b 6350 else if ((op1 & 0x77) == 0x41)
7ff120b4 6351 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6352 else if ((op1 & 0x77) == 0x45)
7ff120b4 6353 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6354 else if ((op1 & 0x77) == 0x51)
6355 {
6356 if (rn != 0xf)
7ff120b4 6357 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6358 else
7ff120b4 6359 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6360 }
6361 else if ((op1 & 0x77) == 0x55)
7ff120b4 6362 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6363 else if (op1 == 0x57)
6364 switch (op2)
6365 {
7ff120b4
YQ
6366 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6367 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6368 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6369 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6370 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6371 }
6372 else if ((op1 & 0x63) == 0x43)
7ff120b4 6373 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6374 else if ((op2 & 0x1) == 0x0)
6375 switch (op1 & ~0x80)
6376 {
6377 case 0x61:
7ff120b4 6378 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6379 case 0x65:
7ff120b4 6380 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6381 case 0x71: case 0x75:
6382 /* pld/pldw reg. */
7ff120b4 6383 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6384 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6385 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6386 default:
7ff120b4 6387 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6388 }
6389 else
7ff120b4 6390 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6391}
6392
6393static int
7ff120b4
YQ
6394arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6395 struct regcache *regs,
cfba9872 6396 arm_displaced_step_closure *dsc)
cca44b1b
JB
6397{
6398 if (bit (insn, 27) == 0)
7ff120b4 6399 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6400 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6401 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6402 {
6403 case 0x0: case 0x2:
7ff120b4 6404 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6405
6406 case 0x1: case 0x3:
7ff120b4 6407 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6408
6409 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6410 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6411
6412 case 0x8:
6413 switch ((insn & 0xe00000) >> 21)
6414 {
6415 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6416 /* stc/stc2. */
7ff120b4 6417 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6418
6419 case 0x2:
7ff120b4 6420 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6421
6422 default:
7ff120b4 6423 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6424 }
6425
6426 case 0x9:
6427 {
6428 int rn_f = (bits (insn, 16, 19) == 0xf);
6429 switch ((insn & 0xe00000) >> 21)
6430 {
6431 case 0x1: case 0x3:
6432 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6433 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6434 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6435
6436 case 0x2:
7ff120b4 6437 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6438
6439 case 0x4: case 0x5: case 0x6: case 0x7:
6440 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6441 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6442 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6443
6444 default:
7ff120b4 6445 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6446 }
6447 }
6448
6449 case 0xa:
7ff120b4 6450 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6451
6452 case 0xb:
6453 if (bits (insn, 16, 19) == 0xf)
6454 /* ldc/ldc2 lit. */
7ff120b4 6455 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6456 else
7ff120b4 6457 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6458
6459 case 0xc:
6460 if (bit (insn, 4))
7ff120b4 6461 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6462 else
7ff120b4 6463 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6464
6465 case 0xd:
6466 if (bit (insn, 4))
7ff120b4 6467 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6468 else
7ff120b4 6469 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6470
6471 default:
7ff120b4 6472 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6473 }
6474}
6475
6476/* Decode miscellaneous instructions in dp/misc encoding space. */
6477
6478static int
7ff120b4
YQ
6479arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6480 struct regcache *regs,
cfba9872 6481 arm_displaced_step_closure *dsc)
cca44b1b
JB
6482{
6483 unsigned int op2 = bits (insn, 4, 6);
6484 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6485
6486 switch (op2)
6487 {
6488 case 0x0:
7ff120b4 6489 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6490
6491 case 0x1:
6492 if (op == 0x1) /* bx. */
7ff120b4 6493 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6494 else if (op == 0x3)
7ff120b4 6495 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6496 else
7ff120b4 6497 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6498
6499 case 0x2:
6500 if (op == 0x1)
6501 /* Not really supported. */
7ff120b4 6502 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6503 else
7ff120b4 6504 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6505
6506 case 0x3:
6507 if (op == 0x1)
7ff120b4 6508 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6509 regs, dsc); /* blx register. */
cca44b1b 6510 else
7ff120b4 6511 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6512
6513 case 0x5:
7ff120b4 6514 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6515
6516 case 0x7:
6517 if (op == 0x1)
7ff120b4 6518 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6519 else if (op == 0x3)
6520 /* Not really supported. */
7ff120b4 6521 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
86a73007 6522 /* Fall through. */
cca44b1b
JB
6523
6524 default:
7ff120b4 6525 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6526 }
6527}
6528
6529static int
7ff120b4
YQ
6530arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6531 struct regcache *regs,
cfba9872 6532 arm_displaced_step_closure *dsc)
cca44b1b
JB
6533{
6534 if (bit (insn, 25))
6535 switch (bits (insn, 20, 24))
6536 {
6537 case 0x10:
7ff120b4 6538 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6539
6540 case 0x14:
7ff120b4 6541 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6542
6543 case 0x12: case 0x16:
7ff120b4 6544 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6545
6546 default:
7ff120b4 6547 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6548 }
6549 else
6550 {
6551 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6552
6553 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6554 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6555 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6556 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6557 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6558 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6559 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6560 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6561 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6562 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6563 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6564 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6565 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6566 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6567 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6568 dsc);
cca44b1b
JB
6569 }
6570
6571 /* Should be unreachable. */
6572 return 1;
6573}
6574
6575static int
7ff120b4
YQ
6576arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6577 struct regcache *regs,
cfba9872 6578 arm_displaced_step_closure *dsc)
cca44b1b
JB
6579{
6580 int a = bit (insn, 25), b = bit (insn, 4);
6581 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6582
6583 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6584 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6585 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6586 else if ((!a && (op1 & 0x17) == 0x02)
6587 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6588 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6589 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6590 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6591 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6592 else if ((!a && (op1 & 0x17) == 0x03)
6593 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6594 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6595 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6596 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6597 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6598 else if ((!a && (op1 & 0x17) == 0x06)
6599 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6600 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6601 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6602 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6603 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6604 else if ((!a && (op1 & 0x17) == 0x07)
6605 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6606 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6607
6608 /* Should be unreachable. */
6609 return 1;
6610}
6611
6612static int
7ff120b4 6613arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
cfba9872 6614 arm_displaced_step_closure *dsc)
cca44b1b
JB
6615{
6616 switch (bits (insn, 20, 24))
6617 {
6618 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6619 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6620
6621 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6622 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6623
6624 case 0x08: case 0x09: case 0x0a: case 0x0b:
6625 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6626 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6627 "decode/pack/unpack/saturate/reverse", dsc);
6628
6629 case 0x18:
6630 if (bits (insn, 5, 7) == 0) /* op2. */
6631 {
6632 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6633 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6634 else
7ff120b4 6635 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6636 }
6637 else
7ff120b4 6638 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6639
6640 case 0x1a: case 0x1b:
6641 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6642 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6643 else
7ff120b4 6644 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6645
6646 case 0x1c: case 0x1d:
6647 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6648 {
6649 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6650 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6651 else
7ff120b4 6652 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6653 }
6654 else
7ff120b4 6655 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6656
6657 case 0x1e: case 0x1f:
6658 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6659 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6660 else
7ff120b4 6661 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6662 }
6663
6664 /* Should be unreachable. */
6665 return 1;
6666}
6667
6668static int
615234c1 6669arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6670 struct regcache *regs,
cfba9872 6671 arm_displaced_step_closure *dsc)
cca44b1b
JB
6672{
6673 if (bit (insn, 25))
7ff120b4 6674 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6675 else
7ff120b4 6676 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6677}
6678
6679static int
7ff120b4
YQ
6680arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6681 struct regcache *regs,
cfba9872 6682 arm_displaced_step_closure *dsc)
cca44b1b
JB
6683{
6684 unsigned int opcode = bits (insn, 20, 24);
6685
6686 switch (opcode)
6687 {
6688 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6689 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6690
6691 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6692 case 0x12: case 0x16:
7ff120b4 6693 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6694
6695 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6696 case 0x13: case 0x17:
7ff120b4 6697 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6698
6699 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6700 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6701 /* Note: no writeback for these instructions. Bit 25 will always be
6702 zero though (via caller), so the following works OK. */
7ff120b4 6703 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6704 }
6705
6706 /* Should be unreachable. */
6707 return 1;
6708}
6709
34518530
YQ
6710/* Decode shifted register instructions. */
6711
6712static int
6713thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6714 uint16_t insn2, struct regcache *regs,
cfba9872 6715 arm_displaced_step_closure *dsc)
34518530
YQ
6716{
6717 /* PC is only allowed to be used in instruction MOV. */
6718
6719 unsigned int op = bits (insn1, 5, 8);
6720 unsigned int rn = bits (insn1, 0, 3);
6721
6722 if (op == 0x2 && rn == 0xf) /* MOV */
6723 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6724 else
6725 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6726 "dp (shift reg)", dsc);
6727}
6728
6729
6730/* Decode extension register load/store. Exactly the same as
6731 arm_decode_ext_reg_ld_st. */
6732
6733static int
6734thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6735 uint16_t insn2, struct regcache *regs,
cfba9872 6736 arm_displaced_step_closure *dsc)
34518530
YQ
6737{
6738 unsigned int opcode = bits (insn1, 4, 8);
6739
6740 switch (opcode)
6741 {
6742 case 0x04: case 0x05:
6743 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6744 "vfp/neon vmov", dsc);
6745
6746 case 0x08: case 0x0c: /* 01x00 */
6747 case 0x0a: case 0x0e: /* 01x10 */
6748 case 0x12: case 0x16: /* 10x10 */
6749 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6750 "vfp/neon vstm/vpush", dsc);
6751
6752 case 0x09: case 0x0d: /* 01x01 */
6753 case 0x0b: case 0x0f: /* 01x11 */
6754 case 0x13: case 0x17: /* 10x11 */
6755 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6756 "vfp/neon vldm/vpop", dsc);
6757
6758 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6759 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6760 "vstr", dsc);
6761 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6762 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6763 }
6764
6765 /* Should be unreachable. */
6766 return 1;
6767}
6768
cca44b1b 6769static int
12545665 6770arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
cfba9872 6771 struct regcache *regs, arm_displaced_step_closure *dsc)
cca44b1b
JB
6772{
6773 unsigned int op1 = bits (insn, 20, 25);
6774 int op = bit (insn, 4);
6775 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6776
6777 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6778 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6779 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6780 && (coproc & 0xe) != 0xa)
6781 /* stc/stc2. */
7ff120b4 6782 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6783 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6784 && (coproc & 0xe) != 0xa)
6785 /* ldc/ldc2 imm/lit. */
7ff120b4 6786 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6787 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6788 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6789 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6790 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6791 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6792 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6793 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6794 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6795 else if ((op1 & 0x30) == 0x20 && !op)
6796 {
6797 if ((coproc & 0xe) == 0xa)
7ff120b4 6798 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6799 else
7ff120b4 6800 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6801 }
6802 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6803 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6804 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6805 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6806 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6807 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6808 else if ((op1 & 0x30) == 0x30)
7ff120b4 6809 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6810 else
7ff120b4 6811 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6812}
6813
34518530
YQ
6814static int
6815thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6816 uint16_t insn2, struct regcache *regs,
cfba9872 6817 arm_displaced_step_closure *dsc)
34518530
YQ
6818{
6819 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6820 unsigned int bit_5_8 = bits (insn1, 5, 8);
6821 unsigned int bit_9 = bit (insn1, 9);
6822 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6823
6824 if (bit_9 == 0)
6825 {
6826 if (bit_5_8 == 2)
6827 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6828 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6829 dsc);
6830 else if (bit_5_8 == 0) /* UNDEFINED. */
6831 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6832 else
6833 {
6834 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6835 if ((coproc & 0xe) == 0xa)
6836 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6837 dsc);
6838 else /* coproc is not 101x. */
6839 {
6840 if (bit_4 == 0) /* STC/STC2. */
6841 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6842 "stc/stc2", dsc);
6843 else /* LDC/LDC2 {literal, immeidate}. */
6844 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6845 regs, dsc);
6846 }
6847 }
6848 }
6849 else
6850 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6851
6852 return 0;
6853}
6854
6855static void
6856install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 6857 arm_displaced_step_closure *dsc, int rd)
34518530
YQ
6858{
6859 /* ADR Rd, #imm
6860
6861 Rewrite as:
6862
6863 Preparation: Rd <- PC
6864 Insn: ADD Rd, #imm
6865 Cleanup: Null.
6866 */
6867
6868 /* Rd <- PC */
6869 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6870 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6871}
6872
6873static int
6874thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 6875 arm_displaced_step_closure *dsc,
34518530
YQ
6876 int rd, unsigned int imm)
6877{
6878
6879 /* Encoding T2: ADDS Rd, #imm */
6880 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6881
6882 install_pc_relative (gdbarch, regs, dsc, rd);
6883
6884 return 0;
6885}
6886
6887static int
6888thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6889 struct regcache *regs,
cfba9872 6890 arm_displaced_step_closure *dsc)
34518530
YQ
6891{
6892 unsigned int rd = bits (insn, 8, 10);
6893 unsigned int imm8 = bits (insn, 0, 7);
6894
6895 if (debug_displaced)
6896 fprintf_unfiltered (gdb_stdlog,
6897 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6898 rd, imm8, insn);
6899
6900 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6901}
6902
6903static int
6904thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6905 uint16_t insn2, struct regcache *regs,
cfba9872 6906 arm_displaced_step_closure *dsc)
34518530
YQ
6907{
6908 unsigned int rd = bits (insn2, 8, 11);
6909 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6910 extract raw immediate encoding rather than computing immediate. When
6911 generating ADD or SUB instruction, we can simply perform OR operation to
6912 set immediate into ADD. */
6913 unsigned int imm_3_8 = insn2 & 0x70ff;
6914 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6915
6916 if (debug_displaced)
6917 fprintf_unfiltered (gdb_stdlog,
6918 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6919 rd, imm_i, imm_3_8, insn1, insn2);
6920
6921 if (bit (insn1, 7)) /* Encoding T2 */
6922 {
6923 /* Encoding T3: SUB Rd, Rd, #imm */
6924 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6925 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6926 }
6927 else /* Encoding T3 */
6928 {
6929 /* Encoding T3: ADD Rd, Rd, #imm */
6930 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6931 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6932 }
6933 dsc->numinsns = 2;
6934
6935 install_pc_relative (gdbarch, regs, dsc, rd);
6936
6937 return 0;
6938}
6939
6940static int
615234c1 6941thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530 6942 struct regcache *regs,
cfba9872 6943 arm_displaced_step_closure *dsc)
34518530
YQ
6944{
6945 unsigned int rt = bits (insn1, 8, 10);
6946 unsigned int pc;
6947 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
6948
6949 /* LDR Rd, #imm8
6950
6951 Rwrite as:
6952
6953 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6954
6955 Insn: LDR R0, [R2, R3];
6956 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6957
6958 if (debug_displaced)
6959 fprintf_unfiltered (gdb_stdlog,
6960 "displaced: copying thumb ldr r%d [pc #%d]\n"
6961 , rt, imm8);
6962
6963 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6964 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6965 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6966 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6967 /* The assembler calculates the required value of the offset from the
6968 Align(PC,4) value of this instruction to the label. */
6969 pc = pc & 0xfffffffc;
6970
6971 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6972 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6973
6974 dsc->rd = rt;
6975 dsc->u.ldst.xfersize = 4;
6976 dsc->u.ldst.rn = 0;
6977 dsc->u.ldst.immed = 0;
6978 dsc->u.ldst.writeback = 0;
6979 dsc->u.ldst.restore_r4 = 0;
6980
6981 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6982
6983 dsc->cleanup = &cleanup_load;
6984
6985 return 0;
6986}
6987
6988/* Copy Thumb cbnz/cbz insruction. */
6989
6990static int
6991thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6992 struct regcache *regs,
cfba9872 6993 arm_displaced_step_closure *dsc)
34518530
YQ
6994{
6995 int non_zero = bit (insn1, 11);
6996 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6997 CORE_ADDR from = dsc->insn_addr;
6998 int rn = bits (insn1, 0, 2);
6999 int rn_val = displaced_read_reg (regs, dsc, rn);
7000
7001 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7002 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7003 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7004 condition is false, let it be, cleanup_branch will do nothing. */
7005 if (dsc->u.branch.cond)
7006 {
7007 dsc->u.branch.cond = INST_AL;
7008 dsc->u.branch.dest = from + 4 + imm5;
7009 }
7010 else
7011 dsc->u.branch.dest = from + 2;
7012
7013 dsc->u.branch.link = 0;
7014 dsc->u.branch.exchange = 0;
7015
7016 if (debug_displaced)
7017 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7018 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7019 rn, rn_val, insn1, dsc->u.branch.dest);
7020
7021 dsc->modinsn[0] = THUMB_NOP;
7022
7023 dsc->cleanup = &cleanup_branch;
7024 return 0;
7025}
7026
7027/* Copy Table Branch Byte/Halfword */
7028static int
7029thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7030 uint16_t insn2, struct regcache *regs,
cfba9872 7031 arm_displaced_step_closure *dsc)
34518530
YQ
7032{
7033 ULONGEST rn_val, rm_val;
7034 int is_tbh = bit (insn2, 4);
7035 CORE_ADDR halfwords = 0;
7036 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7037
7038 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7039 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7040
7041 if (is_tbh)
7042 {
7043 gdb_byte buf[2];
7044
7045 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7046 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7047 }
7048 else
7049 {
7050 gdb_byte buf[1];
7051
7052 target_read_memory (rn_val + rm_val, buf, 1);
7053 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7054 }
7055
7056 if (debug_displaced)
7057 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7058 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7059 (unsigned int) rn_val, (unsigned int) rm_val,
7060 (unsigned int) halfwords);
7061
7062 dsc->u.branch.cond = INST_AL;
7063 dsc->u.branch.link = 0;
7064 dsc->u.branch.exchange = 0;
7065 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7066
7067 dsc->cleanup = &cleanup_branch;
7068
7069 return 0;
7070}
7071
7072static void
7073cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
cfba9872 7074 arm_displaced_step_closure *dsc)
34518530
YQ
7075{
7076 /* PC <- r7 */
7077 int val = displaced_read_reg (regs, dsc, 7);
7078 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7079
7080 /* r7 <- r8 */
7081 val = displaced_read_reg (regs, dsc, 8);
7082 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7083
7084 /* r8 <- tmp[0] */
7085 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7086
7087}
7088
7089static int
615234c1 7090thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530 7091 struct regcache *regs,
cfba9872 7092 arm_displaced_step_closure *dsc)
34518530
YQ
7093{
7094 dsc->u.block.regmask = insn1 & 0x00ff;
7095
7096 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7097 to :
7098
7099 (1) register list is full, that is, r0-r7 are used.
7100 Prepare: tmp[0] <- r8
7101
7102 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7103 MOV r8, r7; Move value of r7 to r8;
7104 POP {r7}; Store PC value into r7.
7105
7106 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7107
7108 (2) register list is not full, supposing there are N registers in
7109 register list (except PC, 0 <= N <= 7).
7110 Prepare: for each i, 0 - N, tmp[i] <- ri.
7111
7112 POP {r0, r1, ...., rN};
7113
7114 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7115 from tmp[] properly.
7116 */
7117 if (debug_displaced)
7118 fprintf_unfiltered (gdb_stdlog,
7119 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7120 dsc->u.block.regmask, insn1);
7121
7122 if (dsc->u.block.regmask == 0xff)
7123 {
7124 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7125
7126 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7127 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7128 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7129
7130 dsc->numinsns = 3;
7131 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7132 }
7133 else
7134 {
7135 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7136 unsigned int i;
7137 unsigned int new_regmask;
34518530
YQ
7138
7139 for (i = 0; i < num_in_list + 1; i++)
7140 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7141
7142 new_regmask = (1 << (num_in_list + 1)) - 1;
7143
7144 if (debug_displaced)
7145 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7146 "{..., pc}: original reg list %.4x,"
7147 " modified list %.4x\n"),
7148 (int) dsc->u.block.regmask, new_regmask);
7149
7150 dsc->u.block.regmask |= 0x8000;
7151 dsc->u.block.writeback = 0;
7152 dsc->u.block.cond = INST_AL;
7153
7154 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7155
7156 dsc->cleanup = &cleanup_block_load_pc;
7157 }
7158
7159 return 0;
7160}
7161
7162static void
7163thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7164 struct regcache *regs,
cfba9872 7165 arm_displaced_step_closure *dsc)
34518530
YQ
7166{
7167 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7168 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7169 int err = 0;
7170
7171 /* 16-bit thumb instructions. */
7172 switch (op_bit_12_15)
7173 {
7174 /* Shift (imme), add, subtract, move and compare. */
7175 case 0: case 1: case 2: case 3:
7176 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7177 "shift/add/sub/mov/cmp",
7178 dsc);
7179 break;
7180 case 4:
7181 switch (op_bit_10_11)
7182 {
7183 case 0: /* Data-processing */
7184 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7185 "data-processing",
7186 dsc);
7187 break;
7188 case 1: /* Special data instructions and branch and exchange. */
7189 {
7190 unsigned short op = bits (insn1, 7, 9);
7191 if (op == 6 || op == 7) /* BX or BLX */
7192 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7193 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7194 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7195 else
7196 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7197 dsc);
7198 }
7199 break;
7200 default: /* LDR (literal) */
7201 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7202 }
7203 break;
7204 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7205 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7206 break;
7207 case 10:
7208 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7209 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7210 else /* Generate SP-relative address */
7211 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7212 break;
7213 case 11: /* Misc 16-bit instructions */
7214 {
7215 switch (bits (insn1, 8, 11))
7216 {
7217 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7218 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7219 break;
7220 case 12: case 13: /* POP */
7221 if (bit (insn1, 8)) /* PC is in register list. */
7222 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7223 else
7224 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7225 break;
7226 case 15: /* If-Then, and hints */
7227 if (bits (insn1, 0, 3))
7228 /* If-Then makes up to four following instructions conditional.
7229 IT instruction itself is not conditional, so handle it as a
7230 common unmodified instruction. */
7231 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7232 dsc);
7233 else
7234 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7235 break;
7236 default:
7237 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7238 }
7239 }
7240 break;
7241 case 12:
7242 if (op_bit_10_11 < 2) /* Store multiple registers */
7243 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7244 else /* Load multiple registers */
7245 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7246 break;
7247 case 13: /* Conditional branch and supervisor call */
7248 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7249 err = thumb_copy_b (gdbarch, insn1, dsc);
7250 else
7251 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7252 break;
7253 case 14: /* Unconditional branch */
7254 err = thumb_copy_b (gdbarch, insn1, dsc);
7255 break;
7256 default:
7257 err = 1;
7258 }
7259
7260 if (err)
7261 internal_error (__FILE__, __LINE__,
7262 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7263}
7264
7265static int
7266decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7267 uint16_t insn1, uint16_t insn2,
7268 struct regcache *regs,
cfba9872 7269 arm_displaced_step_closure *dsc)
34518530
YQ
7270{
7271 int rt = bits (insn2, 12, 15);
7272 int rn = bits (insn1, 0, 3);
7273 int op1 = bits (insn1, 7, 8);
34518530
YQ
7274
7275 switch (bits (insn1, 5, 6))
7276 {
7277 case 0: /* Load byte and memory hints */
7278 if (rt == 0xf) /* PLD/PLI */
7279 {
7280 if (rn == 0xf)
7281 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7282 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7283 else
7284 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7285 "pli/pld", dsc);
7286 }
7287 else
7288 {
7289 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7290 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7291 1);
7292 else
7293 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7294 "ldrb{reg, immediate}/ldrbt",
7295 dsc);
7296 }
7297
7298 break;
7299 case 1: /* Load halfword and memory hints. */
7300 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7301 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7302 "pld/unalloc memhint", dsc);
7303 else
7304 {
7305 if (rn == 0xf)
7306 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7307 2);
7308 else
7309 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7310 "ldrh/ldrht", dsc);
7311 }
7312 break;
7313 case 2: /* Load word */
7314 {
7315 int insn2_bit_8_11 = bits (insn2, 8, 11);
7316
7317 if (rn == 0xf)
7318 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7319 else if (op1 == 0x1) /* Encoding T3 */
7320 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7321 0, 1);
7322 else /* op1 == 0x0 */
7323 {
7324 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7325 /* LDR (immediate) */
7326 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7327 dsc, bit (insn2, 8), 1);
7328 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7329 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7330 "ldrt", dsc);
7331 else
7332 /* LDR (register) */
7333 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7334 dsc, 0, 0);
7335 }
7336 break;
7337 }
7338 default:
7339 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7340 break;
7341 }
7342 return 0;
7343}
7344
7345static void
7346thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7347 uint16_t insn2, struct regcache *regs,
cfba9872 7348 arm_displaced_step_closure *dsc)
34518530
YQ
7349{
7350 int err = 0;
7351 unsigned short op = bit (insn2, 15);
7352 unsigned int op1 = bits (insn1, 11, 12);
7353
7354 switch (op1)
7355 {
7356 case 1:
7357 {
7358 switch (bits (insn1, 9, 10))
7359 {
7360 case 0:
7361 if (bit (insn1, 6))
7362 {
7363 /* Load/store {dual, execlusive}, table branch. */
7364 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7365 && bits (insn2, 5, 7) == 0)
7366 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7367 dsc);
7368 else
7369 /* PC is not allowed to use in load/store {dual, exclusive}
7370 instructions. */
7371 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7372 "load/store dual/ex", dsc);
7373 }
7374 else /* load/store multiple */
7375 {
7376 switch (bits (insn1, 7, 8))
7377 {
7378 case 0: case 3: /* SRS, RFE */
7379 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7380 "srs/rfe", dsc);
7381 break;
7382 case 1: case 2: /* LDM/STM/PUSH/POP */
7383 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7384 break;
7385 }
7386 }
7387 break;
7388
7389 case 1:
7390 /* Data-processing (shift register). */
7391 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7392 dsc);
7393 break;
7394 default: /* Coprocessor instructions. */
7395 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7396 break;
7397 }
7398 break;
7399 }
7400 case 2: /* op1 = 2 */
7401 if (op) /* Branch and misc control. */
7402 {
7403 if (bit (insn2, 14) /* BLX/BL */
7404 || bit (insn2, 12) /* Unconditional branch */
7405 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7406 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7407 else
7408 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7409 "misc ctrl", dsc);
7410 }
7411 else
7412 {
7413 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7414 {
b926417a 7415 int dp_op = bits (insn1, 4, 8);
34518530 7416 int rn = bits (insn1, 0, 3);
b926417a 7417 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
34518530
YQ
7418 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7419 regs, dsc);
7420 else
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 "dp/pb", dsc);
7423 }
7424 else /* Data processing (modified immeidate) */
7425 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7426 "dp/mi", dsc);
7427 }
7428 break;
7429 case 3: /* op1 = 3 */
7430 switch (bits (insn1, 9, 10))
7431 {
7432 case 0:
7433 if (bit (insn1, 4))
7434 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7435 regs, dsc);
7436 else /* NEON Load/Store and Store single data item */
7437 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7438 "neon elt/struct load/store",
7439 dsc);
7440 break;
7441 case 1: /* op1 = 3, bits (9, 10) == 1 */
7442 switch (bits (insn1, 7, 8))
7443 {
7444 case 0: case 1: /* Data processing (register) */
7445 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7446 "dp(reg)", dsc);
7447 break;
7448 case 2: /* Multiply and absolute difference */
7449 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7450 "mul/mua/diff", dsc);
7451 break;
7452 case 3: /* Long multiply and divide */
7453 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7454 "lmul/lmua", dsc);
7455 break;
7456 }
7457 break;
7458 default: /* Coprocessor instructions */
7459 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7460 break;
7461 }
7462 break;
7463 default:
7464 err = 1;
7465 }
7466
7467 if (err)
7468 internal_error (__FILE__, __LINE__,
7469 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7470
7471}
7472
b434a28f
YQ
7473static void
7474thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7475 struct regcache *regs,
cfba9872 7476 arm_displaced_step_closure *dsc)
b434a28f 7477{
34518530
YQ
7478 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7479 uint16_t insn1
7480 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7481
7482 if (debug_displaced)
7483 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7484 "at %.8lx\n", insn1, (unsigned long) from);
7485
7486 dsc->is_thumb = 1;
7487 dsc->insn_size = thumb_insn_size (insn1);
7488 if (thumb_insn_size (insn1) == 4)
7489 {
7490 uint16_t insn2
7491 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7492 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7493 }
7494 else
7495 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7496}
7497
cca44b1b 7498void
b434a28f
YQ
7499arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7500 CORE_ADDR to, struct regcache *regs,
cfba9872 7501 arm_displaced_step_closure *dsc)
cca44b1b
JB
7502{
7503 int err = 0;
b434a28f
YQ
7504 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7505 uint32_t insn;
cca44b1b
JB
7506
7507 /* Most displaced instructions use a 1-instruction scratch space, so set this
7508 here and override below if/when necessary. */
7509 dsc->numinsns = 1;
7510 dsc->insn_addr = from;
7511 dsc->scratch_base = to;
7512 dsc->cleanup = NULL;
7513 dsc->wrote_to_pc = 0;
7514
b434a28f 7515 if (!displaced_in_arm_mode (regs))
12545665 7516 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7517
4db71c0b
YQ
7518 dsc->is_thumb = 0;
7519 dsc->insn_size = 4;
b434a28f
YQ
7520 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7521 if (debug_displaced)
7522 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7523 "at %.8lx\n", (unsigned long) insn,
7524 (unsigned long) from);
7525
cca44b1b 7526 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7527 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7528 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7529 {
7530 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7531 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7532 break;
7533
7534 case 0x4: case 0x5: case 0x6:
7ff120b4 7535 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7536 break;
7537
7538 case 0x7:
7ff120b4 7539 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7540 break;
7541
7542 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7543 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7544 break;
7545
7546 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7547 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7548 break;
7549 }
7550
7551 if (err)
7552 internal_error (__FILE__, __LINE__,
7553 _("arm_process_displaced_insn: Instruction decode error"));
7554}
7555
7556/* Actually set up the scratch space for a displaced instruction. */
7557
7558void
7559arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
cfba9872 7560 CORE_ADDR to, arm_displaced_step_closure *dsc)
cca44b1b
JB
7561{
7562 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7563 unsigned int i, len, offset;
cca44b1b 7564 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7565 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7566 const gdb_byte *bkp_insn;
cca44b1b 7567
4db71c0b 7568 offset = 0;
cca44b1b
JB
7569 /* Poke modified instruction(s). */
7570 for (i = 0; i < dsc->numinsns; i++)
7571 {
7572 if (debug_displaced)
4db71c0b
YQ
7573 {
7574 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7575 if (size == 4)
7576 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7577 dsc->modinsn[i]);
7578 else if (size == 2)
7579 fprintf_unfiltered (gdb_stdlog, "%.4x",
7580 (unsigned short)dsc->modinsn[i]);
7581
7582 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7583 (unsigned long) to + offset);
7584
7585 }
7586 write_memory_unsigned_integer (to + offset, size,
7587 byte_order_for_code,
cca44b1b 7588 dsc->modinsn[i]);
4db71c0b
YQ
7589 offset += size;
7590 }
7591
7592 /* Choose the correct breakpoint instruction. */
7593 if (dsc->is_thumb)
7594 {
7595 bkp_insn = tdep->thumb_breakpoint;
7596 len = tdep->thumb_breakpoint_size;
7597 }
7598 else
7599 {
7600 bkp_insn = tdep->arm_breakpoint;
7601 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7602 }
7603
7604 /* Put breakpoint afterwards. */
4db71c0b 7605 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7606
7607 if (debug_displaced)
7608 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7609 paddress (gdbarch, from), paddress (gdbarch, to));
7610}
7611
cca44b1b
JB
7612/* Entry point for cleaning things up after a displaced instruction has been
7613 single-stepped. */
7614
7615void
7616arm_displaced_step_fixup (struct gdbarch *gdbarch,
cfba9872 7617 struct displaced_step_closure *dsc_,
cca44b1b
JB
7618 CORE_ADDR from, CORE_ADDR to,
7619 struct regcache *regs)
7620{
cfba9872
SM
7621 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7622
cca44b1b
JB
7623 if (dsc->cleanup)
7624 dsc->cleanup (gdbarch, regs, dsc);
7625
7626 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7627 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7628 dsc->insn_addr + dsc->insn_size);
7629
cca44b1b
JB
7630}
7631
7632#include "bfd-in2.h"
7633#include "libcoff.h"
7634
7635static int
7636gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7637{
e47ad6c0
YQ
7638 gdb_disassembler *di
7639 = static_cast<gdb_disassembler *>(info->application_data);
7640 struct gdbarch *gdbarch = di->arch ();
9779414d
DJ
7641
7642 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7643 {
7644 static asymbol *asym;
7645 static combined_entry_type ce;
7646 static struct coff_symbol_struct csym;
7647 static struct bfd fake_bfd;
7648 static bfd_target fake_target;
7649
7650 if (csym.native == NULL)
7651 {
7652 /* Create a fake symbol vector containing a Thumb symbol.
7653 This is solely so that the code in print_insn_little_arm()
7654 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7655 the presence of a Thumb symbol and switch to decoding
7656 Thumb instructions. */
7657
7658 fake_target.flavour = bfd_target_coff_flavour;
7659 fake_bfd.xvec = &fake_target;
7660 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7661 csym.native = &ce;
7662 csym.symbol.the_bfd = &fake_bfd;
7663 csym.symbol.name = "fake";
7664 asym = (asymbol *) & csym;
7665 }
7666
7667 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7668 info->symbols = &asym;
7669 }
7670 else
7671 info->symbols = NULL;
7672
e60eb288
YQ
7673 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7674 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7675 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7676 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7677 in default_print_insn. */
7678 if (exec_bfd != NULL)
7679 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7680
6394c606 7681 return default_print_insn (memaddr, info);
cca44b1b
JB
7682}
7683
7684/* The following define instruction sequences that will cause ARM
7685 cpu's to take an undefined instruction trap. These are used to
7686 signal a breakpoint to GDB.
7687
7688 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7689 modes. A different instruction is required for each mode. The ARM
7690 cpu's can also be big or little endian. Thus four different
7691 instructions are needed to support all cases.
7692
7693 Note: ARMv4 defines several new instructions that will take the
7694 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7695 not in fact add the new instructions. The new undefined
7696 instructions in ARMv4 are all instructions that had no defined
7697 behaviour in earlier chips. There is no guarantee that they will
7698 raise an exception, but may be treated as NOP's. In practice, it
7699 may only safe to rely on instructions matching:
7700
7701 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7702 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7703 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7704
0963b4bd 7705 Even this may only true if the condition predicate is true. The
cca44b1b
JB
7706 following use a condition predicate of ALWAYS so it is always TRUE.
7707
7708 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7709 and NetBSD all use a software interrupt rather than an undefined
7710 instruction to force a trap. This can be handled by by the
7711 abi-specific code during establishment of the gdbarch vector. */
7712
7713#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7714#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7715#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7716#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7717
948f8e3d
PA
7718static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7719static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7720static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7721static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7722
cd6c3b4f
YQ
7723/* Implement the breakpoint_kind_from_pc gdbarch method. */
7724
d19280ad
YQ
7725static int
7726arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
cca44b1b
JB
7727{
7728 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7729 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7730
9779414d 7731 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7732 {
7733 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7734
7735 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7736 check whether we are replacing a 32-bit instruction. */
7737 if (tdep->thumb2_breakpoint != NULL)
7738 {
7739 gdb_byte buf[2];
d19280ad 7740
177321bd
DJ
7741 if (target_read_memory (*pcptr, buf, 2) == 0)
7742 {
7743 unsigned short inst1;
d19280ad 7744
177321bd 7745 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7746 if (thumb_insn_size (inst1) == 4)
d19280ad 7747 return ARM_BP_KIND_THUMB2;
177321bd
DJ
7748 }
7749 }
7750
d19280ad 7751 return ARM_BP_KIND_THUMB;
cca44b1b
JB
7752 }
7753 else
d19280ad
YQ
7754 return ARM_BP_KIND_ARM;
7755
7756}
7757
cd6c3b4f
YQ
7758/* Implement the sw_breakpoint_from_kind gdbarch method. */
7759
d19280ad
YQ
7760static const gdb_byte *
7761arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7762{
7763 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7764
7765 switch (kind)
cca44b1b 7766 {
d19280ad
YQ
7767 case ARM_BP_KIND_ARM:
7768 *size = tdep->arm_breakpoint_size;
cca44b1b 7769 return tdep->arm_breakpoint;
d19280ad
YQ
7770 case ARM_BP_KIND_THUMB:
7771 *size = tdep->thumb_breakpoint_size;
7772 return tdep->thumb_breakpoint;
7773 case ARM_BP_KIND_THUMB2:
7774 *size = tdep->thumb2_breakpoint_size;
7775 return tdep->thumb2_breakpoint;
7776 default:
7777 gdb_assert_not_reached ("unexpected arm breakpoint kind");
cca44b1b
JB
7778 }
7779}
7780
833b7ab5
YQ
7781/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7782
7783static int
7784arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7785 struct regcache *regcache,
7786 CORE_ADDR *pcptr)
7787{
7788 gdb_byte buf[4];
7789
7790 /* Check the memory pointed by PC is readable. */
7791 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7792 {
7793 struct arm_get_next_pcs next_pcs_ctx;
833b7ab5
YQ
7794
7795 arm_get_next_pcs_ctor (&next_pcs_ctx,
7796 &arm_get_next_pcs_ops,
7797 gdbarch_byte_order (gdbarch),
7798 gdbarch_byte_order_for_code (gdbarch),
7799 0,
7800 regcache);
7801
a0ff9e1a 7802 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
833b7ab5
YQ
7803
7804 /* If MEMADDR is the next instruction of current pc, do the
7805 software single step computation, and get the thumb mode by
7806 the destination address. */
a0ff9e1a 7807 for (CORE_ADDR pc : next_pcs)
833b7ab5
YQ
7808 {
7809 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7810 {
833b7ab5
YQ
7811 if (IS_THUMB_ADDR (pc))
7812 {
7813 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7814 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7815 }
7816 else
7817 return ARM_BP_KIND_ARM;
7818 }
7819 }
833b7ab5
YQ
7820 }
7821
7822 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7823}
7824
cca44b1b
JB
7825/* Extract from an array REGBUF containing the (raw) register state a
7826 function return value of type TYPE, and copy that, in virtual
7827 format, into VALBUF. */
7828
7829static void
7830arm_extract_return_value (struct type *type, struct regcache *regs,
7831 gdb_byte *valbuf)
7832{
ac7936df 7833 struct gdbarch *gdbarch = regs->arch ();
cca44b1b
JB
7834 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7835
7836 if (TYPE_CODE_FLT == TYPE_CODE (type))
7837 {
7838 switch (gdbarch_tdep (gdbarch)->fp_model)
7839 {
7840 case ARM_FLOAT_FPA:
7841 {
7842 /* The value is in register F0 in internal format. We need to
7843 extract the raw value and then convert it to the desired
7844 internal type. */
f0452268 7845 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
cca44b1b 7846
dca08e1f 7847 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
3b2ca824
UW
7848 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7849 valbuf, type);
cca44b1b
JB
7850 }
7851 break;
7852
7853 case ARM_FLOAT_SOFT_FPA:
7854 case ARM_FLOAT_SOFT_VFP:
7855 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7856 not using the VFP ABI code. */
7857 case ARM_FLOAT_VFP:
dca08e1f 7858 regs->cooked_read (ARM_A1_REGNUM, valbuf);
cca44b1b 7859 if (TYPE_LENGTH (type) > 4)
f0452268
AH
7860 regs->cooked_read (ARM_A1_REGNUM + 1,
7861 valbuf + ARM_INT_REGISTER_SIZE);
cca44b1b
JB
7862 break;
7863
7864 default:
0963b4bd
MS
7865 internal_error (__FILE__, __LINE__,
7866 _("arm_extract_return_value: "
7867 "Floating point model not supported"));
cca44b1b
JB
7868 break;
7869 }
7870 }
7871 else if (TYPE_CODE (type) == TYPE_CODE_INT
7872 || TYPE_CODE (type) == TYPE_CODE_CHAR
7873 || TYPE_CODE (type) == TYPE_CODE_BOOL
7874 || TYPE_CODE (type) == TYPE_CODE_PTR
aa006118 7875 || TYPE_IS_REFERENCE (type)
cca44b1b
JB
7876 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7877 {
b021a221
MS
7878 /* If the type is a plain integer, then the access is
7879 straight-forward. Otherwise we have to play around a bit
7880 more. */
cca44b1b
JB
7881 int len = TYPE_LENGTH (type);
7882 int regno = ARM_A1_REGNUM;
7883 ULONGEST tmp;
7884
7885 while (len > 0)
7886 {
7887 /* By using store_unsigned_integer we avoid having to do
7888 anything special for small big-endian values. */
7889 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7890 store_unsigned_integer (valbuf,
f0452268
AH
7891 (len > ARM_INT_REGISTER_SIZE
7892 ? ARM_INT_REGISTER_SIZE : len),
cca44b1b 7893 byte_order, tmp);
f0452268
AH
7894 len -= ARM_INT_REGISTER_SIZE;
7895 valbuf += ARM_INT_REGISTER_SIZE;
cca44b1b
JB
7896 }
7897 }
7898 else
7899 {
7900 /* For a structure or union the behaviour is as if the value had
7901 been stored to word-aligned memory and then loaded into
7902 registers with 32-bit load instruction(s). */
7903 int len = TYPE_LENGTH (type);
7904 int regno = ARM_A1_REGNUM;
f0452268 7905 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
cca44b1b
JB
7906
7907 while (len > 0)
7908 {
dca08e1f 7909 regs->cooked_read (regno++, tmpbuf);
cca44b1b 7910 memcpy (valbuf, tmpbuf,
f0452268
AH
7911 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7912 len -= ARM_INT_REGISTER_SIZE;
7913 valbuf += ARM_INT_REGISTER_SIZE;
cca44b1b
JB
7914 }
7915 }
7916}
7917
7918
7919/* Will a function return an aggregate type in memory or in a
7920 register? Return 0 if an aggregate type can be returned in a
7921 register, 1 if it must be returned in memory. */
7922
7923static int
7924arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7925{
cca44b1b
JB
7926 enum type_code code;
7927
f168693b 7928 type = check_typedef (type);
cca44b1b 7929
b13c8ab2
YQ
7930 /* Simple, non-aggregate types (ie not including vectors and
7931 complex) are always returned in a register (or registers). */
7932 code = TYPE_CODE (type);
7933 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7934 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7935 return 0;
cca44b1b 7936
c4312b19
YQ
7937 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7938 {
7939 /* Vector values should be returned using ARM registers if they
7940 are not over 16 bytes. */
7941 return (TYPE_LENGTH (type) > 16);
7942 }
7943
b13c8ab2 7944 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 7945 {
b13c8ab2
YQ
7946 /* The AAPCS says all aggregates not larger than a word are returned
7947 in a register. */
f0452268 7948 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
b13c8ab2
YQ
7949 return 0;
7950
cca44b1b
JB
7951 return 1;
7952 }
b13c8ab2
YQ
7953 else
7954 {
7955 int nRc;
cca44b1b 7956
b13c8ab2
YQ
7957 /* All aggregate types that won't fit in a register must be returned
7958 in memory. */
f0452268 7959 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
b13c8ab2 7960 return 1;
cca44b1b 7961
b13c8ab2
YQ
7962 /* In the ARM ABI, "integer" like aggregate types are returned in
7963 registers. For an aggregate type to be integer like, its size
f0452268 7964 must be less than or equal to ARM_INT_REGISTER_SIZE and the
b13c8ab2
YQ
7965 offset of each addressable subfield must be zero. Note that bit
7966 fields are not addressable, and all addressable subfields of
7967 unions always start at offset zero.
cca44b1b 7968
b13c8ab2
YQ
7969 This function is based on the behaviour of GCC 2.95.1.
7970 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 7971
b13c8ab2
YQ
7972 Note: All versions of GCC before GCC 2.95.2 do not set up the
7973 parameters correctly for a function returning the following
7974 structure: struct { float f;}; This should be returned in memory,
7975 not a register. Richard Earnshaw sent me a patch, but I do not
7976 know of any way to detect if a function like the above has been
7977 compiled with the correct calling convention. */
7978
7979 /* Assume all other aggregate types can be returned in a register.
7980 Run a check for structures, unions and arrays. */
7981 nRc = 0;
67255d04 7982
b13c8ab2
YQ
7983 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7984 {
7985 int i;
7986 /* Need to check if this struct/union is "integer" like. For
7987 this to be true, its size must be less than or equal to
f0452268 7988 ARM_INT_REGISTER_SIZE and the offset of each addressable
b13c8ab2
YQ
7989 subfield must be zero. Note that bit fields are not
7990 addressable, and unions always start at offset zero. If any
7991 of the subfields is a floating point type, the struct/union
7992 cannot be an integer type. */
7993
7994 /* For each field in the object, check:
7995 1) Is it FP? --> yes, nRc = 1;
7996 2) Is it addressable (bitpos != 0) and
7997 not packed (bitsize == 0)?
7998 --> yes, nRc = 1
7999 */
8000
8001 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8002 {
b13c8ab2
YQ
8003 enum type_code field_type_code;
8004
8005 field_type_code
8006 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8007 i)));
8008
8009 /* Is it a floating point type field? */
8010 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8011 {
8012 nRc = 1;
8013 break;
8014 }
b13c8ab2
YQ
8015
8016 /* If bitpos != 0, then we have to care about it. */
8017 if (TYPE_FIELD_BITPOS (type, i) != 0)
8018 {
8019 /* Bitfields are not addressable. If the field bitsize is
8020 zero, then the field is not packed. Hence it cannot be
8021 a bitfield or any other packed type. */
8022 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8023 {
8024 nRc = 1;
8025 break;
8026 }
8027 }
67255d04
RE
8028 }
8029 }
67255d04 8030
b13c8ab2
YQ
8031 return nRc;
8032 }
67255d04
RE
8033}
8034
34e8f22d
RE
8035/* Write into appropriate registers a function return value of type
8036 TYPE, given in virtual format. */
8037
8038static void
b508a996 8039arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8040 const gdb_byte *valbuf)
34e8f22d 8041{
ac7936df 8042 struct gdbarch *gdbarch = regs->arch ();
e17a4113 8043 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8044
34e8f22d
RE
8045 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8046 {
f0452268 8047 gdb_byte buf[ARM_FP_REGISTER_SIZE];
34e8f22d 8048
be8626e0 8049 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8050 {
8051 case ARM_FLOAT_FPA:
8052
3b2ca824 8053 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
b66f5587 8054 regs->cooked_write (ARM_F0_REGNUM, buf);
08216dd7
RE
8055 break;
8056
fd50bc42 8057 case ARM_FLOAT_SOFT_FPA:
08216dd7 8058 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8059 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8060 not using the VFP ABI code. */
8061 case ARM_FLOAT_VFP:
b66f5587 8062 regs->cooked_write (ARM_A1_REGNUM, valbuf);
b508a996 8063 if (TYPE_LENGTH (type) > 4)
f0452268
AH
8064 regs->cooked_write (ARM_A1_REGNUM + 1,
8065 valbuf + ARM_INT_REGISTER_SIZE);
08216dd7
RE
8066 break;
8067
8068 default:
9b20d036
MS
8069 internal_error (__FILE__, __LINE__,
8070 _("arm_store_return_value: Floating "
8071 "point model not supported"));
08216dd7
RE
8072 break;
8073 }
34e8f22d 8074 }
b508a996
RE
8075 else if (TYPE_CODE (type) == TYPE_CODE_INT
8076 || TYPE_CODE (type) == TYPE_CODE_CHAR
8077 || TYPE_CODE (type) == TYPE_CODE_BOOL
8078 || TYPE_CODE (type) == TYPE_CODE_PTR
aa006118 8079 || TYPE_IS_REFERENCE (type)
b508a996
RE
8080 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8081 {
8082 if (TYPE_LENGTH (type) <= 4)
8083 {
8084 /* Values of one word or less are zero/sign-extended and
8085 returned in r0. */
f0452268 8086 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
b508a996
RE
8087 LONGEST val = unpack_long (type, valbuf);
8088
f0452268 8089 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
b66f5587 8090 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
b508a996
RE
8091 }
8092 else
8093 {
8094 /* Integral values greater than one word are stored in consecutive
8095 registers starting with r0. This will always be a multiple of
8096 the regiser size. */
8097 int len = TYPE_LENGTH (type);
8098 int regno = ARM_A1_REGNUM;
8099
8100 while (len > 0)
8101 {
b66f5587 8102 regs->cooked_write (regno++, valbuf);
f0452268
AH
8103 len -= ARM_INT_REGISTER_SIZE;
8104 valbuf += ARM_INT_REGISTER_SIZE;
b508a996
RE
8105 }
8106 }
8107 }
34e8f22d 8108 else
b508a996
RE
8109 {
8110 /* For a structure or union the behaviour is as if the value had
8111 been stored to word-aligned memory and then loaded into
8112 registers with 32-bit load instruction(s). */
8113 int len = TYPE_LENGTH (type);
8114 int regno = ARM_A1_REGNUM;
f0452268 8115 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
b508a996
RE
8116
8117 while (len > 0)
8118 {
8119 memcpy (tmpbuf, valbuf,
f0452268 8120 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
b66f5587 8121 regs->cooked_write (regno++, tmpbuf);
f0452268
AH
8122 len -= ARM_INT_REGISTER_SIZE;
8123 valbuf += ARM_INT_REGISTER_SIZE;
b508a996
RE
8124 }
8125 }
34e8f22d
RE
8126}
8127
2af48f68
PB
8128
8129/* Handle function return values. */
8130
8131static enum return_value_convention
6a3a010b 8132arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8133 struct type *valtype, struct regcache *regcache,
8134 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8135{
7c00367c 8136 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8137 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8138 enum arm_vfp_cprc_base_type vfp_base_type;
8139 int vfp_base_count;
8140
8141 if (arm_vfp_abi_for_function (gdbarch, func_type)
8142 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8143 {
8144 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8145 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8146 int i;
8147 for (i = 0; i < vfp_base_count; i++)
8148 {
58d6951d
DJ
8149 if (reg_char == 'q')
8150 {
8151 if (writebuf)
8152 arm_neon_quad_write (gdbarch, regcache, i,
8153 writebuf + i * unit_length);
8154
8155 if (readbuf)
8156 arm_neon_quad_read (gdbarch, regcache, i,
8157 readbuf + i * unit_length);
8158 }
8159 else
8160 {
8161 char name_buf[4];
8162 int regnum;
8163
8c042590 8164 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8165 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8166 strlen (name_buf));
8167 if (writebuf)
b66f5587 8168 regcache->cooked_write (regnum, writebuf + i * unit_length);
58d6951d 8169 if (readbuf)
dca08e1f 8170 regcache->cooked_read (regnum, readbuf + i * unit_length);
58d6951d 8171 }
90445bd3
DJ
8172 }
8173 return RETURN_VALUE_REGISTER_CONVENTION;
8174 }
7c00367c 8175
2af48f68
PB
8176 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8177 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8178 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8179 {
7c00367c
MK
8180 if (tdep->struct_return == pcc_struct_return
8181 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8182 return RETURN_VALUE_STRUCT_CONVENTION;
8183 }
b13c8ab2
YQ
8184 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8185 {
8186 if (arm_return_in_memory (gdbarch, valtype))
8187 return RETURN_VALUE_STRUCT_CONVENTION;
8188 }
7052e42c 8189
2af48f68
PB
8190 if (writebuf)
8191 arm_store_return_value (valtype, regcache, writebuf);
8192
8193 if (readbuf)
8194 arm_extract_return_value (valtype, regcache, readbuf);
8195
8196 return RETURN_VALUE_REGISTER_CONVENTION;
8197}
8198
8199
9df628e0 8200static int
60ade65d 8201arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8202{
e17a4113
UW
8203 struct gdbarch *gdbarch = get_frame_arch (frame);
8204 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8205 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8206 CORE_ADDR jb_addr;
f0452268 8207 gdb_byte buf[ARM_INT_REGISTER_SIZE];
9df628e0 8208
60ade65d 8209 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8210
8211 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
f0452268 8212 ARM_INT_REGISTER_SIZE))
9df628e0
RE
8213 return 0;
8214
f0452268 8215 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8216 return 1;
8217}
8218
faa95490
DJ
8219/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8220 return the target PC. Otherwise return 0. */
c906108c
SS
8221
8222CORE_ADDR
52f729a7 8223arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8224{
2c02bd72 8225 const char *name;
faa95490 8226 int namelen;
c906108c
SS
8227 CORE_ADDR start_addr;
8228
8229 /* Find the starting address and name of the function containing the PC. */
8230 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8231 {
8232 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8233 check here. */
8234 start_addr = arm_skip_bx_reg (frame, pc);
8235 if (start_addr != 0)
8236 return start_addr;
8237
8238 return 0;
8239 }
c906108c 8240
faa95490
DJ
8241 /* If PC is in a Thumb call or return stub, return the address of the
8242 target PC, which is in a register. The thunk functions are called
8243 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
8244 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8245 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8246 if (startswith (name, "_call_via_")
8247 || startswith (name, "__ARM_call_via_"))
c906108c 8248 {
ed9a39eb
JM
8249 /* Use the name suffix to determine which register contains the
8250 target PC. */
a121b7c1 8251 static const char *table[15] =
c5aa993b
JM
8252 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8253 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8254 };
c906108c 8255 int regno;
faa95490 8256 int offset = strlen (name) - 2;
c906108c
SS
8257
8258 for (regno = 0; regno <= 14; regno++)
faa95490 8259 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8260 return get_frame_register_unsigned (frame, regno);
c906108c 8261 }
ed9a39eb 8262
faa95490
DJ
8263 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8264 non-interworking calls to foo. We could decode the stubs
8265 to find the target but it's easier to use the symbol table. */
8266 namelen = strlen (name);
8267 if (name[0] == '_' && name[1] == '_'
8268 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8269 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8270 || (namelen > 2 + strlen ("_from_arm")
61012eef 8271 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8272 {
8273 char *target_name;
8274 int target_len = namelen - 2;
3b7344d5 8275 struct bound_minimal_symbol minsym;
faa95490
DJ
8276 struct objfile *objfile;
8277 struct obj_section *sec;
8278
8279 if (name[namelen - 1] == 'b')
8280 target_len -= strlen ("_from_thumb");
8281 else
8282 target_len -= strlen ("_from_arm");
8283
224c3ddb 8284 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8285 memcpy (target_name, name + 2, target_len);
8286 target_name[target_len] = '\0';
8287
8288 sec = find_pc_section (pc);
8289 objfile = (sec == NULL) ? NULL : sec->objfile;
8290 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8291 if (minsym.minsym != NULL)
77e371c0 8292 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8293 else
8294 return 0;
8295 }
8296
c5aa993b 8297 return 0; /* not a stub */
c906108c
SS
8298}
8299
afd7eef0 8300static void
981a3fb3 8301set_arm_command (const char *args, int from_tty)
afd7eef0 8302{
edefbb7c
AC
8303 printf_unfiltered (_("\
8304\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8305 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8306}
8307
8308static void
981a3fb3 8309show_arm_command (const char *args, int from_tty)
afd7eef0 8310{
26304000 8311 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8312}
8313
28e97307
DJ
8314static void
8315arm_update_current_architecture (void)
fd50bc42 8316{
28e97307 8317 struct gdbarch_info info;
fd50bc42 8318
28e97307 8319 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8320 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8321 return;
fd50bc42 8322
28e97307
DJ
8323 /* Update the architecture. */
8324 gdbarch_info_init (&info);
fd50bc42 8325
28e97307 8326 if (!gdbarch_update_p (info))
9b20d036 8327 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8328}
8329
8330static void
eb4c3f4a 8331set_fp_model_sfunc (const char *args, int from_tty,
fd50bc42
RE
8332 struct cmd_list_element *c)
8333{
570dc176 8334 int fp_model;
fd50bc42
RE
8335
8336 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8337 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8338 {
aead7601 8339 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8340 break;
8341 }
8342
8343 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8344 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8345 current_fp_model);
8346
28e97307 8347 arm_update_current_architecture ();
fd50bc42
RE
8348}
8349
8350static void
08546159
AC
8351show_fp_model (struct ui_file *file, int from_tty,
8352 struct cmd_list_element *c, const char *value)
fd50bc42 8353{
f5656ead 8354 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8355
28e97307 8356 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8357 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8358 fprintf_filtered (file, _("\
8359The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8360 fp_model_strings[tdep->fp_model]);
8361 else
8362 fprintf_filtered (file, _("\
8363The current ARM floating point model is \"%s\".\n"),
8364 fp_model_strings[arm_fp_model]);
8365}
8366
8367static void
eb4c3f4a 8368arm_set_abi (const char *args, int from_tty,
28e97307
DJ
8369 struct cmd_list_element *c)
8370{
570dc176 8371 int arm_abi;
28e97307
DJ
8372
8373 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8374 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8375 {
aead7601 8376 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8377 break;
8378 }
8379
8380 if (arm_abi == ARM_ABI_LAST)
8381 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8382 arm_abi_string);
8383
8384 arm_update_current_architecture ();
8385}
8386
8387static void
8388arm_show_abi (struct ui_file *file, int from_tty,
8389 struct cmd_list_element *c, const char *value)
8390{
f5656ead 8391 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8392
8393 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8394 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8395 fprintf_filtered (file, _("\
8396The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8397 arm_abi_strings[tdep->arm_abi]);
8398 else
8399 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8400 arm_abi_string);
fd50bc42
RE
8401}
8402
0428b8f5
DJ
8403static void
8404arm_show_fallback_mode (struct ui_file *file, int from_tty,
8405 struct cmd_list_element *c, const char *value)
8406{
0963b4bd
MS
8407 fprintf_filtered (file,
8408 _("The current execution mode assumed "
8409 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8410 arm_fallback_mode_string);
8411}
8412
8413static void
8414arm_show_force_mode (struct ui_file *file, int from_tty,
8415 struct cmd_list_element *c, const char *value)
8416{
0963b4bd
MS
8417 fprintf_filtered (file,
8418 _("The current execution mode assumed "
8419 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8420 arm_force_mode_string);
8421}
8422
afd7eef0
RE
8423/* If the user changes the register disassembly style used for info
8424 register and other commands, we have to also switch the style used
8425 in opcodes for disassembly output. This function is run in the "set
8426 arm disassembly" command, and does that. */
bc90b915
FN
8427
8428static void
eb4c3f4a 8429set_disassembly_style_sfunc (const char *args, int from_tty,
65b48a81 8430 struct cmd_list_element *c)
bc90b915 8431{
65b48a81
PB
8432 /* Convert the short style name into the long style name (eg, reg-names-*)
8433 before calling the generic set_disassembler_options() function. */
8434 std::string long_name = std::string ("reg-names-") + disassembly_style;
8435 set_disassembler_options (&long_name[0]);
8436}
8437
8438static void
8439show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8440 struct cmd_list_element *c, const char *value)
8441{
8442 struct gdbarch *gdbarch = get_current_arch ();
8443 char *options = get_disassembler_options (gdbarch);
8444 const char *style = "";
8445 int len = 0;
f995bbe8 8446 const char *opt;
65b48a81
PB
8447
8448 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8449 if (CONST_STRNEQ (opt, "reg-names-"))
8450 {
8451 style = &opt[strlen ("reg-names-")];
8452 len = strcspn (style, ",");
8453 }
8454
8455 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
bc90b915
FN
8456}
8457\f
966fbf70 8458/* Return the ARM register name corresponding to register I. */
a208b0cb 8459static const char *
d93859e2 8460arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8461{
58d6951d
DJ
8462 const int num_regs = gdbarch_num_regs (gdbarch);
8463
8464 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8465 && i >= num_regs && i < num_regs + 32)
8466 {
8467 static const char *const vfp_pseudo_names[] = {
8468 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8469 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8470 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8471 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8472 };
8473
8474 return vfp_pseudo_names[i - num_regs];
8475 }
8476
8477 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8478 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8479 {
8480 static const char *const neon_pseudo_names[] = {
8481 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8482 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8483 };
8484
8485 return neon_pseudo_names[i - num_regs - 32];
8486 }
8487
ff6f572f
DJ
8488 if (i >= ARRAY_SIZE (arm_register_names))
8489 /* These registers are only supported on targets which supply
8490 an XML description. */
8491 return "";
8492
966fbf70
RE
8493 return arm_register_names[i];
8494}
8495
082fc60d
RE
8496/* Test whether the coff symbol specific value corresponds to a Thumb
8497 function. */
8498
8499static int
8500coff_sym_is_thumb (int val)
8501{
f8bf5763
PM
8502 return (val == C_THUMBEXT
8503 || val == C_THUMBSTAT
8504 || val == C_THUMBEXTFUNC
8505 || val == C_THUMBSTATFUNC
8506 || val == C_THUMBLABEL);
082fc60d
RE
8507}
8508
8509/* arm_coff_make_msymbol_special()
8510 arm_elf_make_msymbol_special()
8511
8512 These functions test whether the COFF or ELF symbol corresponds to
8513 an address in thumb code, and set a "special" bit in a minimal
8514 symbol to indicate that it does. */
8515
34e8f22d 8516static void
082fc60d
RE
8517arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8518{
39d911fc
TP
8519 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8520
8521 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8522 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8523 MSYMBOL_SET_SPECIAL (msym);
8524}
8525
34e8f22d 8526static void
082fc60d
RE
8527arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8528{
8529 if (coff_sym_is_thumb (val))
8530 MSYMBOL_SET_SPECIAL (msym);
8531}
8532
60c5725c
DJ
8533static void
8534arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8535 asymbol *sym)
8536{
8537 const char *name = bfd_asymbol_name (sym);
8538 struct arm_per_objfile *data;
60c5725c
DJ
8539 struct arm_mapping_symbol new_map_sym;
8540
8541 gdb_assert (name[0] == '$');
8542 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8543 return;
8544
1b7f24cd 8545 data = arm_objfile_data_key.get (objfile);
60c5725c 8546 if (data == NULL)
1b7f24cd
TT
8547 data = arm_objfile_data_key.emplace (objfile,
8548 objfile->obfd->section_count);
54cc7474
SM
8549 arm_mapping_symbol_vec &map
8550 = data->section_maps[bfd_get_section (sym)->index];
60c5725c
DJ
8551
8552 new_map_sym.value = sym->value;
8553 new_map_sym.type = name[1];
8554
4838e44c
SM
8555 /* Insert at the end, the vector will be sorted on first use. */
8556 map.push_back (new_map_sym);
60c5725c
DJ
8557}
8558
756fe439 8559static void
61a1198a 8560arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8561{
ac7936df 8562 struct gdbarch *gdbarch = regcache->arch ();
61a1198a 8563 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8564
8565 /* If necessary, set the T bit. */
8566 if (arm_apcs_32)
8567 {
9779414d 8568 ULONGEST val, t_bit;
61a1198a 8569 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8570 t_bit = arm_psr_thumb_bit (gdbarch);
8571 if (arm_pc_is_thumb (gdbarch, pc))
8572 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8573 val | t_bit);
756fe439 8574 else
61a1198a 8575 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8576 val & ~t_bit);
756fe439
DJ
8577 }
8578}
123dc839 8579
58d6951d
DJ
8580/* Read the contents of a NEON quad register, by reading from two
8581 double registers. This is used to implement the quad pseudo
8582 registers, and for argument passing in case the quad registers are
8583 missing; vectors are passed in quad registers when using the VFP
8584 ABI, even if a NEON unit is not present. REGNUM is the index of
8585 the quad register, in [0, 15]. */
8586
05d1431c 8587static enum register_status
849d0ba8 8588arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
58d6951d
DJ
8589 int regnum, gdb_byte *buf)
8590{
8591 char name_buf[4];
8592 gdb_byte reg_buf[8];
8593 int offset, double_regnum;
05d1431c 8594 enum register_status status;
58d6951d 8595
8c042590 8596 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8597 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8598 strlen (name_buf));
8599
8600 /* d0 is always the least significant half of q0. */
8601 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8602 offset = 8;
8603 else
8604 offset = 0;
8605
03f50fc8 8606 status = regcache->raw_read (double_regnum, reg_buf);
05d1431c
PA
8607 if (status != REG_VALID)
8608 return status;
58d6951d
DJ
8609 memcpy (buf + offset, reg_buf, 8);
8610
8611 offset = 8 - offset;
03f50fc8 8612 status = regcache->raw_read (double_regnum + 1, reg_buf);
05d1431c
PA
8613 if (status != REG_VALID)
8614 return status;
58d6951d 8615 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8616
8617 return REG_VALID;
58d6951d
DJ
8618}
8619
05d1431c 8620static enum register_status
849d0ba8 8621arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
58d6951d
DJ
8622 int regnum, gdb_byte *buf)
8623{
8624 const int num_regs = gdbarch_num_regs (gdbarch);
8625 char name_buf[4];
8626 gdb_byte reg_buf[8];
8627 int offset, double_regnum;
8628
8629 gdb_assert (regnum >= num_regs);
8630 regnum -= num_regs;
8631
8632 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8633 /* Quad-precision register. */
05d1431c 8634 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8635 else
8636 {
05d1431c
PA
8637 enum register_status status;
8638
58d6951d
DJ
8639 /* Single-precision register. */
8640 gdb_assert (regnum < 32);
8641
8642 /* s0 is always the least significant half of d0. */
8643 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8644 offset = (regnum & 1) ? 0 : 4;
8645 else
8646 offset = (regnum & 1) ? 4 : 0;
8647
8c042590 8648 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8649 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8650 strlen (name_buf));
8651
03f50fc8 8652 status = regcache->raw_read (double_regnum, reg_buf);
05d1431c
PA
8653 if (status == REG_VALID)
8654 memcpy (buf, reg_buf + offset, 4);
8655 return status;
58d6951d
DJ
8656 }
8657}
8658
8659/* Store the contents of BUF to a NEON quad register, by writing to
8660 two double registers. This is used to implement the quad pseudo
8661 registers, and for argument passing in case the quad registers are
8662 missing; vectors are passed in quad registers when using the VFP
8663 ABI, even if a NEON unit is not present. REGNUM is the index
8664 of the quad register, in [0, 15]. */
8665
8666static void
8667arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8668 int regnum, const gdb_byte *buf)
8669{
8670 char name_buf[4];
58d6951d
DJ
8671 int offset, double_regnum;
8672
8c042590 8673 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8674 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8675 strlen (name_buf));
8676
8677 /* d0 is always the least significant half of q0. */
8678 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8679 offset = 8;
8680 else
8681 offset = 0;
8682
10eaee5f 8683 regcache->raw_write (double_regnum, buf + offset);
58d6951d 8684 offset = 8 - offset;
10eaee5f 8685 regcache->raw_write (double_regnum + 1, buf + offset);
58d6951d
DJ
8686}
8687
8688static void
8689arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8690 int regnum, const gdb_byte *buf)
8691{
8692 const int num_regs = gdbarch_num_regs (gdbarch);
8693 char name_buf[4];
8694 gdb_byte reg_buf[8];
8695 int offset, double_regnum;
8696
8697 gdb_assert (regnum >= num_regs);
8698 regnum -= num_regs;
8699
8700 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8701 /* Quad-precision register. */
8702 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8703 else
8704 {
8705 /* Single-precision register. */
8706 gdb_assert (regnum < 32);
8707
8708 /* s0 is always the least significant half of d0. */
8709 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8710 offset = (regnum & 1) ? 0 : 4;
8711 else
8712 offset = (regnum & 1) ? 4 : 0;
8713
8c042590 8714 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8715 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8716 strlen (name_buf));
8717
0b883586 8718 regcache->raw_read (double_regnum, reg_buf);
58d6951d 8719 memcpy (reg_buf + offset, buf, 4);
10eaee5f 8720 regcache->raw_write (double_regnum, reg_buf);
58d6951d
DJ
8721 }
8722}
8723
123dc839
DJ
8724static struct value *
8725value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8726{
9a3c8263 8727 const int *reg_p = (const int *) baton;
123dc839
DJ
8728 return value_of_register (*reg_p, frame);
8729}
97e03143 8730\f
70f80edf
JT
8731static enum gdb_osabi
8732arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8733{
2af48f68 8734 unsigned int elfosabi;
70f80edf 8735 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8736
70f80edf 8737 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8738
28e97307
DJ
8739 if (elfosabi == ELFOSABI_ARM)
8740 /* GNU tools use this value. Check note sections in this case,
8741 as well. */
8742 bfd_map_over_sections (abfd,
8743 generic_elf_osabi_sniff_abi_tag_sections,
8744 &osabi);
97e03143 8745
28e97307 8746 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8747 return osabi;
97e03143
RE
8748}
8749
54483882
YQ
8750static int
8751arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8752 struct reggroup *group)
8753{
2c291032
YQ
8754 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8755 this, FPS register belongs to save_regroup, restore_reggroup, and
8756 all_reggroup, of course. */
54483882 8757 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8758 return (group == float_reggroup
8759 || group == save_reggroup
8760 || group == restore_reggroup
8761 || group == all_reggroup);
54483882
YQ
8762 else
8763 return default_register_reggroup_p (gdbarch, regnum, group);
8764}
8765
25f8c692
JL
8766\f
8767/* For backward-compatibility we allow two 'g' packet lengths with
8768 the remote protocol depending on whether FPA registers are
8769 supplied. M-profile targets do not have FPA registers, but some
8770 stubs already exist in the wild which use a 'g' packet which
8771 supplies them albeit with dummy values. The packet format which
8772 includes FPA registers should be considered deprecated for
8773 M-profile targets. */
8774
8775static void
8776arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8777{
8778 if (gdbarch_tdep (gdbarch)->is_m)
8779 {
8780 /* If we know from the executable this is an M-profile target,
8781 cater for remote targets whose register set layout is the
8782 same as the FPA layout. */
8783 register_remote_g_packet_guess (gdbarch,
03145bf4 8784 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
f0452268
AH
8785 (16 * ARM_INT_REGISTER_SIZE)
8786 + (8 * ARM_FP_REGISTER_SIZE)
8787 + (2 * ARM_INT_REGISTER_SIZE),
25f8c692
JL
8788 tdesc_arm_with_m_fpa_layout);
8789
8790 /* The regular M-profile layout. */
8791 register_remote_g_packet_guess (gdbarch,
8792 /* r0-r12,sp,lr,pc; xpsr */
f0452268
AH
8793 (16 * ARM_INT_REGISTER_SIZE)
8794 + ARM_INT_REGISTER_SIZE,
25f8c692 8795 tdesc_arm_with_m);
3184d3f9
JL
8796
8797 /* M-profile plus M4F VFP. */
8798 register_remote_g_packet_guess (gdbarch,
8799 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
f0452268
AH
8800 (16 * ARM_INT_REGISTER_SIZE)
8801 + (16 * ARM_VFP_REGISTER_SIZE)
8802 + (2 * ARM_INT_REGISTER_SIZE),
3184d3f9 8803 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8804 }
8805
8806 /* Otherwise we don't have a useful guess. */
8807}
8808
7eb89530
YQ
8809/* Implement the code_of_frame_writable gdbarch method. */
8810
8811static int
8812arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8813{
8814 if (gdbarch_tdep (gdbarch)->is_m
8815 && get_frame_type (frame) == SIGTRAMP_FRAME)
8816 {
8817 /* M-profile exception frames return to some magic PCs, where
8818 isn't writable at all. */
8819 return 0;
8820 }
8821 else
8822 return 1;
8823}
8824
3426ae57
AH
8825/* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8826 to be postfixed by a version (eg armv7hl). */
8827
8828static const char *
8829arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8830{
8831 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8832 return "arm(v[^- ]*)?";
8833 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8834}
8835
da3c6d4a
MS
8836/* Initialize the current architecture based on INFO. If possible,
8837 re-use an architecture from ARCHES, which is a list of
8838 architectures already created during this debugging session.
97e03143 8839
da3c6d4a
MS
8840 Called e.g. at program startup, when reading a core file, and when
8841 reading a binary file. */
97e03143 8842
39bbf761
RE
8843static struct gdbarch *
8844arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8845{
97e03143 8846 struct gdbarch_tdep *tdep;
39bbf761 8847 struct gdbarch *gdbarch;
28e97307
DJ
8848 struct gdbarch_list *best_arch;
8849 enum arm_abi_kind arm_abi = arm_abi_global;
8850 enum arm_float_model fp_model = arm_fp_model;
123dc839 8851 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8852 int i, is_m = 0;
330c6ca9 8853 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8854 int have_wmmx_registers = 0;
58d6951d 8855 int have_neon = 0;
ff6f572f 8856 int have_fpa_registers = 1;
9779414d
DJ
8857 const struct target_desc *tdesc = info.target_desc;
8858
8859 /* If we have an object to base this architecture on, try to determine
8860 its ABI. */
8861
8862 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8863 {
8864 int ei_osabi, e_flags;
8865
8866 switch (bfd_get_flavour (info.abfd))
8867 {
9779414d
DJ
8868 case bfd_target_coff_flavour:
8869 /* Assume it's an old APCS-style ABI. */
8870 /* XXX WinCE? */
8871 arm_abi = ARM_ABI_APCS;
8872 break;
8873
8874 case bfd_target_elf_flavour:
8875 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8876 e_flags = elf_elfheader (info.abfd)->e_flags;
8877
8878 if (ei_osabi == ELFOSABI_ARM)
8879 {
8880 /* GNU tools used to use this value, but do not for EABI
8881 objects. There's nowhere to tag an EABI version
8882 anyway, so assume APCS. */
8883 arm_abi = ARM_ABI_APCS;
8884 }
d403db27 8885 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
8886 {
8887 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9779414d
DJ
8888
8889 switch (eabi_ver)
8890 {
8891 case EF_ARM_EABI_UNKNOWN:
8892 /* Assume GNU tools. */
8893 arm_abi = ARM_ABI_APCS;
8894 break;
8895
8896 case EF_ARM_EABI_VER4:
8897 case EF_ARM_EABI_VER5:
8898 arm_abi = ARM_ABI_AAPCS;
8899 /* EABI binaries default to VFP float ordering.
8900 They may also contain build attributes that can
8901 be used to identify if the VFP argument-passing
8902 ABI is in use. */
8903 if (fp_model == ARM_FLOAT_AUTO)
8904 {
8905#ifdef HAVE_ELF
8906 switch (bfd_elf_get_obj_attr_int (info.abfd,
8907 OBJ_ATTR_PROC,
8908 Tag_ABI_VFP_args))
8909 {
b35b0298 8910 case AEABI_VFP_args_base:
9779414d
DJ
8911 /* "The user intended FP parameter/result
8912 passing to conform to AAPCS, base
8913 variant". */
8914 fp_model = ARM_FLOAT_SOFT_VFP;
8915 break;
b35b0298 8916 case AEABI_VFP_args_vfp:
9779414d
DJ
8917 /* "The user intended FP parameter/result
8918 passing to conform to AAPCS, VFP
8919 variant". */
8920 fp_model = ARM_FLOAT_VFP;
8921 break;
b35b0298 8922 case AEABI_VFP_args_toolchain:
9779414d
DJ
8923 /* "The user intended FP parameter/result
8924 passing to conform to tool chain-specific
8925 conventions" - we don't know any such
8926 conventions, so leave it as "auto". */
8927 break;
b35b0298 8928 case AEABI_VFP_args_compatible:
5c294fee
TG
8929 /* "Code is compatible with both the base
8930 and VFP variants; the user did not permit
8931 non-variadic functions to pass FP
8932 parameters/results" - leave it as
8933 "auto". */
8934 break;
9779414d
DJ
8935 default:
8936 /* Attribute value not mentioned in the
5c294fee 8937 November 2012 ABI, so leave it as
9779414d
DJ
8938 "auto". */
8939 break;
8940 }
8941#else
8942 fp_model = ARM_FLOAT_SOFT_VFP;
8943#endif
8944 }
8945 break;
8946
8947 default:
8948 /* Leave it as "auto". */
8949 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8950 break;
8951 }
8952
8953#ifdef HAVE_ELF
8954 /* Detect M-profile programs. This only works if the
8955 executable file includes build attributes; GCC does
8956 copy them to the executable, but e.g. RealView does
8957 not. */
17cbafdb
SM
8958 int attr_arch
8959 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8960 Tag_CPU_arch);
8961 int attr_profile
8962 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8963 Tag_CPU_arch_profile);
8964
9779414d
DJ
8965 /* GCC specifies the profile for v6-M; RealView only
8966 specifies the profile for architectures starting with
8967 V7 (as opposed to architectures with a tag
8968 numerically greater than TAG_CPU_ARCH_V7). */
8969 if (!tdesc_has_registers (tdesc)
8970 && (attr_arch == TAG_CPU_ARCH_V6_M
8971 || attr_arch == TAG_CPU_ARCH_V6S_M
8972 || attr_profile == 'M'))
25f8c692 8973 is_m = 1;
9779414d
DJ
8974#endif
8975 }
8976
8977 if (fp_model == ARM_FLOAT_AUTO)
8978 {
9779414d
DJ
8979 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8980 {
8981 case 0:
8982 /* Leave it as "auto". Strictly speaking this case
8983 means FPA, but almost nobody uses that now, and
8984 many toolchains fail to set the appropriate bits
8985 for the floating-point model they use. */
8986 break;
8987 case EF_ARM_SOFT_FLOAT:
8988 fp_model = ARM_FLOAT_SOFT_FPA;
8989 break;
8990 case EF_ARM_VFP_FLOAT:
8991 fp_model = ARM_FLOAT_VFP;
8992 break;
8993 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8994 fp_model = ARM_FLOAT_SOFT_VFP;
8995 break;
8996 }
8997 }
8998
8999 if (e_flags & EF_ARM_BE8)
9000 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9001
9002 break;
9003
9004 default:
9005 /* Leave it as "auto". */
9006 break;
9007 }
9008 }
123dc839
DJ
9009
9010 /* Check any target description for validity. */
9779414d 9011 if (tdesc_has_registers (tdesc))
123dc839
DJ
9012 {
9013 /* For most registers we require GDB's default names; but also allow
9014 the numeric names for sp / lr / pc, as a convenience. */
9015 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9016 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9017 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9018
9019 const struct tdesc_feature *feature;
58d6951d 9020 int valid_p;
123dc839 9021
9779414d 9022 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9023 "org.gnu.gdb.arm.core");
9024 if (feature == NULL)
9779414d
DJ
9025 {
9026 feature = tdesc_find_feature (tdesc,
9027 "org.gnu.gdb.arm.m-profile");
9028 if (feature == NULL)
9029 return NULL;
9030 else
9031 is_m = 1;
9032 }
123dc839
DJ
9033
9034 tdesc_data = tdesc_data_alloc ();
9035
9036 valid_p = 1;
9037 for (i = 0; i < ARM_SP_REGNUM; i++)
9038 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9039 arm_register_names[i]);
9040 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9041 ARM_SP_REGNUM,
9042 arm_sp_names);
9043 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9044 ARM_LR_REGNUM,
9045 arm_lr_names);
9046 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9047 ARM_PC_REGNUM,
9048 arm_pc_names);
9779414d
DJ
9049 if (is_m)
9050 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9051 ARM_PS_REGNUM, "xpsr");
9052 else
9053 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9054 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9055
9056 if (!valid_p)
9057 {
9058 tdesc_data_cleanup (tdesc_data);
9059 return NULL;
9060 }
9061
9779414d 9062 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9063 "org.gnu.gdb.arm.fpa");
9064 if (feature != NULL)
9065 {
9066 valid_p = 1;
9067 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9068 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9069 arm_register_names[i]);
9070 if (!valid_p)
9071 {
9072 tdesc_data_cleanup (tdesc_data);
9073 return NULL;
9074 }
9075 }
ff6f572f
DJ
9076 else
9077 have_fpa_registers = 0;
9078
9779414d 9079 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9080 "org.gnu.gdb.xscale.iwmmxt");
9081 if (feature != NULL)
9082 {
9083 static const char *const iwmmxt_names[] = {
9084 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9085 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9086 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9087 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9088 };
9089
9090 valid_p = 1;
9091 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9092 valid_p
9093 &= tdesc_numbered_register (feature, tdesc_data, i,
9094 iwmmxt_names[i - ARM_WR0_REGNUM]);
9095
9096 /* Check for the control registers, but do not fail if they
9097 are missing. */
9098 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9099 tdesc_numbered_register (feature, tdesc_data, i,
9100 iwmmxt_names[i - ARM_WR0_REGNUM]);
9101
9102 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9103 valid_p
9104 &= tdesc_numbered_register (feature, tdesc_data, i,
9105 iwmmxt_names[i - ARM_WR0_REGNUM]);
9106
9107 if (!valid_p)
9108 {
9109 tdesc_data_cleanup (tdesc_data);
9110 return NULL;
9111 }
a56cc1ce
YQ
9112
9113 have_wmmx_registers = 1;
ff6f572f 9114 }
58d6951d
DJ
9115
9116 /* If we have a VFP unit, check whether the single precision registers
9117 are present. If not, then we will synthesize them as pseudo
9118 registers. */
9779414d 9119 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9120 "org.gnu.gdb.arm.vfp");
9121 if (feature != NULL)
9122 {
9123 static const char *const vfp_double_names[] = {
9124 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9125 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9126 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9127 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9128 };
9129
9130 /* Require the double precision registers. There must be either
9131 16 or 32. */
9132 valid_p = 1;
9133 for (i = 0; i < 32; i++)
9134 {
9135 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9136 ARM_D0_REGNUM + i,
9137 vfp_double_names[i]);
9138 if (!valid_p)
9139 break;
9140 }
2b9e5ea6
UW
9141 if (!valid_p && i == 16)
9142 valid_p = 1;
58d6951d 9143
2b9e5ea6
UW
9144 /* Also require FPSCR. */
9145 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9146 ARM_FPSCR_REGNUM, "fpscr");
9147 if (!valid_p)
58d6951d
DJ
9148 {
9149 tdesc_data_cleanup (tdesc_data);
9150 return NULL;
9151 }
9152
9153 if (tdesc_unnumbered_register (feature, "s0") == 0)
9154 have_vfp_pseudos = 1;
9155
330c6ca9 9156 vfp_register_count = i;
58d6951d
DJ
9157
9158 /* If we have VFP, also check for NEON. The architecture allows
9159 NEON without VFP (integer vector operations only), but GDB
9160 does not support that. */
9779414d 9161 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9162 "org.gnu.gdb.arm.neon");
9163 if (feature != NULL)
9164 {
9165 /* NEON requires 32 double-precision registers. */
9166 if (i != 32)
9167 {
9168 tdesc_data_cleanup (tdesc_data);
9169 return NULL;
9170 }
9171
9172 /* If there are quad registers defined by the stub, use
9173 their type; otherwise (normally) provide them with
9174 the default type. */
9175 if (tdesc_unnumbered_register (feature, "q0") == 0)
9176 have_neon_pseudos = 1;
9177
9178 have_neon = 1;
9179 }
9180 }
123dc839 9181 }
39bbf761 9182
28e97307
DJ
9183 /* If there is already a candidate, use it. */
9184 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9185 best_arch != NULL;
9186 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9187 {
b8926edc
DJ
9188 if (arm_abi != ARM_ABI_AUTO
9189 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9190 continue;
9191
b8926edc
DJ
9192 if (fp_model != ARM_FLOAT_AUTO
9193 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9194 continue;
9195
58d6951d
DJ
9196 /* There are various other properties in tdep that we do not
9197 need to check here: those derived from a target description,
9198 since gdbarches with a different target description are
9199 automatically disqualified. */
9200
9779414d
DJ
9201 /* Do check is_m, though, since it might come from the binary. */
9202 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9203 continue;
9204
28e97307
DJ
9205 /* Found a match. */
9206 break;
9207 }
97e03143 9208
28e97307 9209 if (best_arch != NULL)
123dc839
DJ
9210 {
9211 if (tdesc_data != NULL)
9212 tdesc_data_cleanup (tdesc_data);
9213 return best_arch->gdbarch;
9214 }
28e97307 9215
8d749320 9216 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9217 gdbarch = gdbarch_alloc (&info, tdep);
9218
28e97307
DJ
9219 /* Record additional information about the architecture we are defining.
9220 These are gdbarch discriminators, like the OSABI. */
9221 tdep->arm_abi = arm_abi;
9222 tdep->fp_model = fp_model;
9779414d 9223 tdep->is_m = is_m;
ff6f572f 9224 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9225 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9226 gdb_assert (vfp_register_count == 0
9227 || vfp_register_count == 16
9228 || vfp_register_count == 32);
9229 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9230 tdep->have_vfp_pseudos = have_vfp_pseudos;
9231 tdep->have_neon_pseudos = have_neon_pseudos;
9232 tdep->have_neon = have_neon;
08216dd7 9233
25f8c692
JL
9234 arm_register_g_packet_guesses (gdbarch);
9235
08216dd7 9236 /* Breakpoints. */
9d4fde75 9237 switch (info.byte_order_for_code)
67255d04
RE
9238 {
9239 case BFD_ENDIAN_BIG:
66e810cd
RE
9240 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9241 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9242 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9243 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9244
67255d04
RE
9245 break;
9246
9247 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9248 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9249 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9250 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9251 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9252
67255d04
RE
9253 break;
9254
9255 default:
9256 internal_error (__FILE__, __LINE__,
edefbb7c 9257 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9258 }
9259
d7b486e7
RE
9260 /* On ARM targets char defaults to unsigned. */
9261 set_gdbarch_char_signed (gdbarch, 0);
9262
53375380
PA
9263 /* wchar_t is unsigned under the AAPCS. */
9264 if (tdep->arm_abi == ARM_ABI_AAPCS)
9265 set_gdbarch_wchar_signed (gdbarch, 0);
9266 else
9267 set_gdbarch_wchar_signed (gdbarch, 1);
53375380 9268
030197b4
AB
9269 /* Compute type alignment. */
9270 set_gdbarch_type_align (gdbarch, arm_type_align);
9271
cca44b1b
JB
9272 /* Note: for displaced stepping, this includes the breakpoint, and one word
9273 of additional scratch space. This setting isn't used for anything beside
9274 displaced stepping at present. */
e935475c 9275 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
cca44b1b 9276
9df628e0 9277 /* This should be low enough for everything. */
97e03143 9278 tdep->lowest_pc = 0x20;
94c30b78 9279 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9280
7c00367c
MK
9281 /* The default, for both APCS and AAPCS, is to return small
9282 structures in registers. */
9283 tdep->struct_return = reg_struct_return;
9284
2dd604e7 9285 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9286 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9287
7eb89530
YQ
9288 if (is_m)
9289 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9290
756fe439
DJ
9291 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9292
eb5492fa 9293 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9294
34e8f22d 9295 /* Address manipulation. */
34e8f22d
RE
9296 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9297
34e8f22d
RE
9298 /* Advance PC across function entry code. */
9299 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9300
c9cf6e20
MG
9301 /* Detect whether PC is at a point where the stack has been destroyed. */
9302 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9303
190dce09
UW
9304 /* Skip trampolines. */
9305 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9306
34e8f22d
RE
9307 /* The stack grows downward. */
9308 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9309
9310 /* Breakpoint manipulation. */
04180708
YQ
9311 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9312 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
833b7ab5
YQ
9313 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9314 arm_breakpoint_kind_from_current_state);
34e8f22d
RE
9315
9316 /* Information about registers, etc. */
34e8f22d
RE
9317 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9318 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9319 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9320 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9321 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9322
ff6f572f
DJ
9323 /* This "info float" is FPA-specific. Use the generic version if we
9324 do not have FPA. */
9325 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9326 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9327
26216b98 9328 /* Internal <-> external register number maps. */
ff6f572f 9329 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9330 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9331
34e8f22d
RE
9332 set_gdbarch_register_name (gdbarch, arm_register_name);
9333
9334 /* Returning results. */
2af48f68 9335 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9336
03d48a7d
RE
9337 /* Disassembly. */
9338 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9339
34e8f22d
RE
9340 /* Minsymbol frobbing. */
9341 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9342 set_gdbarch_coff_make_msymbol_special (gdbarch,
9343 arm_coff_make_msymbol_special);
60c5725c 9344 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9345
f9d67f43
DJ
9346 /* Thumb-2 IT block support. */
9347 set_gdbarch_adjust_breakpoint_address (gdbarch,
9348 arm_adjust_breakpoint_address);
9349
0d5de010
DJ
9350 /* Virtual tables. */
9351 set_gdbarch_vbit_in_delta (gdbarch, 1);
9352
97e03143 9353 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9354 gdbarch_init_osabi (info, gdbarch);
97e03143 9355
b39cc962
DJ
9356 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9357
eb5492fa 9358 /* Add some default predicates. */
2ae28aa9
YQ
9359 if (is_m)
9360 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9361 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9362 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9363 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9364 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9365 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9366
97e03143
RE
9367 /* Now we have tuned the configuration, set a few final things,
9368 based on what the OS ABI has told us. */
9369
b8926edc
DJ
9370 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9371 binaries are always marked. */
9372 if (tdep->arm_abi == ARM_ABI_AUTO)
9373 tdep->arm_abi = ARM_ABI_APCS;
9374
e3039479
UW
9375 /* Watchpoints are not steppable. */
9376 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9377
b8926edc
DJ
9378 /* We used to default to FPA for generic ARM, but almost nobody
9379 uses that now, and we now provide a way for the user to force
9380 the model. So default to the most useful variant. */
9381 if (tdep->fp_model == ARM_FLOAT_AUTO)
9382 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9383
9df628e0
RE
9384 if (tdep->jb_pc >= 0)
9385 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9386
08216dd7 9387 /* Floating point sizes and format. */
8da61cc4 9388 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9389 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9390 {
8da61cc4
DJ
9391 set_gdbarch_double_format
9392 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9393 set_gdbarch_long_double_format
9394 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9395 }
9396 else
9397 {
9398 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9399 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9400 }
9401
58d6951d
DJ
9402 if (have_vfp_pseudos)
9403 {
9404 /* NOTE: These are the only pseudo registers used by
9405 the ARM target at the moment. If more are added, a
9406 little more care in numbering will be needed. */
9407
9408 int num_pseudos = 32;
9409 if (have_neon_pseudos)
9410 num_pseudos += 16;
9411 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9412 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9413 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9414 }
9415
123dc839 9416 if (tdesc_data)
58d6951d
DJ
9417 {
9418 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9419
9779414d 9420 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9421
9422 /* Override tdesc_register_type to adjust the types of VFP
9423 registers for NEON. */
9424 set_gdbarch_register_type (gdbarch, arm_register_type);
9425 }
123dc839
DJ
9426
9427 /* Add standard register aliases. We add aliases even for those
9428 nanes which are used by the current architecture - it's simpler,
9429 and does no harm, since nothing ever lists user registers. */
9430 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9431 user_reg_add (gdbarch, arm_register_aliases[i].name,
9432 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9433
65b48a81
PB
9434 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9435 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9436
3426ae57
AH
9437 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9438
39bbf761
RE
9439 return gdbarch;
9440}
9441
97e03143 9442static void
2af46ca0 9443arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9444{
2af46ca0 9445 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9446
9447 if (tdep == NULL)
9448 return;
9449
edefbb7c 9450 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9451 (unsigned long) tdep->lowest_pc);
9452}
9453
0d4c07af 9454#if GDB_SELF_TEST
b121eeb9
YQ
9455namespace selftests
9456{
9457static void arm_record_test (void);
9458}
0d4c07af 9459#endif
b121eeb9 9460
c906108c 9461void
ed9a39eb 9462_initialize_arm_tdep (void)
c906108c 9463{
bc90b915 9464 long length;
65b48a81 9465 int i, j;
edefbb7c
AC
9466 char regdesc[1024], *rdptr = regdesc;
9467 size_t rest = sizeof (regdesc);
085dd6e6 9468
42cf1509 9469 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9470
0e9e9abd 9471 /* Add ourselves to objfile event chain. */
76727919 9472 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
0e9e9abd
UW
9473 arm_exidx_data_key
9474 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9475
70f80edf
JT
9476 /* Register an ELF OS ABI sniffer for ARM binaries. */
9477 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9478 bfd_target_elf_flavour,
9479 arm_elf_osabi_sniffer);
9480
9779414d
DJ
9481 /* Initialize the standard target descriptions. */
9482 initialize_tdesc_arm_with_m ();
25f8c692 9483 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9484 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9485 initialize_tdesc_arm_with_iwmmxt ();
9486 initialize_tdesc_arm_with_vfpv2 ();
9487 initialize_tdesc_arm_with_vfpv3 ();
9488 initialize_tdesc_arm_with_neon ();
9779414d 9489
afd7eef0
RE
9490 /* Add root prefix command for all "set arm"/"show arm" commands. */
9491 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9492 _("Various ARM-specific commands."),
afd7eef0
RE
9493 &setarmcmdlist, "set arm ", 0, &setlist);
9494
9495 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9496 _("Various ARM-specific commands."),
afd7eef0 9497 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9498
c5aa993b 9499
65b48a81 9500 arm_disassembler_options = xstrdup ("reg-names-std");
471b9d15
MR
9501 const disasm_options_t *disasm_options
9502 = &disassembler_options_arm ()->options;
65b48a81
PB
9503 int num_disassembly_styles = 0;
9504 for (i = 0; disasm_options->name[i] != NULL; i++)
9505 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9506 num_disassembly_styles++;
9507
9508 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
8d749320 9509 valid_disassembly_styles = XNEWVEC (const char *,
65b48a81
PB
9510 num_disassembly_styles + 1);
9511 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9512 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9513 {
9514 size_t offset = strlen ("reg-names-");
9515 const char *style = disasm_options->name[i];
9516 valid_disassembly_styles[j++] = &style[offset];
9517 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9518 disasm_options->description[i]);
9519 rdptr += length;
9520 rest -= length;
9521 }
94c30b78 9522 /* Mark the end of valid options. */
65b48a81 9523 valid_disassembly_styles[num_disassembly_styles] = NULL;
c906108c 9524
edefbb7c 9525 /* Create the help text. */
d7e74731
PA
9526 std::string helptext = string_printf ("%s%s%s",
9527 _("The valid values are:\n"),
9528 regdesc,
9529 _("The default is \"std\"."));
ed9a39eb 9530
edefbb7c
AC
9531 add_setshow_enum_cmd("disassembler", no_class,
9532 valid_disassembly_styles, &disassembly_style,
9533 _("Set the disassembly style."),
9534 _("Show the disassembly style."),
09b0e4b0 9535 helptext.c_str (),
2c5b56ce 9536 set_disassembly_style_sfunc,
65b48a81 9537 show_disassembly_style_sfunc,
7376b4c2 9538 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9539
9540 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9541 _("Set usage of ARM 32-bit mode."),
9542 _("Show usage of ARM 32-bit mode."),
9543 _("When off, a 26-bit PC will be used."),
2c5b56ce 9544 NULL,
0963b4bd
MS
9545 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9546 mode is %s. */
26304000 9547 &setarmcmdlist, &showarmcmdlist);
c906108c 9548
fd50bc42 9549 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9550 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9551 _("Set the floating point type."),
9552 _("Show the floating point type."),
9553 _("auto - Determine the FP typefrom the OS-ABI.\n\
9554softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9555fpa - FPA co-processor (GCC compiled).\n\
9556softvfp - Software FP with pure-endian doubles.\n\
9557vfp - VFP co-processor."),
edefbb7c 9558 set_fp_model_sfunc, show_fp_model,
7376b4c2 9559 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9560
28e97307
DJ
9561 /* Add a command to allow the user to force the ABI. */
9562 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9563 _("Set the ABI."),
9564 _("Show the ABI."),
9565 NULL, arm_set_abi, arm_show_abi,
9566 &setarmcmdlist, &showarmcmdlist);
9567
0428b8f5
DJ
9568 /* Add two commands to allow the user to force the assumed
9569 execution mode. */
9570 add_setshow_enum_cmd ("fallback-mode", class_support,
9571 arm_mode_strings, &arm_fallback_mode_string,
9572 _("Set the mode assumed when symbols are unavailable."),
9573 _("Show the mode assumed when symbols are unavailable."),
9574 NULL, NULL, arm_show_fallback_mode,
9575 &setarmcmdlist, &showarmcmdlist);
9576 add_setshow_enum_cmd ("force-mode", class_support,
9577 arm_mode_strings, &arm_force_mode_string,
9578 _("Set the mode assumed even when symbols are available."),
9579 _("Show the mode assumed even when symbols are available."),
9580 NULL, NULL, arm_show_force_mode,
9581 &setarmcmdlist, &showarmcmdlist);
9582
6529d2dd 9583 /* Debugging flag. */
edefbb7c
AC
9584 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9585 _("Set ARM debugging."),
9586 _("Show ARM debugging."),
9587 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9588 NULL,
7915a72c 9589 NULL, /* FIXME: i18n: "ARM debugging is %s. */
26304000 9590 &setdebuglist, &showdebuglist);
b121eeb9
YQ
9591
9592#if GDB_SELF_TEST
1526853e 9593 selftests::register_test ("arm-record", selftests::arm_record_test);
b121eeb9
YQ
9594#endif
9595
c906108c 9596}
72508ac0
PO
9597
9598/* ARM-reversible process record data structures. */
9599
9600#define ARM_INSN_SIZE_BYTES 4
9601#define THUMB_INSN_SIZE_BYTES 2
9602#define THUMB2_INSN_SIZE_BYTES 4
9603
9604
71e396f9
LM
9605/* Position of the bit within a 32-bit ARM instruction
9606 that defines whether the instruction is a load or store. */
72508ac0
PO
9607#define INSN_S_L_BIT_NUM 20
9608
9609#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9610 do \
9611 { \
9612 unsigned int reg_len = LENGTH; \
9613 if (reg_len) \
9614 { \
9615 REGS = XNEWVEC (uint32_t, reg_len); \
9616 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9617 } \
9618 } \
9619 while (0)
9620
9621#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9622 do \
9623 { \
9624 unsigned int mem_len = LENGTH; \
9625 if (mem_len) \
9626 { \
9627 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9628 memcpy(&MEMS->len, &RECORD_BUF[0], \
9629 sizeof(struct arm_mem_r) * LENGTH); \
9630 } \
9631 } \
9632 while (0)
9633
9634/* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9635#define INSN_RECORDED(ARM_RECORD) \
9636 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9637
9638/* ARM memory record structure. */
9639struct arm_mem_r
9640{
9641 uint32_t len; /* Record length. */
bfbbec00 9642 uint32_t addr; /* Memory address. */
72508ac0
PO
9643};
9644
9645/* ARM instruction record contains opcode of current insn
9646 and execution state (before entry to decode_insn()),
9647 contains list of to-be-modified registers and
9648 memory blocks (on return from decode_insn()). */
9649
9650typedef struct insn_decode_record_t
9651{
9652 struct gdbarch *gdbarch;
9653 struct regcache *regcache;
9654 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9655 uint32_t arm_insn; /* Should accommodate thumb. */
9656 uint32_t cond; /* Condition code. */
9657 uint32_t opcode; /* Insn opcode. */
9658 uint32_t decode; /* Insn decode bits. */
9659 uint32_t mem_rec_count; /* No of mem records. */
9660 uint32_t reg_rec_count; /* No of reg records. */
9661 uint32_t *arm_regs; /* Registers to be saved for this record. */
9662 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9663} insn_decode_record;
9664
9665
9666/* Checks ARM SBZ and SBO mandatory fields. */
9667
9668static int
9669sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9670{
9671 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9672
9673 if (!len)
9674 return 1;
9675
9676 if (!sbo)
9677 ones = ~ones;
9678
9679 while (ones)
9680 {
9681 if (!(ones & sbo))
9682 {
9683 return 0;
9684 }
9685 ones = ones >> 1;
9686 }
9687 return 1;
9688}
9689
c6ec2b30
OJ
9690enum arm_record_result
9691{
9692 ARM_RECORD_SUCCESS = 0,
9693 ARM_RECORD_FAILURE = 1
9694};
9695
72508ac0
PO
9696typedef enum
9697{
9698 ARM_RECORD_STRH=1,
9699 ARM_RECORD_STRD
9700} arm_record_strx_t;
9701
9702typedef enum
9703{
9704 ARM_RECORD=1,
9705 THUMB_RECORD,
9706 THUMB2_RECORD
9707} record_type_t;
9708
9709
9710static int
9711arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9712 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9713{
9714
9715 struct regcache *reg_cache = arm_insn_r->regcache;
9716 ULONGEST u_regval[2]= {0};
9717
9718 uint32_t reg_src1 = 0, reg_src2 = 0;
9719 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9720
9721 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9722 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9723
9724 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9725 {
9726 /* 1) Handle misc store, immediate offset. */
9727 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9728 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9729 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9730 regcache_raw_read_unsigned (reg_cache, reg_src1,
9731 &u_regval[0]);
9732 if (ARM_PC_REGNUM == reg_src1)
9733 {
9734 /* If R15 was used as Rn, hence current PC+8. */
9735 u_regval[0] = u_regval[0] + 8;
9736 }
9737 offset_8 = (immed_high << 4) | immed_low;
9738 /* Calculate target store address. */
9739 if (14 == arm_insn_r->opcode)
9740 {
9741 tgt_mem_addr = u_regval[0] + offset_8;
9742 }
9743 else
9744 {
9745 tgt_mem_addr = u_regval[0] - offset_8;
9746 }
9747 if (ARM_RECORD_STRH == str_type)
9748 {
9749 record_buf_mem[0] = 2;
9750 record_buf_mem[1] = tgt_mem_addr;
9751 arm_insn_r->mem_rec_count = 1;
9752 }
9753 else if (ARM_RECORD_STRD == str_type)
9754 {
9755 record_buf_mem[0] = 4;
9756 record_buf_mem[1] = tgt_mem_addr;
9757 record_buf_mem[2] = 4;
9758 record_buf_mem[3] = tgt_mem_addr + 4;
9759 arm_insn_r->mem_rec_count = 2;
9760 }
9761 }
9762 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9763 {
9764 /* 2) Store, register offset. */
9765 /* Get Rm. */
9766 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9767 /* Get Rn. */
9768 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9769 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9770 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9771 if (15 == reg_src2)
9772 {
9773 /* If R15 was used as Rn, hence current PC+8. */
9774 u_regval[0] = u_regval[0] + 8;
9775 }
9776 /* Calculate target store address, Rn +/- Rm, register offset. */
9777 if (12 == arm_insn_r->opcode)
9778 {
9779 tgt_mem_addr = u_regval[0] + u_regval[1];
9780 }
9781 else
9782 {
9783 tgt_mem_addr = u_regval[1] - u_regval[0];
9784 }
9785 if (ARM_RECORD_STRH == str_type)
9786 {
9787 record_buf_mem[0] = 2;
9788 record_buf_mem[1] = tgt_mem_addr;
9789 arm_insn_r->mem_rec_count = 1;
9790 }
9791 else if (ARM_RECORD_STRD == str_type)
9792 {
9793 record_buf_mem[0] = 4;
9794 record_buf_mem[1] = tgt_mem_addr;
9795 record_buf_mem[2] = 4;
9796 record_buf_mem[3] = tgt_mem_addr + 4;
9797 arm_insn_r->mem_rec_count = 2;
9798 }
9799 }
9800 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9801 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9802 {
9803 /* 3) Store, immediate pre-indexed. */
9804 /* 5) Store, immediate post-indexed. */
9805 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9806 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9807 offset_8 = (immed_high << 4) | immed_low;
9808 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9809 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9810 /* Calculate target store address, Rn +/- Rm, register offset. */
9811 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9812 {
9813 tgt_mem_addr = u_regval[0] + offset_8;
9814 }
9815 else
9816 {
9817 tgt_mem_addr = u_regval[0] - offset_8;
9818 }
9819 if (ARM_RECORD_STRH == str_type)
9820 {
9821 record_buf_mem[0] = 2;
9822 record_buf_mem[1] = tgt_mem_addr;
9823 arm_insn_r->mem_rec_count = 1;
9824 }
9825 else if (ARM_RECORD_STRD == str_type)
9826 {
9827 record_buf_mem[0] = 4;
9828 record_buf_mem[1] = tgt_mem_addr;
9829 record_buf_mem[2] = 4;
9830 record_buf_mem[3] = tgt_mem_addr + 4;
9831 arm_insn_r->mem_rec_count = 2;
9832 }
9833 /* Record Rn also as it changes. */
9834 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9835 arm_insn_r->reg_rec_count = 1;
9836 }
9837 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9838 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9839 {
9840 /* 4) Store, register pre-indexed. */
9841 /* 6) Store, register post -indexed. */
9842 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9843 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9844 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9845 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9846 /* Calculate target store address, Rn +/- Rm, register offset. */
9847 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9848 {
9849 tgt_mem_addr = u_regval[0] + u_regval[1];
9850 }
9851 else
9852 {
9853 tgt_mem_addr = u_regval[1] - u_regval[0];
9854 }
9855 if (ARM_RECORD_STRH == str_type)
9856 {
9857 record_buf_mem[0] = 2;
9858 record_buf_mem[1] = tgt_mem_addr;
9859 arm_insn_r->mem_rec_count = 1;
9860 }
9861 else if (ARM_RECORD_STRD == str_type)
9862 {
9863 record_buf_mem[0] = 4;
9864 record_buf_mem[1] = tgt_mem_addr;
9865 record_buf_mem[2] = 4;
9866 record_buf_mem[3] = tgt_mem_addr + 4;
9867 arm_insn_r->mem_rec_count = 2;
9868 }
9869 /* Record Rn also as it changes. */
9870 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9871 arm_insn_r->reg_rec_count = 1;
9872 }
9873 return 0;
9874}
9875
9876/* Handling ARM extension space insns. */
9877
9878static int
9879arm_record_extension_space (insn_decode_record *arm_insn_r)
9880{
df95a9cf 9881 int ret = 0; /* Return value: -1:record failure ; 0:success */
72508ac0
PO
9882 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9883 uint32_t record_buf[8], record_buf_mem[8];
9884 uint32_t reg_src1 = 0;
72508ac0
PO
9885 struct regcache *reg_cache = arm_insn_r->regcache;
9886 ULONGEST u_regval = 0;
9887
9888 gdb_assert (!INSN_RECORDED(arm_insn_r));
9889 /* Handle unconditional insn extension space. */
9890
9891 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9892 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9893 if (arm_insn_r->cond)
9894 {
9895 /* PLD has no affect on architectural state, it just affects
9896 the caches. */
9897 if (5 == ((opcode1 & 0xE0) >> 5))
9898 {
9899 /* BLX(1) */
9900 record_buf[0] = ARM_PS_REGNUM;
9901 record_buf[1] = ARM_LR_REGNUM;
9902 arm_insn_r->reg_rec_count = 2;
9903 }
9904 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9905 }
9906
9907
9908 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9909 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9910 {
9911 ret = -1;
9912 /* Undefined instruction on ARM V5; need to handle if later
9913 versions define it. */
9914 }
9915
9916 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9917 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9918 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9919
9920 /* Handle arithmetic insn extension space. */
9921 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9922 && !INSN_RECORDED(arm_insn_r))
9923 {
9924 /* Handle MLA(S) and MUL(S). */
b020ff80 9925 if (in_inclusive_range (insn_op1, 0U, 3U))
72508ac0
PO
9926 {
9927 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9928 record_buf[1] = ARM_PS_REGNUM;
9929 arm_insn_r->reg_rec_count = 2;
9930 }
b020ff80 9931 else if (in_inclusive_range (insn_op1, 4U, 15U))
72508ac0
PO
9932 {
9933 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9934 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9935 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9936 record_buf[2] = ARM_PS_REGNUM;
9937 arm_insn_r->reg_rec_count = 3;
9938 }
9939 }
9940
9941 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9942 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9943 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9944
9945 /* Handle control insn extension space. */
9946
9947 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9948 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9949 {
9950 if (!bit (arm_insn_r->arm_insn,25))
9951 {
9952 if (!bits (arm_insn_r->arm_insn, 4, 7))
9953 {
9954 if ((0 == insn_op1) || (2 == insn_op1))
9955 {
9956 /* MRS. */
9957 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9958 arm_insn_r->reg_rec_count = 1;
9959 }
9960 else if (1 == insn_op1)
9961 {
9962 /* CSPR is going to be changed. */
9963 record_buf[0] = ARM_PS_REGNUM;
9964 arm_insn_r->reg_rec_count = 1;
9965 }
9966 else if (3 == insn_op1)
9967 {
9968 /* SPSR is going to be changed. */
9969 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
9970 return -1;
9971 }
9972 }
9973 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9974 {
9975 if (1 == insn_op1)
9976 {
9977 /* BX. */
9978 record_buf[0] = ARM_PS_REGNUM;
9979 arm_insn_r->reg_rec_count = 1;
9980 }
9981 else if (3 == insn_op1)
9982 {
9983 /* CLZ. */
9984 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9985 arm_insn_r->reg_rec_count = 1;
9986 }
9987 }
9988 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9989 {
9990 /* BLX. */
9991 record_buf[0] = ARM_PS_REGNUM;
9992 record_buf[1] = ARM_LR_REGNUM;
9993 arm_insn_r->reg_rec_count = 2;
9994 }
9995 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
9996 {
9997 /* QADD, QSUB, QDADD, QDSUB */
9998 record_buf[0] = ARM_PS_REGNUM;
9999 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10000 arm_insn_r->reg_rec_count = 2;
10001 }
10002 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10003 {
10004 /* BKPT. */
10005 record_buf[0] = ARM_PS_REGNUM;
10006 record_buf[1] = ARM_LR_REGNUM;
10007 arm_insn_r->reg_rec_count = 2;
10008
10009 /* Save SPSR also;how? */
72508ac0
PO
10010 return -1;
10011 }
10012 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10013 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10014 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10015 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10016 )
10017 {
10018 if (0 == insn_op1 || 1 == insn_op1)
10019 {
10020 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10021 /* We dont do optimization for SMULW<y> where we
10022 need only Rd. */
10023 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10024 record_buf[1] = ARM_PS_REGNUM;
10025 arm_insn_r->reg_rec_count = 2;
10026 }
10027 else if (2 == insn_op1)
10028 {
10029 /* SMLAL<x><y>. */
10030 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10031 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10032 arm_insn_r->reg_rec_count = 2;
10033 }
10034 else if (3 == insn_op1)
10035 {
10036 /* SMUL<x><y>. */
10037 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10038 arm_insn_r->reg_rec_count = 1;
10039 }
10040 }
10041 }
10042 else
10043 {
10044 /* MSR : immediate form. */
10045 if (1 == insn_op1)
10046 {
10047 /* CSPR is going to be changed. */
10048 record_buf[0] = ARM_PS_REGNUM;
10049 arm_insn_r->reg_rec_count = 1;
10050 }
10051 else if (3 == insn_op1)
10052 {
10053 /* SPSR is going to be changed. */
10054 /* we need to get SPSR value, which is yet to be done */
72508ac0
PO
10055 return -1;
10056 }
10057 }
10058 }
10059
10060 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10061 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10062 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10063
10064 /* Handle load/store insn extension space. */
10065
10066 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10067 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10068 && !INSN_RECORDED(arm_insn_r))
10069 {
10070 /* SWP/SWPB. */
10071 if (0 == insn_op1)
10072 {
10073 /* These insn, changes register and memory as well. */
10074 /* SWP or SWPB insn. */
10075 /* Get memory address given by Rn. */
10076 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10077 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10078 /* SWP insn ?, swaps word. */
10079 if (8 == arm_insn_r->opcode)
10080 {
10081 record_buf_mem[0] = 4;
10082 }
10083 else
10084 {
10085 /* SWPB insn, swaps only byte. */
10086 record_buf_mem[0] = 1;
10087 }
10088 record_buf_mem[1] = u_regval;
10089 arm_insn_r->mem_rec_count = 1;
10090 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10091 arm_insn_r->reg_rec_count = 1;
10092 }
10093 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10094 {
10095 /* STRH. */
10096 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10097 ARM_RECORD_STRH);
10098 }
10099 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10100 {
10101 /* LDRD. */
10102 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10103 record_buf[1] = record_buf[0] + 1;
10104 arm_insn_r->reg_rec_count = 2;
10105 }
10106 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10107 {
10108 /* STRD. */
10109 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10110 ARM_RECORD_STRD);
10111 }
10112 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10113 {
10114 /* LDRH, LDRSB, LDRSH. */
10115 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10116 arm_insn_r->reg_rec_count = 1;
10117 }
10118
10119 }
10120
10121 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10122 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10123 && !INSN_RECORDED(arm_insn_r))
10124 {
10125 ret = -1;
10126 /* Handle coprocessor insn extension space. */
10127 }
10128
10129 /* To be done for ARMv5 and later; as of now we return -1. */
10130 if (-1 == ret)
ca92db2d 10131 return ret;
72508ac0
PO
10132
10133 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10134 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10135
10136 return ret;
10137}
10138
10139/* Handling opcode 000 insns. */
10140
10141static int
10142arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10143{
10144 struct regcache *reg_cache = arm_insn_r->regcache;
10145 uint32_t record_buf[8], record_buf_mem[8];
10146 ULONGEST u_regval[2] = {0};
10147
8d49165d 10148 uint32_t reg_src1 = 0;
72508ac0
PO
10149 uint32_t opcode1 = 0;
10150
10151 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10152 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10153 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10154
2d9e6acb 10155 if (!((opcode1 & 0x19) == 0x10))
72508ac0 10156 {
2d9e6acb
YQ
10157 /* Data-processing (register) and Data-processing (register-shifted
10158 register */
10159 /* Out of 11 shifter operands mode, all the insn modifies destination
10160 register, which is specified by 13-16 decode. */
10161 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10162 record_buf[1] = ARM_PS_REGNUM;
10163 arm_insn_r->reg_rec_count = 2;
72508ac0 10164 }
2d9e6acb 10165 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
72508ac0 10166 {
2d9e6acb
YQ
10167 /* Miscellaneous instructions */
10168
10169 if (3 == arm_insn_r->decode && 0x12 == opcode1
10170 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10171 {
10172 /* Handle BLX, branch and link/exchange. */
10173 if (9 == arm_insn_r->opcode)
10174 {
10175 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10176 and R14 stores the return address. */
10177 record_buf[0] = ARM_PS_REGNUM;
10178 record_buf[1] = ARM_LR_REGNUM;
10179 arm_insn_r->reg_rec_count = 2;
10180 }
10181 }
10182 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10183 {
10184 /* Handle enhanced software breakpoint insn, BKPT. */
10185 /* CPSR is changed to be executed in ARM state, disabling normal
10186 interrupts, entering abort mode. */
10187 /* According to high vector configuration PC is set. */
10188 /* user hit breakpoint and type reverse, in
10189 that case, we need to go back with previous CPSR and
10190 Program Counter. */
10191 record_buf[0] = ARM_PS_REGNUM;
10192 record_buf[1] = ARM_LR_REGNUM;
10193 arm_insn_r->reg_rec_count = 2;
10194
10195 /* Save SPSR also; how? */
10196 return -1;
10197 }
10198 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10199 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10200 {
10201 /* Handle BX, branch and link/exchange. */
10202 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10203 record_buf[0] = ARM_PS_REGNUM;
10204 arm_insn_r->reg_rec_count = 1;
10205 }
10206 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10207 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10208 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10209 {
10210 /* Count leading zeros: CLZ. */
10211 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10212 arm_insn_r->reg_rec_count = 1;
10213 }
10214 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10215 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10216 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10217 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10218 {
10219 /* Handle MRS insn. */
10220 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10221 arm_insn_r->reg_rec_count = 1;
10222 }
72508ac0 10223 }
2d9e6acb 10224 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
72508ac0 10225 {
2d9e6acb
YQ
10226 /* Multiply and multiply-accumulate */
10227
10228 /* Handle multiply instructions. */
10229 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10230 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10231 {
10232 /* Handle MLA and MUL. */
10233 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10234 record_buf[1] = ARM_PS_REGNUM;
10235 arm_insn_r->reg_rec_count = 2;
10236 }
10237 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10238 {
10239 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10240 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10241 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10242 record_buf[2] = ARM_PS_REGNUM;
10243 arm_insn_r->reg_rec_count = 3;
10244 }
10245 }
10246 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10247 {
10248 /* Synchronization primitives */
10249
72508ac0
PO
10250 /* Handling SWP, SWPB. */
10251 /* These insn, changes register and memory as well. */
10252 /* SWP or SWPB insn. */
10253
10254 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10255 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10256 /* SWP insn ?, swaps word. */
10257 if (8 == arm_insn_r->opcode)
2d9e6acb
YQ
10258 {
10259 record_buf_mem[0] = 4;
10260 }
10261 else
10262 {
10263 /* SWPB insn, swaps only byte. */
10264 record_buf_mem[0] = 1;
10265 }
72508ac0
PO
10266 record_buf_mem[1] = u_regval[0];
10267 arm_insn_r->mem_rec_count = 1;
10268 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10269 arm_insn_r->reg_rec_count = 1;
10270 }
2d9e6acb
YQ
10271 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10272 || 15 == arm_insn_r->decode)
72508ac0 10273 {
2d9e6acb
YQ
10274 if ((opcode1 & 0x12) == 2)
10275 {
10276 /* Extra load/store (unprivileged) */
10277 return -1;
10278 }
10279 else
10280 {
10281 /* Extra load/store */
10282 switch (bits (arm_insn_r->arm_insn, 5, 6))
10283 {
10284 case 1:
10285 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10286 {
10287 /* STRH (register), STRH (immediate) */
10288 arm_record_strx (arm_insn_r, &record_buf[0],
10289 &record_buf_mem[0], ARM_RECORD_STRH);
10290 }
10291 else if ((opcode1 & 0x05) == 0x1)
10292 {
10293 /* LDRH (register) */
10294 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10295 arm_insn_r->reg_rec_count = 1;
72508ac0 10296
2d9e6acb
YQ
10297 if (bit (arm_insn_r->arm_insn, 21))
10298 {
10299 /* Write back to Rn. */
10300 record_buf[arm_insn_r->reg_rec_count++]
10301 = bits (arm_insn_r->arm_insn, 16, 19);
10302 }
10303 }
10304 else if ((opcode1 & 0x05) == 0x5)
10305 {
10306 /* LDRH (immediate), LDRH (literal) */
10307 int rn = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10308
2d9e6acb
YQ
10309 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10310 arm_insn_r->reg_rec_count = 1;
10311
10312 if (rn != 15)
10313 {
10314 /*LDRH (immediate) */
10315 if (bit (arm_insn_r->arm_insn, 21))
10316 {
10317 /* Write back to Rn. */
10318 record_buf[arm_insn_r->reg_rec_count++] = rn;
10319 }
10320 }
10321 }
10322 else
10323 return -1;
10324 break;
10325 case 2:
10326 if ((opcode1 & 0x05) == 0x0)
10327 {
10328 /* LDRD (register) */
10329 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10330 record_buf[1] = record_buf[0] + 1;
10331 arm_insn_r->reg_rec_count = 2;
10332
10333 if (bit (arm_insn_r->arm_insn, 21))
10334 {
10335 /* Write back to Rn. */
10336 record_buf[arm_insn_r->reg_rec_count++]
10337 = bits (arm_insn_r->arm_insn, 16, 19);
10338 }
10339 }
10340 else if ((opcode1 & 0x05) == 0x1)
10341 {
10342 /* LDRSB (register) */
10343 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10344 arm_insn_r->reg_rec_count = 1;
10345
10346 if (bit (arm_insn_r->arm_insn, 21))
10347 {
10348 /* Write back to Rn. */
10349 record_buf[arm_insn_r->reg_rec_count++]
10350 = bits (arm_insn_r->arm_insn, 16, 19);
10351 }
10352 }
10353 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10354 {
10355 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10356 LDRSB (literal) */
10357 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10358
10359 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10360 arm_insn_r->reg_rec_count = 1;
10361
10362 if (rn != 15)
10363 {
10364 /*LDRD (immediate), LDRSB (immediate) */
10365 if (bit (arm_insn_r->arm_insn, 21))
10366 {
10367 /* Write back to Rn. */
10368 record_buf[arm_insn_r->reg_rec_count++] = rn;
10369 }
10370 }
10371 }
10372 else
10373 return -1;
10374 break;
10375 case 3:
10376 if ((opcode1 & 0x05) == 0x0)
10377 {
10378 /* STRD (register) */
10379 arm_record_strx (arm_insn_r, &record_buf[0],
10380 &record_buf_mem[0], ARM_RECORD_STRD);
10381 }
10382 else if ((opcode1 & 0x05) == 0x1)
10383 {
10384 /* LDRSH (register) */
10385 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10386 arm_insn_r->reg_rec_count = 1;
10387
10388 if (bit (arm_insn_r->arm_insn, 21))
10389 {
10390 /* Write back to Rn. */
10391 record_buf[arm_insn_r->reg_rec_count++]
10392 = bits (arm_insn_r->arm_insn, 16, 19);
10393 }
10394 }
10395 else if ((opcode1 & 0x05) == 0x4)
10396 {
10397 /* STRD (immediate) */
10398 arm_record_strx (arm_insn_r, &record_buf[0],
10399 &record_buf_mem[0], ARM_RECORD_STRD);
10400 }
10401 else if ((opcode1 & 0x05) == 0x5)
10402 {
10403 /* LDRSH (immediate), LDRSH (literal) */
10404 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10405 arm_insn_r->reg_rec_count = 1;
10406
10407 if (bit (arm_insn_r->arm_insn, 21))
10408 {
10409 /* Write back to Rn. */
10410 record_buf[arm_insn_r->reg_rec_count++]
10411 = bits (arm_insn_r->arm_insn, 16, 19);
10412 }
10413 }
10414 else
10415 return -1;
10416 break;
10417 default:
10418 return -1;
10419 }
10420 }
72508ac0
PO
10421 }
10422 else
10423 {
10424 return -1;
10425 }
10426
10427 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10428 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10429 return 0;
10430}
10431
10432/* Handling opcode 001 insns. */
10433
10434static int
10435arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10436{
10437 uint32_t record_buf[8], record_buf_mem[8];
10438
10439 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10440 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10441
10442 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10443 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10444 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10445 )
10446 {
10447 /* Handle MSR insn. */
10448 if (9 == arm_insn_r->opcode)
10449 {
10450 /* CSPR is going to be changed. */
10451 record_buf[0] = ARM_PS_REGNUM;
10452 arm_insn_r->reg_rec_count = 1;
10453 }
10454 else
10455 {
10456 /* SPSR is going to be changed. */
10457 }
10458 }
10459 else if (arm_insn_r->opcode <= 15)
10460 {
10461 /* Normal data processing insns. */
10462 /* Out of 11 shifter operands mode, all the insn modifies destination
10463 register, which is specified by 13-16 decode. */
10464 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10465 record_buf[1] = ARM_PS_REGNUM;
10466 arm_insn_r->reg_rec_count = 2;
10467 }
10468 else
10469 {
10470 return -1;
10471 }
10472
10473 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10474 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10475 return 0;
10476}
10477
c55978a6
YQ
10478static int
10479arm_record_media (insn_decode_record *arm_insn_r)
10480{
10481 uint32_t record_buf[8];
10482
10483 switch (bits (arm_insn_r->arm_insn, 22, 24))
10484 {
10485 case 0:
10486 /* Parallel addition and subtraction, signed */
10487 case 1:
10488 /* Parallel addition and subtraction, unsigned */
10489 case 2:
10490 case 3:
10491 /* Packing, unpacking, saturation and reversal */
10492 {
10493 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10494
10495 record_buf[arm_insn_r->reg_rec_count++] = rd;
10496 }
10497 break;
10498
10499 case 4:
10500 case 5:
10501 /* Signed multiplies */
10502 {
10503 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10504 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10505
10506 record_buf[arm_insn_r->reg_rec_count++] = rd;
10507 if (op1 == 0x0)
10508 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10509 else if (op1 == 0x4)
10510 record_buf[arm_insn_r->reg_rec_count++]
10511 = bits (arm_insn_r->arm_insn, 12, 15);
10512 }
10513 break;
10514
10515 case 6:
10516 {
10517 if (bit (arm_insn_r->arm_insn, 21)
10518 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10519 {
10520 /* SBFX */
10521 record_buf[arm_insn_r->reg_rec_count++]
10522 = bits (arm_insn_r->arm_insn, 12, 15);
10523 }
10524 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10525 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10526 {
10527 /* USAD8 and USADA8 */
10528 record_buf[arm_insn_r->reg_rec_count++]
10529 = bits (arm_insn_r->arm_insn, 16, 19);
10530 }
10531 }
10532 break;
10533
10534 case 7:
10535 {
10536 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10537 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10538 {
10539 /* Permanently UNDEFINED */
10540 return -1;
10541 }
10542 else
10543 {
10544 /* BFC, BFI and UBFX */
10545 record_buf[arm_insn_r->reg_rec_count++]
10546 = bits (arm_insn_r->arm_insn, 12, 15);
10547 }
10548 }
10549 break;
10550
10551 default:
10552 return -1;
10553 }
10554
10555 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10556
10557 return 0;
10558}
10559
71e396f9 10560/* Handle ARM mode instructions with opcode 010. */
72508ac0
PO
10561
10562static int
10563arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10564{
10565 struct regcache *reg_cache = arm_insn_r->regcache;
10566
71e396f9
LM
10567 uint32_t reg_base , reg_dest;
10568 uint32_t offset_12, tgt_mem_addr;
72508ac0 10569 uint32_t record_buf[8], record_buf_mem[8];
71e396f9
LM
10570 unsigned char wback;
10571 ULONGEST u_regval;
72508ac0 10572
71e396f9
LM
10573 /* Calculate wback. */
10574 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10575 || (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0 10576
71e396f9
LM
10577 arm_insn_r->reg_rec_count = 0;
10578 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0
PO
10579
10580 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10581 {
71e396f9
LM
10582 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10583 and LDRT. */
10584
72508ac0 10585 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
71e396f9
LM
10586 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10587
10588 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10589 preceeds a LDR instruction having R15 as reg_base, it
10590 emulates a branch and link instruction, and hence we need to save
10591 CPSR and PC as well. */
10592 if (ARM_PC_REGNUM == reg_dest)
10593 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10594
10595 /* If wback is true, also save the base register, which is going to be
10596 written to. */
10597 if (wback)
10598 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10599 }
10600 else
10601 {
71e396f9
LM
10602 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10603
72508ac0 10604 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
71e396f9
LM
10605 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10606
10607 /* Handle bit U. */
72508ac0 10608 if (bit (arm_insn_r->arm_insn, 23))
71e396f9
LM
10609 {
10610 /* U == 1: Add the offset. */
10611 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10612 }
72508ac0 10613 else
71e396f9
LM
10614 {
10615 /* U == 0: subtract the offset. */
10616 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10617 }
10618
10619 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10620 bytes. */
10621 if (bit (arm_insn_r->arm_insn, 22))
10622 {
10623 /* STRB and STRBT: 1 byte. */
10624 record_buf_mem[0] = 1;
10625 }
10626 else
10627 {
10628 /* STR and STRT: 4 bytes. */
10629 record_buf_mem[0] = 4;
10630 }
10631
10632 /* Handle bit P. */
10633 if (bit (arm_insn_r->arm_insn, 24))
10634 record_buf_mem[1] = tgt_mem_addr;
10635 else
10636 record_buf_mem[1] = (uint32_t) u_regval;
72508ac0 10637
72508ac0
PO
10638 arm_insn_r->mem_rec_count = 1;
10639
71e396f9
LM
10640 /* If wback is true, also save the base register, which is going to be
10641 written to. */
10642 if (wback)
10643 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10644 }
10645
10646 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10647 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10648 return 0;
10649}
10650
10651/* Handling opcode 011 insns. */
10652
10653static int
10654arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10655{
10656 struct regcache *reg_cache = arm_insn_r->regcache;
10657
10658 uint32_t shift_imm = 0;
10659 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10660 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10661 uint32_t record_buf[8], record_buf_mem[8];
10662
10663 LONGEST s_word;
10664 ULONGEST u_regval[2];
10665
c55978a6
YQ
10666 if (bit (arm_insn_r->arm_insn, 4))
10667 return arm_record_media (arm_insn_r);
10668
72508ac0
PO
10669 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10670 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10671
10672 /* Handle enhanced store insns and LDRD DSP insn,
10673 order begins according to addressing modes for store insns
10674 STRH insn. */
10675
10676 /* LDR or STR? */
10677 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10678 {
10679 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10680 /* LDR insn has a capability to do branching, if
10681 MOV LR, PC is precedded by LDR insn having Rn as R15
10682 in that case, it emulates branch and link insn, and hence we
10683 need to save CSPR and PC as well. */
10684 if (15 != reg_dest)
10685 {
10686 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10687 arm_insn_r->reg_rec_count = 1;
10688 }
10689 else
10690 {
10691 record_buf[0] = reg_dest;
10692 record_buf[1] = ARM_PS_REGNUM;
10693 arm_insn_r->reg_rec_count = 2;
10694 }
10695 }
10696 else
10697 {
10698 if (! bits (arm_insn_r->arm_insn, 4, 11))
10699 {
10700 /* Store insn, register offset and register pre-indexed,
10701 register post-indexed. */
10702 /* Get Rm. */
10703 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10704 /* Get Rn. */
10705 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10706 regcache_raw_read_unsigned (reg_cache, reg_src1
10707 , &u_regval[0]);
10708 regcache_raw_read_unsigned (reg_cache, reg_src2
10709 , &u_regval[1]);
10710 if (15 == reg_src2)
10711 {
10712 /* If R15 was used as Rn, hence current PC+8. */
10713 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10714 u_regval[0] = u_regval[0] + 8;
10715 }
10716 /* Calculate target store address, Rn +/- Rm, register offset. */
10717 /* U == 1. */
10718 if (bit (arm_insn_r->arm_insn, 23))
10719 {
10720 tgt_mem_addr = u_regval[0] + u_regval[1];
10721 }
10722 else
10723 {
10724 tgt_mem_addr = u_regval[1] - u_regval[0];
10725 }
10726
10727 switch (arm_insn_r->opcode)
10728 {
10729 /* STR. */
10730 case 8:
10731 case 12:
10732 /* STR. */
10733 case 9:
10734 case 13:
10735 /* STRT. */
10736 case 1:
10737 case 5:
10738 /* STR. */
10739 case 0:
10740 case 4:
10741 record_buf_mem[0] = 4;
10742 break;
10743
10744 /* STRB. */
10745 case 10:
10746 case 14:
10747 /* STRB. */
10748 case 11:
10749 case 15:
10750 /* STRBT. */
10751 case 3:
10752 case 7:
10753 /* STRB. */
10754 case 2:
10755 case 6:
10756 record_buf_mem[0] = 1;
10757 break;
10758
10759 default:
10760 gdb_assert_not_reached ("no decoding pattern found");
10761 break;
10762 }
10763 record_buf_mem[1] = tgt_mem_addr;
10764 arm_insn_r->mem_rec_count = 1;
10765
10766 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10767 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10768 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10769 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10770 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10771 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10772 )
10773 {
10774 /* Rn is going to be changed in pre-indexed mode and
10775 post-indexed mode as well. */
10776 record_buf[0] = reg_src2;
10777 arm_insn_r->reg_rec_count = 1;
10778 }
10779 }
10780 else
10781 {
10782 /* Store insn, scaled register offset; scaled pre-indexed. */
10783 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10784 /* Get Rm. */
10785 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10786 /* Get Rn. */
10787 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10788 /* Get shift_imm. */
10789 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10790 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10791 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10792 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10793 /* Offset_12 used as shift. */
10794 switch (offset_12)
10795 {
10796 case 0:
10797 /* Offset_12 used as index. */
10798 offset_12 = u_regval[0] << shift_imm;
10799 break;
10800
10801 case 1:
10802 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10803 break;
10804
10805 case 2:
10806 if (!shift_imm)
10807 {
10808 if (bit (u_regval[0], 31))
10809 {
10810 offset_12 = 0xFFFFFFFF;
10811 }
10812 else
10813 {
10814 offset_12 = 0;
10815 }
10816 }
10817 else
10818 {
10819 /* This is arithmetic shift. */
10820 offset_12 = s_word >> shift_imm;
10821 }
10822 break;
10823
10824 case 3:
10825 if (!shift_imm)
10826 {
10827 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10828 &u_regval[1]);
10829 /* Get C flag value and shift it by 31. */
10830 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10831 | (u_regval[0]) >> 1);
10832 }
10833 else
10834 {
10835 offset_12 = (u_regval[0] >> shift_imm) \
10836 | (u_regval[0] <<
10837 (sizeof(uint32_t) - shift_imm));
10838 }
10839 break;
10840
10841 default:
10842 gdb_assert_not_reached ("no decoding pattern found");
10843 break;
10844 }
10845
10846 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10847 /* bit U set. */
10848 if (bit (arm_insn_r->arm_insn, 23))
10849 {
10850 tgt_mem_addr = u_regval[1] + offset_12;
10851 }
10852 else
10853 {
10854 tgt_mem_addr = u_regval[1] - offset_12;
10855 }
10856
10857 switch (arm_insn_r->opcode)
10858 {
10859 /* STR. */
10860 case 8:
10861 case 12:
10862 /* STR. */
10863 case 9:
10864 case 13:
10865 /* STRT. */
10866 case 1:
10867 case 5:
10868 /* STR. */
10869 case 0:
10870 case 4:
10871 record_buf_mem[0] = 4;
10872 break;
10873
10874 /* STRB. */
10875 case 10:
10876 case 14:
10877 /* STRB. */
10878 case 11:
10879 case 15:
10880 /* STRBT. */
10881 case 3:
10882 case 7:
10883 /* STRB. */
10884 case 2:
10885 case 6:
10886 record_buf_mem[0] = 1;
10887 break;
10888
10889 default:
10890 gdb_assert_not_reached ("no decoding pattern found");
10891 break;
10892 }
10893 record_buf_mem[1] = tgt_mem_addr;
10894 arm_insn_r->mem_rec_count = 1;
10895
10896 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10897 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10898 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10899 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10900 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10901 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10902 )
10903 {
10904 /* Rn is going to be changed in register scaled pre-indexed
10905 mode,and scaled post indexed mode. */
10906 record_buf[0] = reg_src2;
10907 arm_insn_r->reg_rec_count = 1;
10908 }
10909 }
10910 }
10911
10912 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10913 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10914 return 0;
10915}
10916
71e396f9 10917/* Handle ARM mode instructions with opcode 100. */
72508ac0
PO
10918
10919static int
10920arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10921{
10922 struct regcache *reg_cache = arm_insn_r->regcache;
71e396f9
LM
10923 uint32_t register_count = 0, register_bits;
10924 uint32_t reg_base, addr_mode;
72508ac0 10925 uint32_t record_buf[24], record_buf_mem[48];
71e396f9
LM
10926 uint32_t wback;
10927 ULONGEST u_regval;
72508ac0 10928
71e396f9
LM
10929 /* Fetch the list of registers. */
10930 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10931 arm_insn_r->reg_rec_count = 0;
10932
10933 /* Fetch the base register that contains the address we are loading data
10934 to. */
10935 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10936
71e396f9
LM
10937 /* Calculate wback. */
10938 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10939
10940 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10941 {
71e396f9 10942 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10943
71e396f9 10944 /* Find out which registers are going to be loaded from memory. */
72508ac0 10945 while (register_bits)
71e396f9
LM
10946 {
10947 if (register_bits & 0x00000001)
10948 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10949 register_bits = register_bits >> 1;
10950 register_count++;
10951 }
72508ac0 10952
71e396f9
LM
10953
10954 /* If wback is true, also save the base register, which is going to be
10955 written to. */
10956 if (wback)
10957 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10958
10959 /* Save the CPSR register. */
10960 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
72508ac0
PO
10961 }
10962 else
10963 {
71e396f9 10964 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 10965
71e396f9
LM
10966 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10967
10968 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10969
10970 /* Find out how many registers are going to be stored to memory. */
72508ac0 10971 while (register_bits)
71e396f9
LM
10972 {
10973 if (register_bits & 0x00000001)
10974 register_count++;
10975 register_bits = register_bits >> 1;
10976 }
72508ac0
PO
10977
10978 switch (addr_mode)
71e396f9
LM
10979 {
10980 /* STMDA (STMED): Decrement after. */
10981 case 0:
10982 record_buf_mem[1] = (uint32_t) u_regval
f0452268 10983 - register_count * ARM_INT_REGISTER_SIZE + 4;
71e396f9
LM
10984 break;
10985 /* STM (STMIA, STMEA): Increment after. */
10986 case 1:
10987 record_buf_mem[1] = (uint32_t) u_regval;
10988 break;
10989 /* STMDB (STMFD): Decrement before. */
10990 case 2:
10991 record_buf_mem[1] = (uint32_t) u_regval
f0452268 10992 - register_count * ARM_INT_REGISTER_SIZE;
71e396f9
LM
10993 break;
10994 /* STMIB (STMFA): Increment before. */
10995 case 3:
f0452268 10996 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
71e396f9
LM
10997 break;
10998 default:
10999 gdb_assert_not_reached ("no decoding pattern found");
11000 break;
11001 }
72508ac0 11002
f0452268 11003 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
71e396f9
LM
11004 arm_insn_r->mem_rec_count = 1;
11005
11006 /* If wback is true, also save the base register, which is going to be
11007 written to. */
11008 if (wback)
11009 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
11010 }
11011
11012 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11013 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11014 return 0;
11015}
11016
11017/* Handling opcode 101 insns. */
11018
11019static int
11020arm_record_b_bl (insn_decode_record *arm_insn_r)
11021{
11022 uint32_t record_buf[8];
11023
11024 /* Handle B, BL, BLX(1) insns. */
11025 /* B simply branches so we do nothing here. */
11026 /* Note: BLX(1) doesnt fall here but instead it falls into
11027 extension space. */
11028 if (bit (arm_insn_r->arm_insn, 24))
11029 {
11030 record_buf[0] = ARM_LR_REGNUM;
11031 arm_insn_r->reg_rec_count = 1;
11032 }
11033
11034 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11035
11036 return 0;
11037}
11038
72508ac0 11039static int
c6ec2b30 11040arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
72508ac0
PO
11041{
11042 printf_unfiltered (_("Process record does not support instruction "
01e57735
YQ
11043 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11044 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
72508ac0
PO
11045
11046 return -1;
11047}
11048
5a578da5
OJ
11049/* Record handler for vector data transfer instructions. */
11050
11051static int
11052arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11053{
11054 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11055 uint32_t record_buf[4];
11056
5a578da5
OJ
11057 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11058 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11059 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11060 bit_l = bit (arm_insn_r->arm_insn, 20);
11061 bit_c = bit (arm_insn_r->arm_insn, 8);
11062
11063 /* Handle VMOV instruction. */
11064 if (bit_l && bit_c)
11065 {
11066 record_buf[0] = reg_t;
11067 arm_insn_r->reg_rec_count = 1;
11068 }
11069 else if (bit_l && !bit_c)
11070 {
11071 /* Handle VMOV instruction. */
11072 if (bits_a == 0x00)
11073 {
f1771dce 11074 record_buf[0] = reg_t;
5a578da5
OJ
11075 arm_insn_r->reg_rec_count = 1;
11076 }
11077 /* Handle VMRS instruction. */
11078 else if (bits_a == 0x07)
11079 {
11080 if (reg_t == 15)
11081 reg_t = ARM_PS_REGNUM;
11082
11083 record_buf[0] = reg_t;
11084 arm_insn_r->reg_rec_count = 1;
11085 }
11086 }
11087 else if (!bit_l && !bit_c)
11088 {
11089 /* Handle VMOV instruction. */
11090 if (bits_a == 0x00)
11091 {
f1771dce 11092 record_buf[0] = ARM_D0_REGNUM + reg_v;
5a578da5
OJ
11093
11094 arm_insn_r->reg_rec_count = 1;
11095 }
11096 /* Handle VMSR instruction. */
11097 else if (bits_a == 0x07)
11098 {
11099 record_buf[0] = ARM_FPSCR_REGNUM;
11100 arm_insn_r->reg_rec_count = 1;
11101 }
11102 }
11103 else if (!bit_l && bit_c)
11104 {
11105 /* Handle VMOV instruction. */
11106 if (!(bits_a & 0x04))
11107 {
11108 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11109 + ARM_D0_REGNUM;
11110 arm_insn_r->reg_rec_count = 1;
11111 }
11112 /* Handle VDUP instruction. */
11113 else
11114 {
11115 if (bit (arm_insn_r->arm_insn, 21))
11116 {
11117 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11118 record_buf[0] = reg_v + ARM_D0_REGNUM;
11119 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11120 arm_insn_r->reg_rec_count = 2;
11121 }
11122 else
11123 {
11124 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11125 record_buf[0] = reg_v + ARM_D0_REGNUM;
11126 arm_insn_r->reg_rec_count = 1;
11127 }
11128 }
11129 }
11130
11131 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11132 return 0;
11133}
11134
f20f80dd
OJ
11135/* Record handler for extension register load/store instructions. */
11136
11137static int
11138arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11139{
11140 uint32_t opcode, single_reg;
11141 uint8_t op_vldm_vstm;
11142 uint32_t record_buf[8], record_buf_mem[128];
11143 ULONGEST u_regval = 0;
11144
11145 struct regcache *reg_cache = arm_insn_r->regcache;
f20f80dd
OJ
11146
11147 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11148 single_reg = !bit (arm_insn_r->arm_insn, 8);
f20f80dd
OJ
11149 op_vldm_vstm = opcode & 0x1b;
11150
11151 /* Handle VMOV instructions. */
11152 if ((opcode & 0x1e) == 0x04)
11153 {
9fde51ed 11154 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
01e57735
YQ
11155 {
11156 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11157 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11158 arm_insn_r->reg_rec_count = 2;
11159 }
f20f80dd 11160 else
01e57735 11161 {
9fde51ed
YQ
11162 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11163 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11164
9fde51ed 11165 if (single_reg)
01e57735 11166 {
9fde51ed
YQ
11167 /* The first S register number m is REG_M:M (M is bit 5),
11168 the corresponding D register number is REG_M:M / 2, which
11169 is REG_M. */
11170 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11171 /* The second S register number is REG_M:M + 1, the
11172 corresponding D register number is (REG_M:M + 1) / 2.
11173 IOW, if bit M is 1, the first and second S registers
11174 are mapped to different D registers, otherwise, they are
11175 in the same D register. */
11176 if (bit_m)
11177 {
11178 record_buf[arm_insn_r->reg_rec_count++]
11179 = ARM_D0_REGNUM + reg_m + 1;
11180 }
01e57735
YQ
11181 }
11182 else
11183 {
9fde51ed 11184 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
01e57735
YQ
11185 arm_insn_r->reg_rec_count = 1;
11186 }
11187 }
f20f80dd
OJ
11188 }
11189 /* Handle VSTM and VPUSH instructions. */
11190 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11191 || op_vldm_vstm == 0x12)
f20f80dd
OJ
11192 {
11193 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11194 uint32_t memory_index = 0;
11195
11196 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11197 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11198 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11199 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11200 memory_count = imm_off8;
11201
11202 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11203 start_address = u_regval;
f20f80dd 11204 else
01e57735 11205 start_address = u_regval - imm_off32;
f20f80dd
OJ
11206
11207 if (bit (arm_insn_r->arm_insn, 21))
01e57735
YQ
11208 {
11209 record_buf[0] = reg_rn;
11210 arm_insn_r->reg_rec_count = 1;
11211 }
f20f80dd
OJ
11212
11213 while (memory_count > 0)
01e57735 11214 {
9fde51ed 11215 if (single_reg)
01e57735 11216 {
9fde51ed
YQ
11217 record_buf_mem[memory_index] = 4;
11218 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11219 start_address = start_address + 4;
11220 memory_index = memory_index + 2;
11221 }
11222 else
11223 {
9fde51ed
YQ
11224 record_buf_mem[memory_index] = 4;
11225 record_buf_mem[memory_index + 1] = start_address;
11226 record_buf_mem[memory_index + 2] = 4;
11227 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11228 start_address = start_address + 8;
11229 memory_index = memory_index + 4;
11230 }
11231 memory_count--;
11232 }
f20f80dd
OJ
11233 arm_insn_r->mem_rec_count = (memory_index >> 1);
11234 }
11235 /* Handle VLDM instructions. */
11236 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11237 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11238 {
11239 uint32_t reg_count, reg_vd;
11240 uint32_t reg_index = 0;
9fde51ed 11241 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
f20f80dd
OJ
11242
11243 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11244 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11245
9fde51ed
YQ
11246 /* REG_VD is the first D register number. If the instruction
11247 loads memory to S registers (SINGLE_REG is TRUE), the register
11248 number is (REG_VD << 1 | bit D), so the corresponding D
11249 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11250 if (!single_reg)
11251 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11252
9fde51ed 11253 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11254 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11255
9fde51ed
YQ
11256 /* If the instruction loads memory to D register, REG_COUNT should
11257 be divided by 2, according to the ARM Architecture Reference
11258 Manual. If the instruction loads memory to S register, divide by
11259 2 as well because two S registers are mapped to D register. */
11260 reg_count = reg_count / 2;
11261 if (single_reg && bit_d)
01e57735 11262 {
9fde51ed
YQ
11263 /* Increase the register count if S register list starts from
11264 an odd number (bit d is one). */
11265 reg_count++;
11266 }
f20f80dd 11267
9fde51ed
YQ
11268 while (reg_count > 0)
11269 {
11270 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
01e57735
YQ
11271 reg_count--;
11272 }
f20f80dd
OJ
11273 arm_insn_r->reg_rec_count = reg_index;
11274 }
11275 /* VSTR Vector store register. */
11276 else if ((opcode & 0x13) == 0x10)
11277 {
bec2ab5a 11278 uint32_t start_address, reg_rn, imm_off32, imm_off8;
f20f80dd
OJ
11279 uint32_t memory_index = 0;
11280
11281 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11282 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11283 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11284 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11285
11286 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11287 start_address = u_regval + imm_off32;
f20f80dd 11288 else
01e57735 11289 start_address = u_regval - imm_off32;
f20f80dd
OJ
11290
11291 if (single_reg)
01e57735 11292 {
9fde51ed
YQ
11293 record_buf_mem[memory_index] = 4;
11294 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11295 arm_insn_r->mem_rec_count = 1;
11296 }
f20f80dd 11297 else
01e57735 11298 {
9fde51ed
YQ
11299 record_buf_mem[memory_index] = 4;
11300 record_buf_mem[memory_index + 1] = start_address;
11301 record_buf_mem[memory_index + 2] = 4;
11302 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11303 arm_insn_r->mem_rec_count = 2;
11304 }
f20f80dd
OJ
11305 }
11306 /* VLDR Vector load register. */
11307 else if ((opcode & 0x13) == 0x11)
11308 {
11309 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11310
11311 if (!single_reg)
01e57735
YQ
11312 {
11313 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11314 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11315 }
f20f80dd 11316 else
01e57735
YQ
11317 {
11318 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
9fde51ed
YQ
11319 /* Record register D rather than pseudo register S. */
11320 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11321 }
f20f80dd
OJ
11322 arm_insn_r->reg_rec_count = 1;
11323 }
11324
11325 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11326 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11327 return 0;
11328}
11329
851f26ae
OJ
11330/* Record handler for arm/thumb mode VFP data processing instructions. */
11331
11332static int
11333arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11334{
11335 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11336 uint32_t record_buf[4];
11337 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11338 enum insn_types curr_insn_type = INSN_INV;
11339
11340 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11341 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11342 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11343 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11344 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11345 bit_d = bit (arm_insn_r->arm_insn, 22);
ce887586
TT
11346 /* Mask off the "D" bit. */
11347 opc1 = opc1 & ~0x04;
851f26ae
OJ
11348
11349 /* Handle VMLA, VMLS. */
11350 if (opc1 == 0x00)
11351 {
11352 if (bit (arm_insn_r->arm_insn, 10))
11353 {
11354 if (bit (arm_insn_r->arm_insn, 6))
11355 curr_insn_type = INSN_T0;
11356 else
11357 curr_insn_type = INSN_T1;
11358 }
11359 else
11360 {
11361 if (dp_op_sz)
11362 curr_insn_type = INSN_T1;
11363 else
11364 curr_insn_type = INSN_T2;
11365 }
11366 }
11367 /* Handle VNMLA, VNMLS, VNMUL. */
11368 else if (opc1 == 0x01)
11369 {
11370 if (dp_op_sz)
11371 curr_insn_type = INSN_T1;
11372 else
11373 curr_insn_type = INSN_T2;
11374 }
11375 /* Handle VMUL. */
11376 else if (opc1 == 0x02 && !(opc3 & 0x01))
11377 {
11378 if (bit (arm_insn_r->arm_insn, 10))
11379 {
11380 if (bit (arm_insn_r->arm_insn, 6))
11381 curr_insn_type = INSN_T0;
11382 else
11383 curr_insn_type = INSN_T1;
11384 }
11385 else
11386 {
11387 if (dp_op_sz)
11388 curr_insn_type = INSN_T1;
11389 else
11390 curr_insn_type = INSN_T2;
11391 }
11392 }
11393 /* Handle VADD, VSUB. */
11394 else if (opc1 == 0x03)
11395 {
11396 if (!bit (arm_insn_r->arm_insn, 9))
11397 {
11398 if (bit (arm_insn_r->arm_insn, 6))
11399 curr_insn_type = INSN_T0;
11400 else
11401 curr_insn_type = INSN_T1;
11402 }
11403 else
11404 {
11405 if (dp_op_sz)
11406 curr_insn_type = INSN_T1;
11407 else
11408 curr_insn_type = INSN_T2;
11409 }
11410 }
11411 /* Handle VDIV. */
ce887586 11412 else if (opc1 == 0x08)
851f26ae
OJ
11413 {
11414 if (dp_op_sz)
11415 curr_insn_type = INSN_T1;
11416 else
11417 curr_insn_type = INSN_T2;
11418 }
11419 /* Handle all other vfp data processing instructions. */
11420 else if (opc1 == 0x0b)
11421 {
11422 /* Handle VMOV. */
11423 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11424 {
11425 if (bit (arm_insn_r->arm_insn, 4))
11426 {
11427 if (bit (arm_insn_r->arm_insn, 6))
11428 curr_insn_type = INSN_T0;
11429 else
11430 curr_insn_type = INSN_T1;
11431 }
11432 else
11433 {
11434 if (dp_op_sz)
11435 curr_insn_type = INSN_T1;
11436 else
11437 curr_insn_type = INSN_T2;
11438 }
11439 }
11440 /* Handle VNEG and VABS. */
11441 else if ((opc2 == 0x01 && opc3 == 0x01)
11442 || (opc2 == 0x00 && opc3 == 0x03))
11443 {
11444 if (!bit (arm_insn_r->arm_insn, 11))
11445 {
11446 if (bit (arm_insn_r->arm_insn, 6))
11447 curr_insn_type = INSN_T0;
11448 else
11449 curr_insn_type = INSN_T1;
11450 }
11451 else
11452 {
11453 if (dp_op_sz)
11454 curr_insn_type = INSN_T1;
11455 else
11456 curr_insn_type = INSN_T2;
11457 }
11458 }
11459 /* Handle VSQRT. */
11460 else if (opc2 == 0x01 && opc3 == 0x03)
11461 {
11462 if (dp_op_sz)
11463 curr_insn_type = INSN_T1;
11464 else
11465 curr_insn_type = INSN_T2;
11466 }
11467 /* Handle VCVT. */
11468 else if (opc2 == 0x07 && opc3 == 0x03)
11469 {
11470 if (!dp_op_sz)
11471 curr_insn_type = INSN_T1;
11472 else
11473 curr_insn_type = INSN_T2;
11474 }
11475 else if (opc3 & 0x01)
11476 {
11477 /* Handle VCVT. */
11478 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11479 {
11480 if (!bit (arm_insn_r->arm_insn, 18))
11481 curr_insn_type = INSN_T2;
11482 else
11483 {
11484 if (dp_op_sz)
11485 curr_insn_type = INSN_T1;
11486 else
11487 curr_insn_type = INSN_T2;
11488 }
11489 }
11490 /* Handle VCVT. */
11491 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11492 {
11493 if (dp_op_sz)
11494 curr_insn_type = INSN_T1;
11495 else
11496 curr_insn_type = INSN_T2;
11497 }
11498 /* Handle VCVTB, VCVTT. */
11499 else if ((opc2 & 0x0e) == 0x02)
11500 curr_insn_type = INSN_T2;
11501 /* Handle VCMP, VCMPE. */
11502 else if ((opc2 & 0x0e) == 0x04)
11503 curr_insn_type = INSN_T3;
11504 }
11505 }
11506
11507 switch (curr_insn_type)
11508 {
11509 case INSN_T0:
11510 reg_vd = reg_vd | (bit_d << 4);
11511 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11512 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11513 arm_insn_r->reg_rec_count = 2;
11514 break;
11515
11516 case INSN_T1:
11517 reg_vd = reg_vd | (bit_d << 4);
11518 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11519 arm_insn_r->reg_rec_count = 1;
11520 break;
11521
11522 case INSN_T2:
11523 reg_vd = (reg_vd << 1) | bit_d;
11524 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11525 arm_insn_r->reg_rec_count = 1;
11526 break;
11527
11528 case INSN_T3:
11529 record_buf[0] = ARM_FPSCR_REGNUM;
11530 arm_insn_r->reg_rec_count = 1;
11531 break;
11532
11533 default:
11534 gdb_assert_not_reached ("no decoding pattern found");
11535 break;
11536 }
11537
11538 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11539 return 0;
11540}
11541
60cc5e93
OJ
11542/* Handling opcode 110 insns. */
11543
11544static int
11545arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11546{
bec2ab5a 11547 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11548
11549 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11550 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11551 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11552
11553 if ((coproc & 0x0e) == 0x0a)
11554 {
11555 /* Handle extension register ld/st instructions. */
11556 if (!(op1 & 0x20))
f20f80dd 11557 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11558
11559 /* 64-bit transfers between arm core and extension registers. */
11560 if ((op1 & 0x3e) == 0x04)
f20f80dd 11561 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11562 }
11563 else
11564 {
11565 /* Handle coprocessor ld/st instructions. */
11566 if (!(op1 & 0x3a))
11567 {
11568 /* Store. */
11569 if (!op1_ebit)
11570 return arm_record_unsupported_insn (arm_insn_r);
11571 else
11572 /* Load. */
11573 return arm_record_unsupported_insn (arm_insn_r);
11574 }
11575
11576 /* Move to coprocessor from two arm core registers. */
11577 if (op1 == 0x4)
11578 return arm_record_unsupported_insn (arm_insn_r);
11579
11580 /* Move to two arm core registers from coprocessor. */
11581 if (op1 == 0x5)
11582 {
11583 uint32_t reg_t[2];
11584
11585 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11586 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11587 arm_insn_r->reg_rec_count = 2;
11588
11589 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11590 return 0;
11591 }
11592 }
11593 return arm_record_unsupported_insn (arm_insn_r);
11594}
11595
72508ac0
PO
11596/* Handling opcode 111 insns. */
11597
11598static int
11599arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11600{
2d9e6acb 11601 uint32_t op, op1_ebit, coproc, bits_24_25;
72508ac0
PO
11602 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11603 struct regcache *reg_cache = arm_insn_r->regcache;
72508ac0
PO
11604
11605 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
60cc5e93 11606 coproc = bits (arm_insn_r->arm_insn, 8, 11);
60cc5e93
OJ
11607 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11608 op = bit (arm_insn_r->arm_insn, 4);
2d9e6acb 11609 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
97dfe206
OJ
11610
11611 /* Handle arm SWI/SVC system call instructions. */
2d9e6acb 11612 if (bits_24_25 == 0x3)
97dfe206
OJ
11613 {
11614 if (tdep->arm_syscall_record != NULL)
11615 {
11616 ULONGEST svc_operand, svc_number;
11617
11618 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11619
11620 if (svc_operand) /* OABI. */
11621 svc_number = svc_operand - 0x900000;
11622 else /* EABI. */
11623 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11624
60cc5e93 11625 return tdep->arm_syscall_record (reg_cache, svc_number);
97dfe206
OJ
11626 }
11627 else
11628 {
11629 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11630 return -1;
97dfe206
OJ
11631 }
11632 }
2d9e6acb 11633 else if (bits_24_25 == 0x02)
60cc5e93 11634 {
2d9e6acb
YQ
11635 if (op)
11636 {
11637 if ((coproc & 0x0e) == 0x0a)
11638 {
11639 /* 8, 16, and 32-bit transfer */
11640 return arm_record_vdata_transfer_insn (arm_insn_r);
11641 }
11642 else
11643 {
11644 if (op1_ebit)
11645 {
11646 /* MRC, MRC2 */
11647 uint32_t record_buf[1];
11648
11649 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11650 if (record_buf[0] == 15)
11651 record_buf[0] = ARM_PS_REGNUM;
60cc5e93 11652
2d9e6acb
YQ
11653 arm_insn_r->reg_rec_count = 1;
11654 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11655 record_buf);
11656 return 0;
11657 }
11658 else
11659 {
11660 /* MCR, MCR2 */
11661 return -1;
11662 }
11663 }
11664 }
11665 else
11666 {
11667 if ((coproc & 0x0e) == 0x0a)
11668 {
11669 /* VFP data-processing instructions. */
11670 return arm_record_vfp_data_proc_insn (arm_insn_r);
11671 }
11672 else
11673 {
11674 /* CDP, CDP2 */
11675 return -1;
11676 }
11677 }
60cc5e93 11678 }
97dfe206
OJ
11679 else
11680 {
2d9e6acb 11681 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
60cc5e93 11682
2d9e6acb
YQ
11683 if (op1 == 5)
11684 {
11685 if ((coproc & 0x0e) != 0x0a)
11686 {
11687 /* MRRC, MRRC2 */
11688 return -1;
11689 }
11690 }
11691 else if (op1 == 4 || op1 == 5)
11692 {
11693 if ((coproc & 0x0e) == 0x0a)
11694 {
11695 /* 64-bit transfers between ARM core and extension */
11696 return -1;
11697 }
11698 else if (op1 == 4)
11699 {
11700 /* MCRR, MCRR2 */
11701 return -1;
11702 }
11703 }
11704 else if (op1 == 0 || op1 == 1)
11705 {
11706 /* UNDEFINED */
11707 return -1;
11708 }
11709 else
11710 {
11711 if ((coproc & 0x0e) == 0x0a)
11712 {
11713 /* Extension register load/store */
11714 }
11715 else
11716 {
11717 /* STC, STC2, LDC, LDC2 */
11718 }
11719 return -1;
11720 }
97dfe206 11721 }
72508ac0 11722
2d9e6acb 11723 return -1;
72508ac0
PO
11724}
11725
11726/* Handling opcode 000 insns. */
11727
11728static int
11729thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11730{
11731 uint32_t record_buf[8];
11732 uint32_t reg_src1 = 0;
11733
11734 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11735
11736 record_buf[0] = ARM_PS_REGNUM;
11737 record_buf[1] = reg_src1;
11738 thumb_insn_r->reg_rec_count = 2;
11739
11740 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11741
11742 return 0;
11743}
11744
11745
11746/* Handling opcode 001 insns. */
11747
11748static int
11749thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11750{
11751 uint32_t record_buf[8];
11752 uint32_t reg_src1 = 0;
11753
11754 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11755
11756 record_buf[0] = ARM_PS_REGNUM;
11757 record_buf[1] = reg_src1;
11758 thumb_insn_r->reg_rec_count = 2;
11759
11760 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11761
11762 return 0;
11763}
11764
11765/* Handling opcode 010 insns. */
11766
11767static int
11768thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11769{
11770 struct regcache *reg_cache = thumb_insn_r->regcache;
11771 uint32_t record_buf[8], record_buf_mem[8];
11772
11773 uint32_t reg_src1 = 0, reg_src2 = 0;
11774 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11775
11776 ULONGEST u_regval[2] = {0};
11777
11778 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11779
11780 if (bit (thumb_insn_r->arm_insn, 12))
11781 {
11782 /* Handle load/store register offset. */
b121eeb9
YQ
11783 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11784
b020ff80 11785 if (in_inclusive_range (opB, 4U, 7U))
72508ac0
PO
11786 {
11787 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11788 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11789 record_buf[0] = reg_src1;
11790 thumb_insn_r->reg_rec_count = 1;
11791 }
b020ff80 11792 else if (in_inclusive_range (opB, 0U, 2U))
72508ac0
PO
11793 {
11794 /* STR(2), STRB(2), STRH(2) . */
11795 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11796 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11797 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11798 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
b121eeb9 11799 if (0 == opB)
72508ac0 11800 record_buf_mem[0] = 4; /* STR (2). */
b121eeb9 11801 else if (2 == opB)
72508ac0 11802 record_buf_mem[0] = 1; /* STRB (2). */
b121eeb9 11803 else if (1 == opB)
72508ac0
PO
11804 record_buf_mem[0] = 2; /* STRH (2). */
11805 record_buf_mem[1] = u_regval[0] + u_regval[1];
11806 thumb_insn_r->mem_rec_count = 1;
11807 }
11808 }
11809 else if (bit (thumb_insn_r->arm_insn, 11))
11810 {
11811 /* Handle load from literal pool. */
11812 /* LDR(3). */
11813 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11814 record_buf[0] = reg_src1;
11815 thumb_insn_r->reg_rec_count = 1;
11816 }
11817 else if (opcode1)
11818 {
b121eeb9 11819 /* Special data instructions and branch and exchange */
72508ac0
PO
11820 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11821 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11822 if ((3 == opcode2) && (!opcode3))
11823 {
11824 /* Branch with exchange. */
11825 record_buf[0] = ARM_PS_REGNUM;
11826 thumb_insn_r->reg_rec_count = 1;
11827 }
11828 else
11829 {
1f33efec
YQ
11830 /* Format 8; special data processing insns. */
11831 record_buf[0] = ARM_PS_REGNUM;
11832 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11833 | bits (thumb_insn_r->arm_insn, 0, 2));
72508ac0
PO
11834 thumb_insn_r->reg_rec_count = 2;
11835 }
11836 }
11837 else
11838 {
11839 /* Format 5; data processing insns. */
11840 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11841 if (bit (thumb_insn_r->arm_insn, 7))
11842 {
11843 reg_src1 = reg_src1 + 8;
11844 }
11845 record_buf[0] = ARM_PS_REGNUM;
11846 record_buf[1] = reg_src1;
11847 thumb_insn_r->reg_rec_count = 2;
11848 }
11849
11850 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11851 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11852 record_buf_mem);
11853
11854 return 0;
11855}
11856
11857/* Handling opcode 001 insns. */
11858
11859static int
11860thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11861{
11862 struct regcache *reg_cache = thumb_insn_r->regcache;
11863 uint32_t record_buf[8], record_buf_mem[8];
11864
11865 uint32_t reg_src1 = 0;
11866 uint32_t opcode = 0, immed_5 = 0;
11867
11868 ULONGEST u_regval = 0;
11869
11870 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11871
11872 if (opcode)
11873 {
11874 /* LDR(1). */
11875 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11876 record_buf[0] = reg_src1;
11877 thumb_insn_r->reg_rec_count = 1;
11878 }
11879 else
11880 {
11881 /* STR(1). */
11882 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11883 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11884 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11885 record_buf_mem[0] = 4;
11886 record_buf_mem[1] = u_regval + (immed_5 * 4);
11887 thumb_insn_r->mem_rec_count = 1;
11888 }
11889
11890 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11891 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11892 record_buf_mem);
11893
11894 return 0;
11895}
11896
11897/* Handling opcode 100 insns. */
11898
11899static int
11900thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11901{
11902 struct regcache *reg_cache = thumb_insn_r->regcache;
11903 uint32_t record_buf[8], record_buf_mem[8];
11904
11905 uint32_t reg_src1 = 0;
11906 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11907
11908 ULONGEST u_regval = 0;
11909
11910 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11911
11912 if (3 == opcode)
11913 {
11914 /* LDR(4). */
11915 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11916 record_buf[0] = reg_src1;
11917 thumb_insn_r->reg_rec_count = 1;
11918 }
11919 else if (1 == opcode)
11920 {
11921 /* LDRH(1). */
11922 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11923 record_buf[0] = reg_src1;
11924 thumb_insn_r->reg_rec_count = 1;
11925 }
11926 else if (2 == opcode)
11927 {
11928 /* STR(3). */
11929 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11930 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11931 record_buf_mem[0] = 4;
11932 record_buf_mem[1] = u_regval + (immed_8 * 4);
11933 thumb_insn_r->mem_rec_count = 1;
11934 }
11935 else if (0 == opcode)
11936 {
11937 /* STRH(1). */
11938 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11939 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11940 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11941 record_buf_mem[0] = 2;
11942 record_buf_mem[1] = u_regval + (immed_5 * 2);
11943 thumb_insn_r->mem_rec_count = 1;
11944 }
11945
11946 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11947 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11948 record_buf_mem);
11949
11950 return 0;
11951}
11952
11953/* Handling opcode 101 insns. */
11954
11955static int
11956thumb_record_misc (insn_decode_record *thumb_insn_r)
11957{
11958 struct regcache *reg_cache = thumb_insn_r->regcache;
11959
b121eeb9 11960 uint32_t opcode = 0;
72508ac0 11961 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11962 uint32_t index = 0, start_address = 0;
72508ac0
PO
11963 uint32_t record_buf[24], record_buf_mem[48];
11964 uint32_t reg_src1;
11965
11966 ULONGEST u_regval = 0;
11967
11968 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
72508ac0 11969
b121eeb9 11970 if (opcode == 0 || opcode == 1)
72508ac0 11971 {
b121eeb9
YQ
11972 /* ADR and ADD (SP plus immediate) */
11973
72508ac0
PO
11974 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11975 record_buf[0] = reg_src1;
11976 thumb_insn_r->reg_rec_count = 1;
11977 }
b121eeb9 11978 else
72508ac0 11979 {
b121eeb9
YQ
11980 /* Miscellaneous 16-bit instructions */
11981 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11982
11983 switch (opcode2)
11984 {
11985 case 6:
11986 /* SETEND and CPS */
11987 break;
11988 case 0:
11989 /* ADD/SUB (SP plus immediate) */
11990 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11991 record_buf[0] = ARM_SP_REGNUM;
11992 thumb_insn_r->reg_rec_count = 1;
11993 break;
11994 case 1: /* fall through */
11995 case 3: /* fall through */
11996 case 9: /* fall through */
11997 case 11:
11998 /* CBNZ, CBZ */
b121eeb9
YQ
11999 break;
12000 case 2:
12001 /* SXTH, SXTB, UXTH, UXTB */
12002 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12003 thumb_insn_r->reg_rec_count = 1;
12004 break;
12005 case 4: /* fall through */
12006 case 5:
12007 /* PUSH. */
12008 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12009 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12010 while (register_bits)
12011 {
12012 if (register_bits & 0x00000001)
12013 register_count++;
12014 register_bits = register_bits >> 1;
12015 }
12016 start_address = u_regval - \
12017 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12018 thumb_insn_r->mem_rec_count = register_count;
12019 while (register_count)
12020 {
12021 record_buf_mem[(register_count * 2) - 1] = start_address;
12022 record_buf_mem[(register_count * 2) - 2] = 4;
12023 start_address = start_address + 4;
12024 register_count--;
12025 }
12026 record_buf[0] = ARM_SP_REGNUM;
12027 thumb_insn_r->reg_rec_count = 1;
12028 break;
12029 case 10:
12030 /* REV, REV16, REVSH */
ba14f379
YQ
12031 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12032 thumb_insn_r->reg_rec_count = 1;
b121eeb9
YQ
12033 break;
12034 case 12: /* fall through */
12035 case 13:
12036 /* POP. */
12037 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12038 while (register_bits)
12039 {
12040 if (register_bits & 0x00000001)
12041 record_buf[index++] = register_count;
12042 register_bits = register_bits >> 1;
12043 register_count++;
12044 }
12045 record_buf[index++] = ARM_PS_REGNUM;
12046 record_buf[index++] = ARM_SP_REGNUM;
12047 thumb_insn_r->reg_rec_count = index;
12048 break;
12049 case 0xe:
12050 /* BKPT insn. */
12051 /* Handle enhanced software breakpoint insn, BKPT. */
12052 /* CPSR is changed to be executed in ARM state, disabling normal
12053 interrupts, entering abort mode. */
12054 /* According to high vector configuration PC is set. */
12055 /* User hits breakpoint and type reverse, in that case, we need to go back with
12056 previous CPSR and Program Counter. */
12057 record_buf[0] = ARM_PS_REGNUM;
12058 record_buf[1] = ARM_LR_REGNUM;
12059 thumb_insn_r->reg_rec_count = 2;
12060 /* We need to save SPSR value, which is not yet done. */
12061 printf_unfiltered (_("Process record does not support instruction "
12062 "0x%0x at address %s.\n"),
12063 thumb_insn_r->arm_insn,
12064 paddress (thumb_insn_r->gdbarch,
12065 thumb_insn_r->this_addr));
12066 return -1;
12067
12068 case 0xf:
12069 /* If-Then, and hints */
12070 break;
12071 default:
12072 return -1;
12073 };
72508ac0
PO
12074 }
12075
12076 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12077 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12078 record_buf_mem);
12079
12080 return 0;
12081}
12082
12083/* Handling opcode 110 insns. */
12084
12085static int
12086thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12087{
12088 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12089 struct regcache *reg_cache = thumb_insn_r->regcache;
12090
12091 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12092 uint32_t reg_src1 = 0;
12093 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 12094 uint32_t index = 0, start_address = 0;
72508ac0
PO
12095 uint32_t record_buf[24], record_buf_mem[48];
12096
12097 ULONGEST u_regval = 0;
12098
12099 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12100 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12101
12102 if (1 == opcode2)
12103 {
12104
12105 /* LDMIA. */
12106 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12107 /* Get Rn. */
12108 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12109 while (register_bits)
12110 {
12111 if (register_bits & 0x00000001)
f969241e 12112 record_buf[index++] = register_count;
72508ac0 12113 register_bits = register_bits >> 1;
f969241e 12114 register_count++;
72508ac0 12115 }
f969241e
OJ
12116 record_buf[index++] = reg_src1;
12117 thumb_insn_r->reg_rec_count = index;
72508ac0
PO
12118 }
12119 else if (0 == opcode2)
12120 {
12121 /* It handles both STMIA. */
12122 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12123 /* Get Rn. */
12124 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12125 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12126 while (register_bits)
12127 {
12128 if (register_bits & 0x00000001)
12129 register_count++;
12130 register_bits = register_bits >> 1;
12131 }
12132 start_address = u_regval;
12133 thumb_insn_r->mem_rec_count = register_count;
12134 while (register_count)
12135 {
12136 record_buf_mem[(register_count * 2) - 1] = start_address;
12137 record_buf_mem[(register_count * 2) - 2] = 4;
12138 start_address = start_address + 4;
12139 register_count--;
12140 }
12141 }
12142 else if (0x1F == opcode1)
12143 {
12144 /* Handle arm syscall insn. */
97dfe206 12145 if (tdep->arm_syscall_record != NULL)
72508ac0 12146 {
97dfe206
OJ
12147 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12148 ret = tdep->arm_syscall_record (reg_cache, u_regval);
72508ac0
PO
12149 }
12150 else
12151 {
12152 printf_unfiltered (_("no syscall record support\n"));
12153 return -1;
12154 }
12155 }
12156
12157 /* B (1), conditional branch is automatically taken care in process_record,
12158 as PC is saved there. */
12159
12160 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12161 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12162 record_buf_mem);
12163
12164 return ret;
12165}
12166
12167/* Handling opcode 111 insns. */
12168
12169static int
12170thumb_record_branch (insn_decode_record *thumb_insn_r)
12171{
12172 uint32_t record_buf[8];
12173 uint32_t bits_h = 0;
12174
12175 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12176
12177 if (2 == bits_h || 3 == bits_h)
12178 {
12179 /* BL */
12180 record_buf[0] = ARM_LR_REGNUM;
12181 thumb_insn_r->reg_rec_count = 1;
12182 }
12183 else if (1 == bits_h)
12184 {
12185 /* BLX(1). */
12186 record_buf[0] = ARM_PS_REGNUM;
12187 record_buf[1] = ARM_LR_REGNUM;
12188 thumb_insn_r->reg_rec_count = 2;
12189 }
12190
12191 /* B(2) is automatically taken care in process_record, as PC is
12192 saved there. */
12193
12194 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12195
12196 return 0;
12197}
12198
c6ec2b30
OJ
12199/* Handler for thumb2 load/store multiple instructions. */
12200
12201static int
12202thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12203{
12204 struct regcache *reg_cache = thumb2_insn_r->regcache;
12205
12206 uint32_t reg_rn, op;
12207 uint32_t register_bits = 0, register_count = 0;
12208 uint32_t index = 0, start_address = 0;
12209 uint32_t record_buf[24], record_buf_mem[48];
12210
12211 ULONGEST u_regval = 0;
12212
12213 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12214 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12215
12216 if (0 == op || 3 == op)
12217 {
12218 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12219 {
12220 /* Handle RFE instruction. */
12221 record_buf[0] = ARM_PS_REGNUM;
12222 thumb2_insn_r->reg_rec_count = 1;
12223 }
12224 else
12225 {
12226 /* Handle SRS instruction after reading banked SP. */
12227 return arm_record_unsupported_insn (thumb2_insn_r);
12228 }
12229 }
12230 else if (1 == op || 2 == op)
12231 {
12232 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12233 {
12234 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12235 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12236 while (register_bits)
12237 {
12238 if (register_bits & 0x00000001)
12239 record_buf[index++] = register_count;
12240
12241 register_count++;
12242 register_bits = register_bits >> 1;
12243 }
12244 record_buf[index++] = reg_rn;
12245 record_buf[index++] = ARM_PS_REGNUM;
12246 thumb2_insn_r->reg_rec_count = index;
12247 }
12248 else
12249 {
12250 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12251 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12252 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12253 while (register_bits)
12254 {
12255 if (register_bits & 0x00000001)
12256 register_count++;
12257
12258 register_bits = register_bits >> 1;
12259 }
12260
12261 if (1 == op)
12262 {
12263 /* Start address calculation for LDMDB/LDMEA. */
12264 start_address = u_regval;
12265 }
12266 else if (2 == op)
12267 {
12268 /* Start address calculation for LDMDB/LDMEA. */
12269 start_address = u_regval - register_count * 4;
12270 }
12271
12272 thumb2_insn_r->mem_rec_count = register_count;
12273 while (register_count)
12274 {
12275 record_buf_mem[register_count * 2 - 1] = start_address;
12276 record_buf_mem[register_count * 2 - 2] = 4;
12277 start_address = start_address + 4;
12278 register_count--;
12279 }
12280 record_buf[0] = reg_rn;
12281 record_buf[1] = ARM_PS_REGNUM;
12282 thumb2_insn_r->reg_rec_count = 2;
12283 }
12284 }
12285
12286 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12287 record_buf_mem);
12288 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12289 record_buf);
12290 return ARM_RECORD_SUCCESS;
12291}
12292
12293/* Handler for thumb2 load/store (dual/exclusive) and table branch
12294 instructions. */
12295
12296static int
12297thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12298{
12299 struct regcache *reg_cache = thumb2_insn_r->regcache;
12300
12301 uint32_t reg_rd, reg_rn, offset_imm;
12302 uint32_t reg_dest1, reg_dest2;
12303 uint32_t address, offset_addr;
12304 uint32_t record_buf[8], record_buf_mem[8];
12305 uint32_t op1, op2, op3;
c6ec2b30
OJ
12306
12307 ULONGEST u_regval[2];
12308
12309 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12310 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12311 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12312
12313 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12314 {
12315 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12316 {
12317 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12318 record_buf[0] = reg_dest1;
12319 record_buf[1] = ARM_PS_REGNUM;
12320 thumb2_insn_r->reg_rec_count = 2;
12321 }
12322
12323 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12324 {
12325 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12326 record_buf[2] = reg_dest2;
12327 thumb2_insn_r->reg_rec_count = 3;
12328 }
12329 }
12330 else
12331 {
12332 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12333 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12334
12335 if (0 == op1 && 0 == op2)
12336 {
12337 /* Handle STREX. */
12338 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12339 address = u_regval[0] + (offset_imm * 4);
12340 record_buf_mem[0] = 4;
12341 record_buf_mem[1] = address;
12342 thumb2_insn_r->mem_rec_count = 1;
12343 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12344 record_buf[0] = reg_rd;
12345 thumb2_insn_r->reg_rec_count = 1;
12346 }
12347 else if (1 == op1 && 0 == op2)
12348 {
12349 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12350 record_buf[0] = reg_rd;
12351 thumb2_insn_r->reg_rec_count = 1;
12352 address = u_regval[0];
12353 record_buf_mem[1] = address;
12354
12355 if (4 == op3)
12356 {
12357 /* Handle STREXB. */
12358 record_buf_mem[0] = 1;
12359 thumb2_insn_r->mem_rec_count = 1;
12360 }
12361 else if (5 == op3)
12362 {
12363 /* Handle STREXH. */
12364 record_buf_mem[0] = 2 ;
12365 thumb2_insn_r->mem_rec_count = 1;
12366 }
12367 else if (7 == op3)
12368 {
12369 /* Handle STREXD. */
12370 address = u_regval[0];
12371 record_buf_mem[0] = 4;
12372 record_buf_mem[2] = 4;
12373 record_buf_mem[3] = address + 4;
12374 thumb2_insn_r->mem_rec_count = 2;
12375 }
12376 }
12377 else
12378 {
12379 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12380
12381 if (bit (thumb2_insn_r->arm_insn, 24))
12382 {
12383 if (bit (thumb2_insn_r->arm_insn, 23))
12384 offset_addr = u_regval[0] + (offset_imm * 4);
12385 else
12386 offset_addr = u_regval[0] - (offset_imm * 4);
12387
12388 address = offset_addr;
12389 }
12390 else
12391 address = u_regval[0];
12392
12393 record_buf_mem[0] = 4;
12394 record_buf_mem[1] = address;
12395 record_buf_mem[2] = 4;
12396 record_buf_mem[3] = address + 4;
12397 thumb2_insn_r->mem_rec_count = 2;
12398 record_buf[0] = reg_rn;
12399 thumb2_insn_r->reg_rec_count = 1;
12400 }
12401 }
12402
12403 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12404 record_buf);
12405 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12406 record_buf_mem);
12407 return ARM_RECORD_SUCCESS;
12408}
12409
12410/* Handler for thumb2 data processing (shift register and modified immediate)
12411 instructions. */
12412
12413static int
12414thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12415{
12416 uint32_t reg_rd, op;
12417 uint32_t record_buf[8];
12418
12419 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12420 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12421
12422 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12423 {
12424 record_buf[0] = ARM_PS_REGNUM;
12425 thumb2_insn_r->reg_rec_count = 1;
12426 }
12427 else
12428 {
12429 record_buf[0] = reg_rd;
12430 record_buf[1] = ARM_PS_REGNUM;
12431 thumb2_insn_r->reg_rec_count = 2;
12432 }
12433
12434 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12435 record_buf);
12436 return ARM_RECORD_SUCCESS;
12437}
12438
12439/* Generic handler for thumb2 instructions which effect destination and PS
12440 registers. */
12441
12442static int
12443thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12444{
12445 uint32_t reg_rd;
12446 uint32_t record_buf[8];
12447
12448 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12449
12450 record_buf[0] = reg_rd;
12451 record_buf[1] = ARM_PS_REGNUM;
12452 thumb2_insn_r->reg_rec_count = 2;
12453
12454 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12455 record_buf);
12456 return ARM_RECORD_SUCCESS;
12457}
12458
12459/* Handler for thumb2 branch and miscellaneous control instructions. */
12460
12461static int
12462thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12463{
12464 uint32_t op, op1, op2;
12465 uint32_t record_buf[8];
12466
12467 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12468 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12469 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12470
12471 /* Handle MSR insn. */
12472 if (!(op1 & 0x2) && 0x38 == op)
12473 {
12474 if (!(op2 & 0x3))
12475 {
12476 /* CPSR is going to be changed. */
12477 record_buf[0] = ARM_PS_REGNUM;
12478 thumb2_insn_r->reg_rec_count = 1;
12479 }
12480 else
12481 {
12482 arm_record_unsupported_insn(thumb2_insn_r);
12483 return -1;
12484 }
12485 }
12486 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12487 {
12488 /* BLX. */
12489 record_buf[0] = ARM_PS_REGNUM;
12490 record_buf[1] = ARM_LR_REGNUM;
12491 thumb2_insn_r->reg_rec_count = 2;
12492 }
12493
12494 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12495 record_buf);
12496 return ARM_RECORD_SUCCESS;
12497}
12498
12499/* Handler for thumb2 store single data item instructions. */
12500
12501static int
12502thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12503{
12504 struct regcache *reg_cache = thumb2_insn_r->regcache;
12505
12506 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12507 uint32_t address, offset_addr;
12508 uint32_t record_buf[8], record_buf_mem[8];
12509 uint32_t op1, op2;
12510
12511 ULONGEST u_regval[2];
12512
12513 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12514 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12515 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12516 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12517
12518 if (bit (thumb2_insn_r->arm_insn, 23))
12519 {
12520 /* T2 encoding. */
12521 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12522 offset_addr = u_regval[0] + offset_imm;
12523 address = offset_addr;
12524 }
12525 else
12526 {
12527 /* T3 encoding. */
12528 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12529 {
12530 /* Handle STRB (register). */
12531 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12532 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12533 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12534 offset_addr = u_regval[1] << shift_imm;
12535 address = u_regval[0] + offset_addr;
12536 }
12537 else
12538 {
12539 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12540 if (bit (thumb2_insn_r->arm_insn, 10))
12541 {
12542 if (bit (thumb2_insn_r->arm_insn, 9))
12543 offset_addr = u_regval[0] + offset_imm;
12544 else
12545 offset_addr = u_regval[0] - offset_imm;
12546
12547 address = offset_addr;
12548 }
12549 else
12550 address = u_regval[0];
12551 }
12552 }
12553
12554 switch (op1)
12555 {
12556 /* Store byte instructions. */
12557 case 4:
12558 case 0:
12559 record_buf_mem[0] = 1;
12560 break;
12561 /* Store half word instructions. */
12562 case 1:
12563 case 5:
12564 record_buf_mem[0] = 2;
12565 break;
12566 /* Store word instructions. */
12567 case 2:
12568 case 6:
12569 record_buf_mem[0] = 4;
12570 break;
12571
12572 default:
12573 gdb_assert_not_reached ("no decoding pattern found");
12574 break;
12575 }
12576
12577 record_buf_mem[1] = address;
12578 thumb2_insn_r->mem_rec_count = 1;
12579 record_buf[0] = reg_rn;
12580 thumb2_insn_r->reg_rec_count = 1;
12581
12582 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12583 record_buf);
12584 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12585 record_buf_mem);
12586 return ARM_RECORD_SUCCESS;
12587}
12588
12589/* Handler for thumb2 load memory hints instructions. */
12590
12591static int
12592thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12593{
12594 uint32_t record_buf[8];
12595 uint32_t reg_rt, reg_rn;
12596
12597 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12598 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12599
12600 if (ARM_PC_REGNUM != reg_rt)
12601 {
12602 record_buf[0] = reg_rt;
12603 record_buf[1] = reg_rn;
12604 record_buf[2] = ARM_PS_REGNUM;
12605 thumb2_insn_r->reg_rec_count = 3;
12606
12607 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12608 record_buf);
12609 return ARM_RECORD_SUCCESS;
12610 }
12611
12612 return ARM_RECORD_FAILURE;
12613}
12614
12615/* Handler for thumb2 load word instructions. */
12616
12617static int
12618thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12619{
c6ec2b30
OJ
12620 uint32_t record_buf[8];
12621
12622 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12623 record_buf[1] = ARM_PS_REGNUM;
12624 thumb2_insn_r->reg_rec_count = 2;
12625
12626 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12627 record_buf);
12628 return ARM_RECORD_SUCCESS;
12629}
12630
12631/* Handler for thumb2 long multiply, long multiply accumulate, and
12632 divide instructions. */
12633
12634static int
12635thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12636{
12637 uint32_t opcode1 = 0, opcode2 = 0;
12638 uint32_t record_buf[8];
c6ec2b30
OJ
12639
12640 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12641 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12642
12643 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12644 {
12645 /* Handle SMULL, UMULL, SMULAL. */
12646 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12647 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12648 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12649 record_buf[2] = ARM_PS_REGNUM;
12650 thumb2_insn_r->reg_rec_count = 3;
12651 }
12652 else if (1 == opcode1 || 3 == opcode2)
12653 {
12654 /* Handle SDIV and UDIV. */
12655 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12656 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12657 record_buf[2] = ARM_PS_REGNUM;
12658 thumb2_insn_r->reg_rec_count = 3;
12659 }
12660 else
12661 return ARM_RECORD_FAILURE;
12662
12663 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12664 record_buf);
12665 return ARM_RECORD_SUCCESS;
12666}
12667
60cc5e93
OJ
12668/* Record handler for thumb32 coprocessor instructions. */
12669
12670static int
12671thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12672{
12673 if (bit (thumb2_insn_r->arm_insn, 25))
12674 return arm_record_coproc_data_proc (thumb2_insn_r);
12675 else
12676 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12677}
12678
1e1b6563
OJ
12679/* Record handler for advance SIMD structure load/store instructions. */
12680
12681static int
12682thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12683{
12684 struct regcache *reg_cache = thumb2_insn_r->regcache;
12685 uint32_t l_bit, a_bit, b_bits;
12686 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12687 uint32_t reg_rn, reg_vd, address, f_elem;
1e1b6563
OJ
12688 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12689 uint8_t f_ebytes;
12690
12691 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12692 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12693 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12694 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12695 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12696 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12697 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
1e1b6563
OJ
12698 f_elem = 8 / f_ebytes;
12699
12700 if (!l_bit)
12701 {
12702 ULONGEST u_regval = 0;
12703 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12704 address = u_regval;
12705
12706 if (!a_bit)
12707 {
12708 /* Handle VST1. */
12709 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12710 {
12711 if (b_bits == 0x07)
12712 bf_regs = 1;
12713 else if (b_bits == 0x0a)
12714 bf_regs = 2;
12715 else if (b_bits == 0x06)
12716 bf_regs = 3;
12717 else if (b_bits == 0x02)
12718 bf_regs = 4;
12719 else
12720 bf_regs = 0;
12721
12722 for (index_r = 0; index_r < bf_regs; index_r++)
12723 {
12724 for (index_e = 0; index_e < f_elem; index_e++)
12725 {
12726 record_buf_mem[index_m++] = f_ebytes;
12727 record_buf_mem[index_m++] = address;
12728 address = address + f_ebytes;
12729 thumb2_insn_r->mem_rec_count += 1;
12730 }
12731 }
12732 }
12733 /* Handle VST2. */
12734 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12735 {
12736 if (b_bits == 0x09 || b_bits == 0x08)
12737 bf_regs = 1;
12738 else if (b_bits == 0x03)
12739 bf_regs = 2;
12740 else
12741 bf_regs = 0;
12742
12743 for (index_r = 0; index_r < bf_regs; index_r++)
12744 for (index_e = 0; index_e < f_elem; index_e++)
12745 {
12746 for (loop_t = 0; loop_t < 2; loop_t++)
12747 {
12748 record_buf_mem[index_m++] = f_ebytes;
12749 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12750 thumb2_insn_r->mem_rec_count += 1;
12751 }
12752 address = address + (2 * f_ebytes);
12753 }
12754 }
12755 /* Handle VST3. */
12756 else if ((b_bits & 0x0e) == 0x04)
12757 {
12758 for (index_e = 0; index_e < f_elem; index_e++)
12759 {
12760 for (loop_t = 0; loop_t < 3; loop_t++)
12761 {
12762 record_buf_mem[index_m++] = f_ebytes;
12763 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12764 thumb2_insn_r->mem_rec_count += 1;
12765 }
12766 address = address + (3 * f_ebytes);
12767 }
12768 }
12769 /* Handle VST4. */
12770 else if (!(b_bits & 0x0e))
12771 {
12772 for (index_e = 0; index_e < f_elem; index_e++)
12773 {
12774 for (loop_t = 0; loop_t < 4; loop_t++)
12775 {
12776 record_buf_mem[index_m++] = f_ebytes;
12777 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12778 thumb2_insn_r->mem_rec_count += 1;
12779 }
12780 address = address + (4 * f_ebytes);
12781 }
12782 }
12783 }
12784 else
12785 {
12786 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12787
12788 if (bft_size == 0x00)
12789 f_ebytes = 1;
12790 else if (bft_size == 0x01)
12791 f_ebytes = 2;
12792 else if (bft_size == 0x02)
12793 f_ebytes = 4;
12794 else
12795 f_ebytes = 0;
12796
12797 /* Handle VST1. */
12798 if (!(b_bits & 0x0b) || b_bits == 0x08)
12799 thumb2_insn_r->mem_rec_count = 1;
12800 /* Handle VST2. */
12801 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12802 thumb2_insn_r->mem_rec_count = 2;
12803 /* Handle VST3. */
12804 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12805 thumb2_insn_r->mem_rec_count = 3;
12806 /* Handle VST4. */
12807 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12808 thumb2_insn_r->mem_rec_count = 4;
12809
12810 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12811 {
12812 record_buf_mem[index_m] = f_ebytes;
12813 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12814 }
12815 }
12816 }
12817 else
12818 {
12819 if (!a_bit)
12820 {
12821 /* Handle VLD1. */
12822 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12823 thumb2_insn_r->reg_rec_count = 1;
12824 /* Handle VLD2. */
12825 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12826 thumb2_insn_r->reg_rec_count = 2;
12827 /* Handle VLD3. */
12828 else if ((b_bits & 0x0e) == 0x04)
12829 thumb2_insn_r->reg_rec_count = 3;
12830 /* Handle VLD4. */
12831 else if (!(b_bits & 0x0e))
12832 thumb2_insn_r->reg_rec_count = 4;
12833 }
12834 else
12835 {
12836 /* Handle VLD1. */
12837 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12838 thumb2_insn_r->reg_rec_count = 1;
12839 /* Handle VLD2. */
12840 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12841 thumb2_insn_r->reg_rec_count = 2;
12842 /* Handle VLD3. */
12843 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12844 thumb2_insn_r->reg_rec_count = 3;
12845 /* Handle VLD4. */
12846 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12847 thumb2_insn_r->reg_rec_count = 4;
12848
12849 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12850 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12851 }
12852 }
12853
12854 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12855 {
12856 record_buf[index_r] = reg_rn;
12857 thumb2_insn_r->reg_rec_count += 1;
12858 }
12859
12860 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12861 record_buf);
12862 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12863 record_buf_mem);
12864 return 0;
12865}
12866
c6ec2b30
OJ
12867/* Decodes thumb2 instruction type and invokes its record handler. */
12868
12869static unsigned int
12870thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12871{
12872 uint32_t op, op1, op2;
12873
12874 op = bit (thumb2_insn_r->arm_insn, 15);
12875 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12876 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12877
12878 if (op1 == 0x01)
12879 {
12880 if (!(op2 & 0x64 ))
12881 {
12882 /* Load/store multiple instruction. */
12883 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12884 }
b121eeb9 12885 else if ((op2 & 0x64) == 0x4)
c6ec2b30
OJ
12886 {
12887 /* Load/store (dual/exclusive) and table branch instruction. */
12888 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12889 }
b121eeb9 12890 else if ((op2 & 0x60) == 0x20)
c6ec2b30
OJ
12891 {
12892 /* Data-processing (shifted register). */
12893 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12894 }
12895 else if (op2 & 0x40)
12896 {
12897 /* Co-processor instructions. */
60cc5e93 12898 return thumb2_record_coproc_insn (thumb2_insn_r);
c6ec2b30
OJ
12899 }
12900 }
12901 else if (op1 == 0x02)
12902 {
12903 if (op)
12904 {
12905 /* Branches and miscellaneous control instructions. */
12906 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12907 }
12908 else if (op2 & 0x20)
12909 {
12910 /* Data-processing (plain binary immediate) instruction. */
12911 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12912 }
12913 else
12914 {
12915 /* Data-processing (modified immediate). */
12916 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12917 }
12918 }
12919 else if (op1 == 0x03)
12920 {
12921 if (!(op2 & 0x71 ))
12922 {
12923 /* Store single data item. */
12924 return thumb2_record_str_single_data (thumb2_insn_r);
12925 }
12926 else if (!((op2 & 0x71) ^ 0x10))
12927 {
12928 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12929 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
c6ec2b30
OJ
12930 }
12931 else if (!((op2 & 0x67) ^ 0x01))
12932 {
12933 /* Load byte, memory hints instruction. */
12934 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12935 }
12936 else if (!((op2 & 0x67) ^ 0x03))
12937 {
12938 /* Load halfword, memory hints instruction. */
12939 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12940 }
12941 else if (!((op2 & 0x67) ^ 0x05))
12942 {
12943 /* Load word instruction. */
12944 return thumb2_record_ld_word (thumb2_insn_r);
12945 }
12946 else if (!((op2 & 0x70) ^ 0x20))
12947 {
12948 /* Data-processing (register) instruction. */
12949 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12950 }
12951 else if (!((op2 & 0x78) ^ 0x30))
12952 {
12953 /* Multiply, multiply accumulate, abs diff instruction. */
12954 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12955 }
12956 else if (!((op2 & 0x78) ^ 0x38))
12957 {
12958 /* Long multiply, long multiply accumulate, and divide. */
12959 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12960 }
12961 else if (op2 & 0x40)
12962 {
12963 /* Co-processor instructions. */
60cc5e93 12964 return thumb2_record_coproc_insn (thumb2_insn_r);
c6ec2b30
OJ
12965 }
12966 }
12967
12968 return -1;
12969}
72508ac0 12970
ffdbe864 12971namespace {
728a7913
YQ
12972/* Abstract memory reader. */
12973
12974class abstract_memory_reader
12975{
12976public:
12977 /* Read LEN bytes of target memory at address MEMADDR, placing the
12978 results in GDB's memory at BUF. Return true on success. */
12979
12980 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12981};
12982
12983/* Instruction reader from real target. */
12984
12985class instruction_reader : public abstract_memory_reader
12986{
12987 public:
632e107b 12988 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
728a7913
YQ
12989 {
12990 if (target_read_memory (memaddr, buf, len))
12991 return false;
12992 else
12993 return true;
12994 }
12995};
12996
ffdbe864
YQ
12997} // namespace
12998
72508ac0
PO
12999/* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13000and positive val on fauilure. */
13001
13002static int
728a7913
YQ
13003extract_arm_insn (abstract_memory_reader& reader,
13004 insn_decode_record *insn_record, uint32_t insn_size)
72508ac0
PO
13005{
13006 gdb_byte buf[insn_size];
13007
13008 memset (&buf[0], 0, insn_size);
13009
728a7913 13010 if (!reader.read (insn_record->this_addr, buf, insn_size))
72508ac0
PO
13011 return 1;
13012 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13013 insn_size,
2959fed9 13014 gdbarch_byte_order_for_code (insn_record->gdbarch));
72508ac0
PO
13015 return 0;
13016}
13017
13018typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13019
13020/* Decode arm/thumb insn depending on condition cods and opcodes; and
13021 dispatch it. */
13022
13023static int
728a7913
YQ
13024decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13025 record_type_t record_type, uint32_t insn_size)
72508ac0
PO
13026{
13027
01e57735
YQ
13028 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13029 instruction. */
0fa9c223 13030 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
72508ac0
PO
13031 {
13032 arm_record_data_proc_misc_ld_str, /* 000. */
13033 arm_record_data_proc_imm, /* 001. */
13034 arm_record_ld_st_imm_offset, /* 010. */
13035 arm_record_ld_st_reg_offset, /* 011. */
13036 arm_record_ld_st_multiple, /* 100. */
13037 arm_record_b_bl, /* 101. */
60cc5e93 13038 arm_record_asimd_vfp_coproc, /* 110. */
72508ac0
PO
13039 arm_record_coproc_data_proc /* 111. */
13040 };
13041
01e57735
YQ
13042 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13043 instruction. */
0fa9c223 13044 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
72508ac0
PO
13045 { \
13046 thumb_record_shift_add_sub, /* 000. */
13047 thumb_record_add_sub_cmp_mov, /* 001. */
13048 thumb_record_ld_st_reg_offset, /* 010. */
13049 thumb_record_ld_st_imm_offset, /* 011. */
13050 thumb_record_ld_st_stack, /* 100. */
13051 thumb_record_misc, /* 101. */
13052 thumb_record_ldm_stm_swi, /* 110. */
13053 thumb_record_branch /* 111. */
13054 };
13055
13056 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13057 uint32_t insn_id = 0;
13058
728a7913 13059 if (extract_arm_insn (reader, arm_record, insn_size))
72508ac0
PO
13060 {
13061 if (record_debug)
01e57735
YQ
13062 {
13063 printf_unfiltered (_("Process record: error reading memory at "
13064 "addr %s len = %d.\n"),
13065 paddress (arm_record->gdbarch,
13066 arm_record->this_addr), insn_size);
13067 }
72508ac0
PO
13068 return -1;
13069 }
13070 else if (ARM_RECORD == record_type)
13071 {
13072 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13073 insn_id = bits (arm_record->arm_insn, 25, 27);
ca92db2d
YQ
13074
13075 if (arm_record->cond == 0xf)
13076 ret = arm_record_extension_space (arm_record);
13077 else
01e57735 13078 {
ca92db2d
YQ
13079 /* If this insn has fallen into extension space
13080 then we need not decode it anymore. */
01e57735
YQ
13081 ret = arm_handle_insn[insn_id] (arm_record);
13082 }
ca92db2d
YQ
13083 if (ret != ARM_RECORD_SUCCESS)
13084 {
13085 arm_record_unsupported_insn (arm_record);
13086 ret = -1;
13087 }
72508ac0
PO
13088 }
13089 else if (THUMB_RECORD == record_type)
13090 {
13091 /* As thumb does not have condition codes, we set negative. */
13092 arm_record->cond = -1;
13093 insn_id = bits (arm_record->arm_insn, 13, 15);
13094 ret = thumb_handle_insn[insn_id] (arm_record);
ca92db2d
YQ
13095 if (ret != ARM_RECORD_SUCCESS)
13096 {
13097 arm_record_unsupported_insn (arm_record);
13098 ret = -1;
13099 }
72508ac0
PO
13100 }
13101 else if (THUMB2_RECORD == record_type)
13102 {
c6ec2b30
OJ
13103 /* As thumb does not have condition codes, we set negative. */
13104 arm_record->cond = -1;
13105
13106 /* Swap first half of 32bit thumb instruction with second half. */
13107 arm_record->arm_insn
01e57735 13108 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 13109
ca92db2d 13110 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 13111
ca92db2d 13112 if (ret != ARM_RECORD_SUCCESS)
01e57735
YQ
13113 {
13114 arm_record_unsupported_insn (arm_record);
13115 ret = -1;
13116 }
72508ac0
PO
13117 }
13118 else
13119 {
13120 /* Throw assertion. */
13121 gdb_assert_not_reached ("not a valid instruction, could not decode");
13122 }
13123
13124 return ret;
13125}
13126
b121eeb9
YQ
13127#if GDB_SELF_TEST
13128namespace selftests {
13129
13130/* Provide both 16-bit and 32-bit thumb instructions. */
13131
13132class instruction_reader_thumb : public abstract_memory_reader
13133{
13134public:
13135 template<size_t SIZE>
13136 instruction_reader_thumb (enum bfd_endian endian,
13137 const uint16_t (&insns)[SIZE])
13138 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13139 {}
13140
632e107b 13141 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
b121eeb9
YQ
13142 {
13143 SELF_CHECK (len == 4 || len == 2);
13144 SELF_CHECK (memaddr % 2 == 0);
13145 SELF_CHECK ((memaddr / 2) < m_insns_size);
13146
13147 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13148 if (len == 4)
13149 {
13150 store_unsigned_integer (&buf[2], 2, m_endian,
13151 m_insns[memaddr / 2 + 1]);
13152 }
13153 return true;
13154 }
13155
13156private:
13157 enum bfd_endian m_endian;
13158 const uint16_t *m_insns;
13159 size_t m_insns_size;
13160};
13161
13162static void
13163arm_record_test (void)
13164{
13165 struct gdbarch_info info;
13166 gdbarch_info_init (&info);
13167 info.bfd_arch_info = bfd_scan_arch ("arm");
13168
13169 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13170
13171 SELF_CHECK (gdbarch != NULL);
13172
13173 /* 16-bit Thumb instructions. */
13174 {
13175 insn_decode_record arm_record;
13176
13177 memset (&arm_record, 0, sizeof (insn_decode_record));
13178 arm_record.gdbarch = gdbarch;
13179
13180 static const uint16_t insns[] = {
13181 /* db b2 uxtb r3, r3 */
13182 0xb2db,
13183 /* cd 58 ldr r5, [r1, r3] */
13184 0x58cd,
13185 };
13186
13187 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13188 instruction_reader_thumb reader (endian, insns);
13189 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13190 THUMB_INSN_SIZE_BYTES);
13191
13192 SELF_CHECK (ret == 0);
13193 SELF_CHECK (arm_record.mem_rec_count == 0);
13194 SELF_CHECK (arm_record.reg_rec_count == 1);
13195 SELF_CHECK (arm_record.arm_regs[0] == 3);
13196
13197 arm_record.this_addr += 2;
13198 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13199 THUMB_INSN_SIZE_BYTES);
13200
13201 SELF_CHECK (ret == 0);
13202 SELF_CHECK (arm_record.mem_rec_count == 0);
13203 SELF_CHECK (arm_record.reg_rec_count == 1);
13204 SELF_CHECK (arm_record.arm_regs[0] == 5);
13205 }
13206
13207 /* 32-bit Thumb-2 instructions. */
13208 {
13209 insn_decode_record arm_record;
13210
13211 memset (&arm_record, 0, sizeof (insn_decode_record));
13212 arm_record.gdbarch = gdbarch;
13213
13214 static const uint16_t insns[] = {
13215 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13216 0xee1d, 0x7f70,
13217 };
13218
13219 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13220 instruction_reader_thumb reader (endian, insns);
13221 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13222 THUMB2_INSN_SIZE_BYTES);
13223
13224 SELF_CHECK (ret == 0);
13225 SELF_CHECK (arm_record.mem_rec_count == 0);
13226 SELF_CHECK (arm_record.reg_rec_count == 1);
13227 SELF_CHECK (arm_record.arm_regs[0] == 7);
13228 }
13229}
13230} // namespace selftests
13231#endif /* GDB_SELF_TEST */
72508ac0
PO
13232
13233/* Cleans up local record registers and memory allocations. */
13234
13235static void
13236deallocate_reg_mem (insn_decode_record *record)
13237{
13238 xfree (record->arm_regs);
13239 xfree (record->arm_mems);
13240}
13241
13242
01e57735 13243/* Parse the current instruction and record the values of the registers and
72508ac0
PO
13244 memory that will be changed in current instruction to record_arch_list".
13245 Return -1 if something is wrong. */
13246
13247int
01e57735
YQ
13248arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13249 CORE_ADDR insn_addr)
72508ac0
PO
13250{
13251
72508ac0
PO
13252 uint32_t no_of_rec = 0;
13253 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13254 ULONGEST t_bit = 0, insn_id = 0;
13255
13256 ULONGEST u_regval = 0;
13257
13258 insn_decode_record arm_record;
13259
13260 memset (&arm_record, 0, sizeof (insn_decode_record));
13261 arm_record.regcache = regcache;
13262 arm_record.this_addr = insn_addr;
13263 arm_record.gdbarch = gdbarch;
13264
13265
13266 if (record_debug > 1)
13267 {
13268 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 13269 "addr = %s\n",
72508ac0
PO
13270 paddress (gdbarch, arm_record.this_addr));
13271 }
13272
728a7913
YQ
13273 instruction_reader reader;
13274 if (extract_arm_insn (reader, &arm_record, 2))
72508ac0
PO
13275 {
13276 if (record_debug)
01e57735
YQ
13277 {
13278 printf_unfiltered (_("Process record: error reading memory at "
13279 "addr %s len = %d.\n"),
13280 paddress (arm_record.gdbarch,
13281 arm_record.this_addr), 2);
13282 }
72508ac0
PO
13283 return -1;
13284 }
13285
13286 /* Check the insn, whether it is thumb or arm one. */
13287
13288 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13289 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13290
13291
13292 if (!(u_regval & t_bit))
13293 {
13294 /* We are decoding arm insn. */
728a7913 13295 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
72508ac0
PO
13296 }
13297 else
13298 {
13299 insn_id = bits (arm_record.arm_insn, 11, 15);
13300 /* is it thumb2 insn? */
13301 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
01e57735 13302 {
728a7913 13303 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
01e57735
YQ
13304 THUMB2_INSN_SIZE_BYTES);
13305 }
72508ac0 13306 else
01e57735
YQ
13307 {
13308 /* We are decoding thumb insn. */
728a7913
YQ
13309 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13310 THUMB_INSN_SIZE_BYTES);
01e57735 13311 }
72508ac0
PO
13312 }
13313
13314 if (0 == ret)
13315 {
13316 /* Record registers. */
25ea693b 13317 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13318 if (arm_record.arm_regs)
01e57735
YQ
13319 {
13320 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13321 {
13322 if (record_full_arch_list_add_reg
25ea693b 13323 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
01e57735
YQ
13324 ret = -1;
13325 }
13326 }
72508ac0
PO
13327 /* Record memories. */
13328 if (arm_record.arm_mems)
01e57735
YQ
13329 {
13330 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13331 {
13332 if (record_full_arch_list_add_mem
13333 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13334 arm_record.arm_mems[no_of_rec].len))
01e57735
YQ
13335 ret = -1;
13336 }
13337 }
72508ac0 13338
25ea693b 13339 if (record_full_arch_list_add_end ())
01e57735 13340 ret = -1;
72508ac0
PO
13341 }
13342
13343
13344 deallocate_reg_mem (&arm_record);
13345
13346 return ret;
13347}
This page took 2.813964 seconds and 4 git commands to generate.