/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2017 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"

#include <ctype.h>		/* XXX for isupper ().  */

#include "frame.h"
#include "inferior.h"
#include "infrun.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "dis-asm.h"		/* For register styles.  */
#include "disasm.h"
#include "regcache.h"
#include "reggroups.h"
#include "doublest.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2-frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "remote.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observer.h"

#include "arch/arm.h"
#include "arch/arm-get-next-pcs.h"
#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "vec.h"

#include "record.h"
#include "record-full.h"
#include <algorithm>

#include "features/arm/arm-with-m.c"
#include "features/arm/arm-with-m-fpa-layout.c"
#include "features/arm/arm-with-m-vfp-d16.c"
#include "features/arm/arm-with-iwmmxt.c"
#include "features/arm/arm-with-vfpv2.c"
#include "features/arm/arm-with-vfpv3.c"
#include "features/arm/arm-with-neon.c"

static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)	\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)	\
	MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
  {
    "auto",
    "arm",
    "thumb",
    NULL
  };

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Holds the current set of options to be passed to the disassembler.  */
static char *arm_disassembler_options;

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (char *, int,
					 struct cmd_list_element *);
static void show_disassembly_style_sfunc (struct ui_file *, int,
					  struct cmd_list_element *,
					  const char *);

static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
  arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

int
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if the processor is currently executing in Thumb mode.  */

int
arm_is_thumb (struct regcache *regcache)
{
  ULONGEST cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Callback for VEC_lower_bound.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  return 0;
}

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure *dsc
    = get_displaced_step_closure_by_addr (memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}

/* Determine if the address specified equals any of these magic return
   values, called EXC_RETURN, defined by the ARM v6-M and v7-M
   architectures.

   From ARMv6-M Reference Manual B1.5.8
   Table B1-5 Exception return behavior

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   From ARMv7-M Reference Manual B1.5.8
   Table B1-8 EXC_RETURN definition of exception return behavior, no FP

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   Table B1-9 EXC_RETURN definition of exception return behavior, with
   FP

   EXC_RETURN    Return To        Return Stack    Frame Type
   0xFFFFFFE1    Handler mode     Main            Extended
   0xFFFFFFE9    Thread mode      Main            Extended
   0xFFFFFFED    Thread mode      Process         Extended
   0xFFFFFFF1    Handler mode     Main            Basic
   0xFFFFFFF9    Thread mode      Main            Basic
   0xFFFFFFFD    Thread mode      Process         Basic

   For more details see "B1.5.8 Exception return behavior"
   in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */

static int
arm_m_addr_is_magic (CORE_ADDR addr)
{
  switch (addr)
    {
      /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
	 the exception return behavior.  */
      case 0xffffffe1:
      case 0xffffffe9:
      case 0xffffffed:
      case 0xfffffff1:
      case 0xfffffff9:
      case 0xfffffffd:
	/* Address is magic.  */
	return 1;

      default:
	/* Address is not magic.  */
	return 0;
    }
}

/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}

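/* Illustrative example (values chosen here for explanation, not taken
   from the original sources): with arm_apcs_32 set, a Thumb-tagged code
   address such as 0x00008001 comes back as 0x00008000, while on an
   M-profile target the EXC_RETURN value 0xfffffffd is returned
   unchanged so that the magic exception-return encoding survives.  */
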
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))

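/* Worked examples (computed from the macros above; the instruction
   encodings are illustrative, not taken from the original sources):
   the ARM (encoding A) instruction "movw r0, #0x1234" assembles to
   0xe3001234, and EXTRACT_MOVW_MOVT_IMM_A (0xe3001234) yields 0x1234.
   The equivalent Thumb (encoding T) instruction assembles to the
   halfword pair 0xf241/0x2034, and
   EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) also yields 0x1234.  */
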
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}

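/* A few sample expansions, worked out from the code above (illustrative
   inputs only):

     thumb_expand_immediate (0x0ff) == 0x000000ff
     thumb_expand_immediate (0x1ff) == 0x00ff00ff
     thumb_expand_immediate (0x3ff) == 0xffffffff
     thumb_expand_immediate (0x400) == 0x80000000

   The last case is the "rotated 8-bit value" form: the top five bits of
   the 12-bit immediate give the rotation count (here 8), applied to the
   byte 0x80.  */
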
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		 /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	 /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}

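/* For instance (illustrative encodings, not from the original sources):
   0xb008 ("add sp, #32") and 0xbd00 ("pop {pc}") both match the masks
   above, so the prologue scan stops before them, whereas 0xb508
   ("push {r3, lr}") does not.  */
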
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

static CORE_ADDR
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int i;
  pv_t regs[16];
  struct pv_area *stack;
  struct cleanup *back_to;
  CORE_ADDR offset;
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  while (start < limit)
    {
      unsigned short insn;

      insn = read_code_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	{
	  int regno;
	  int mask;

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
	    break;

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						       -4);
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	{
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						 -offset);
	}
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
						    (insn & 0xff) << 2);
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
						   bits (insn, 6, 8));
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
						    bits (insn, 0, 7));
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	{
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
	}
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	{
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
	}
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	{
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;
	  pv_t addr;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[regno]);
	}
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	{
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);
	  pv_t addr;

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))
	    break;

	  pv_area_store (stack, addr, 4, regs[rd]);
	}
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
	;
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	;
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
	;
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
	;
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are necessary
	   on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	{
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;
	  CORE_ADDR loc;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
	}
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	{
	  unsigned short inst2;

	  inst2 = read_code_unsigned_integer (start + 2, 2,
					      byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	    {
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      CORE_ADDR nextpc;
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
		break;
	    }

	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      pv_t addr = regs[bits (insn, 0, 3)];
	      int regno;

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		  {
		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);
		  }

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (insn & 0x0080)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),
			     4, regs[regno2]);

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (inst2 & 0x0200)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);

	      if (inst2 & 0x0100)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr;

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))
		break;

	      pv_area_store (stack, addr, 4, regs[regno]);
	    }

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	    ;

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	    }

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	    }

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	    {
	      unsigned int imm
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	    }

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	    {
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	    }

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 11);
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	    }

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 7) << 2;
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	    }

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	    {
	      /* Don't scan past anything that might change control flow.  */
	      break;
	    }
	  else
	    {
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
	    }

	  start += 2;
	}
      else if (thumb_instruction_changes_pc (insn))
	{
	  /* Don't scan past anything that might change control flow.  */
	  break;
	}
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
	}

      start += 2;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  if (cache == NULL)
    {
      do_cleanups (back_to);
      return unrecognized_pc;
    }

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
    }
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
    }
  else
    {
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
    }

  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;
}


/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number to *DESTREG, and set
   the size in bytes of the loading instructions in *OFFSET.  Return 0 if
   the instructions are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
				  unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)		/* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}

/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence; otherwise, return the
   original PC.

   On arm, this sequence of instructions is composed mainly of three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary across different ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		      "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}

/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
   stmfd	sp!, {...,fp,ip,lr,pc}
   [stfe	f7, [sp, #-12]!]
   [stfe	f6, [sp, #-12]!]
   [stfe	f5, [sp, #-12]!]
   [stfe	f4, [sp, #-12]!]
   sub		fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}

94c30b78 1400
c5aa993b 1401/* *INDENT-OFF* */
c906108c
SS
1402/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1403 This function decodes a Thumb function prologue to determine:
1404 1) the size of the stack frame
1405 2) which registers are saved on it
1406 3) the offsets of saved regs
1407 4) the offset from the stack pointer to the frame pointer
c906108c 1408
da59e081
JM
1409 A typical Thumb function prologue would create this stack frame
1410 (offsets relative to FP)
c906108c
SS
1411 old SP -> 24 stack parameters
1412 20 LR
1413 16 R7
1414 R7 -> 0 local variables (16 bytes)
1415 SP -> -12 additional stack space (12 bytes)
1416 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1417 12 bytes. The frame register is R7.
da59e081 1418
da3c6d4a
MS
1419 The comments for thumb_skip_prolog() describe the algorithm we use
1420 to detect the end of the prolog. */
c5aa993b
JM
1421/* *INDENT-ON* */
1422
c906108c 1423static void
be8626e0 1424thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1425 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1426{
1427 CORE_ADDR prologue_start;
1428 CORE_ADDR prologue_end;
c906108c 1429
b39cc962
DJ
1430 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1431 &prologue_end))
c906108c 1432 {
ec3d575a
UW
1433 /* See comment in arm_scan_prologue for an explanation of
1434 this heuristics. */
1435 if (prologue_end > prologue_start + 64)
1436 {
1437 prologue_end = prologue_start + 64;
1438 }
c906108c
SS
1439 }
1440 else
f7060f85
DJ
1441 /* We're in the boondocks: we have no idea where the start of the
1442 function is. */
1443 return;
c906108c 1444
325fac50 1445 prologue_end = std::min (prologue_end, prev_pc);
c906108c 1446
be8626e0 1447 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1448}
1449
/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */

static int
arm_instruction_restores_sp (unsigned int insn)
{
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0df0f000) == 0x0080d000
	  /* ADD SP (register or immediate).  */
	  || (insn & 0x0df0f000) == 0x0040d000
	  /* SUB SP (register or immediate).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000
	  /* MOV SP.  */
	  || (insn & 0x0fff0000) == 0x08bd0000
	  /* POP (LDMIA).  */
	  || (insn & 0x0fff0000) == 0x049d0000)
	  /* POP of a single register.  */
	return 1;
    }

  return 0;
}

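/* For instance (illustrative encodings, not from the original sources):
   0xe28dd010 ("add sp, sp, #16"), 0xe1a0d00b ("mov sp, fp") and
   0xe8bd8000 ("pop {pc}", i.e. ldmia sp!, {pc}) all satisfy the tests
   above, so the prologue scanners stop before them.  */
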
0d39a070
DJ
1474/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1475 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1476 fill it in. Return the first address not recognized as a prologue
1477 instruction.
eb5492fa 1478
0d39a070
DJ
1479 We recognize all the instructions typically found in ARM prologues,
1480 plus harmless instructions which can be skipped (either for analysis
1481 purposes, or a more restrictive set that can be skipped when finding
1482 the end of the prologue). */
1483
1484static CORE_ADDR
1485arm_analyze_prologue (struct gdbarch *gdbarch,
1486 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1487 struct arm_prologue_cache *cache)
1488{
0d39a070
DJ
1489 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1490 int regno;
1491 CORE_ADDR offset, current_pc;
1492 pv_t regs[ARM_FPS_REGNUM];
1493 struct pv_area *stack;
1494 struct cleanup *back_to;
0d39a070
DJ
1495 CORE_ADDR unrecognized_pc = 0;
1496
1497 /* Search the prologue looking for instructions that set up the
96baa820 1498 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1499
96baa820
JM
1500 Be careful, however, and if it doesn't look like a prologue,
1501 don't try to scan it. If, for instance, a frameless function
1502 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1503 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1504 and other operations that rely on a knowledge of the stack
0d39a070 1505 traceback. */
d4473757 1506
4be43953
DJ
1507 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1508 regs[regno] = pv_register (regno, 0);
55f960e1 1509 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1510 back_to = make_cleanup_free_pv_area (stack);
1511
94c30b78
MS
1512 for (current_pc = prologue_start;
1513 current_pc < prologue_end;
f43845b3 1514 current_pc += 4)
96baa820 1515 {
e17a4113 1516 unsigned int insn
198cd59d 1517 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1518
94c30b78 1519 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1520 {
4be43953 1521 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1522 continue;
1523 }
0d39a070
DJ
1524 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1526 {
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1529 int rd = bits (insn, 12, 15);
28cd8767 1530 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1532 continue;
1533 }
0d39a070
DJ
1534 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1535 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1536 {
1537 unsigned imm = insn & 0xff; /* immediate value */
1538 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1539 int rd = bits (insn, 12, 15);
28cd8767 1540 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1541 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1542 continue;
1543 }
0963b4bd
MS
1544 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1545 [sp, #-4]! */
f43845b3 1546 {
4be43953
DJ
1547 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1548 break;
1549 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1550 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1551 regs[bits (insn, 12, 15)]);
f43845b3
MS
1552 continue;
1553 }
1554 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1555 /* stmfd sp!, {..., fp, ip, lr, pc}
1556 or
1557 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1558 {
d4473757 1559 int mask = insn & 0xffff;
ed9a39eb 1560
4be43953
DJ
1561 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1562 break;
1563
94c30b78 1564 /* Calculate offsets of saved registers. */
34e8f22d 1565 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1566 if (mask & (1 << regno))
1567 {
0963b4bd
MS
1568 regs[ARM_SP_REGNUM]
1569 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1570 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1571 }
1572 }
0d39a070
DJ
1573 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1574 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1575 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1576 {
1577 /* No need to add this to saved_regs -- it's just an arg reg. */
1578 continue;
1579 }
0d39a070
DJ
1580 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1581 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1582 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1583 {
1584 /* No need to add this to saved_regs -- it's just an arg reg. */
1585 continue;
1586 }
0963b4bd
MS
1587 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1588 { registers } */
0d39a070
DJ
1589 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1590 {
1591 /* No need to add this to saved_regs -- it's just arg regs. */
1592 continue;
1593 }
d4473757
KB
1594 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1595 {
94c30b78
MS
1596 unsigned imm = insn & 0xff; /* immediate value */
1597 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1598 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1599 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1600 }
1601 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1602 {
94c30b78
MS
1603 unsigned imm = insn & 0xff; /* immediate value */
1604 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1605 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1606 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1607 }
0963b4bd
MS
1608 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1609 [sp, -#c]! */
2af46ca0 1610 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1611 {
4be43953
DJ
1612 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1613 break;
1614
1615 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1616 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1617 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1618 }
0963b4bd
MS
1619 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1620 [sp!] */
2af46ca0 1621 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1622 {
1623 int n_saved_fp_regs;
1624 unsigned int fp_start_reg, fp_bound_reg;
1625
4be43953
DJ
1626 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1627 break;
1628
94c30b78 1629 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1630 {
d4473757
KB
1631 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1632 n_saved_fp_regs = 3;
1633 else
1634 n_saved_fp_regs = 1;
96baa820 1635 }
d4473757 1636 else
96baa820 1637 {
d4473757
KB
1638 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1639 n_saved_fp_regs = 2;
1640 else
1641 n_saved_fp_regs = 4;
96baa820 1642 }
d4473757 1643
34e8f22d 1644 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1645 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1646 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1647 {
4be43953
DJ
1648 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1649 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1650 regs[fp_start_reg++]);
96baa820 1651 }
c906108c 1652 }
0d39a070
DJ
1653 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1654 {
1655 /* Allow some special function calls when skipping the
1656 prologue; GCC generates these before storing arguments to
1657 the stack. */
1658 CORE_ADDR dest = BranchDest (current_pc, insn);
1659
e0634ccf 1660 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1661 continue;
1662 else
1663 break;
1664 }
d4473757 1665 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1666 break; /* Condition not true, exit early. */
0d39a070
DJ
1667 else if (arm_instruction_changes_pc (insn))
1668 /* Don't scan past anything that might change control flow. */
1669 break;
f303bc3e
YQ
1670 else if (arm_instruction_restores_sp (insn))
1671 {
1672 /* Don't scan past the epilogue. */
1673 break;
1674 }
d19f7eee
UW
1675 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1676 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1677 /* Ignore block loads from the stack, potentially copying
1678 parameters from memory. */
1679 continue;
1680 else if ((insn & 0xfc500000) == 0xe4100000
1681 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1682 /* Similarly ignore single loads from the stack. */
1683 continue;
0d39a070
DJ
1684 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1685 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1686 register instead of the stack. */
d4473757 1687 continue;
0d39a070
DJ
1688 else
1689 {
21daaaaf
YQ
1690 /* The optimizer might shove anything into the prologue. If we
1691 are building up the cache (cache != NULL) from a prologue scan,
1692 we just skip what we don't recognize and scan further to make
1693 the cache as complete as possible. However, if we are merely
1694 skipping the prologue, we stop immediately on the first
1695 unrecognized instruction. */
0d39a070 1696 unrecognized_pc = current_pc;
21daaaaf
YQ
1697 if (cache != NULL)
1698 continue;
1699 else
1700 break;
0d39a070 1701 }
c906108c
SS
1702 }
1703
0d39a070
DJ
1704 if (unrecognized_pc == 0)
1705 unrecognized_pc = current_pc;
1706
0d39a070
DJ
1707 if (cache)
1708 {
4072f920
YQ
1709 int framereg, framesize;
1710
1711 /* The frame size is just the distance from the frame register
1712 to the original stack pointer. */
1713 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1714 {
1715 /* Frame pointer is fp. */
1716 framereg = ARM_FP_REGNUM;
1717 framesize = -regs[ARM_FP_REGNUM].k;
1718 }
1719 else
1720 {
1721 /* Try the stack pointer... this is a bit desperate. */
1722 framereg = ARM_SP_REGNUM;
1723 framesize = -regs[ARM_SP_REGNUM].k;
1724 }
1725
0d39a070
DJ
1726 cache->framereg = framereg;
1727 cache->framesize = framesize;
1728
1729 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1730 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1731 cache->saved_regs[regno].addr = offset;
1732 }
1733
1734 if (arm_debug)
1735 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1736 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1737
1738 do_cleanups (back_to);
0d39a070
DJ
1739 return unrecognized_pc;
1740}
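
/* As an illustration of what arm_analyze_prologue can walk, a typical
   GCC-generated APCS frame setup might look like the following
   (illustrative only; real prologues vary with options and ABI):

	mov   ip, sp			; 0xe1a0c00d
	stmfd sp!, {fp, ip, lr, pc}	; 0xe92dd800
	sub   fp, ip, #4		; 0xe24cb004
	sub   sp, sp, #20		; 0xe24dd014

   Each instruction matches one of the patterns recognized above,
   leaving the frame register set to FP and the callee-saved registers
   at known offsets from the reconstructed previous SP.  */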
1741
1742static void
1743arm_scan_prologue (struct frame_info *this_frame,
1744 struct arm_prologue_cache *cache)
1745{
1746 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1747 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1748 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1749 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1750 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1751
1752 /* Assume there is no frame until proven otherwise. */
1753 cache->framereg = ARM_SP_REGNUM;
1754 cache->framesize = 0;
1755
1756 /* Check for Thumb prologue. */
1757 if (arm_frame_is_thumb (this_frame))
1758 {
1759 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1760 return;
1761 }
1762
1763 /* Find the function prologue. If we can't find the function in
1764 the symbol table, peek in the stack frame to find the PC. */
1765 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1766 &prologue_end))
1767 {
1768 /* One way to find the end of the prologue (which works well
1769 for unoptimized code) is to do the following:
1770
1771 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1772
1773 if (sal.line == 0)
1774 prologue_end = prev_pc;
1775 else if (sal.end < prologue_end)
1776 prologue_end = sal.end;
1777
1778 This mechanism is very accurate so long as the optimizer
1779 doesn't move any instructions from the function body into the
1780 prologue. If this happens, sal.end will be the last
1781 instruction in the first hunk of prologue code just before
1782 the first instruction that the scheduler has moved from
1783 the body to the prologue.
1784
1785 In order to make sure that we scan all of the prologue
1786 instructions, we use a slightly less accurate mechanism which
1787 may scan more than necessary. To help compensate for this
1788 lack of accuracy, the prologue scanning loop below contains
1789 several clauses which'll cause the loop to terminate early if
1790 an implausible prologue instruction is encountered.
1791
1792 The expression
1793
1794 prologue_start + 64
1795
1796 is a suitable endpoint since it accounts for the largest
1797 possible prologue plus up to five instructions inserted by
1798 the scheduler. */
1799
1800 if (prologue_end > prologue_start + 64)
1801 {
1802 prologue_end = prologue_start + 64; /* See above. */
1803 }
1804 }
1805 else
1806 {
1807 /* We have no symbol information. Our only option is to assume this
1808 function has a standard stack frame and the normal frame register.
1809 Then, we can find the value of our frame pointer on entrance to
1810 the callee (or at the present moment if this is the innermost frame).
1811 The value stored there should be the address of the stmfd + 8. */
1812 CORE_ADDR frame_loc;
7913a64c 1813 ULONGEST return_value;
0d39a070
DJ
1814
1815 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
7913a64c
YQ
1816 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1817 &return_value))
0d39a070
DJ
1818 return;
1819 else
1820 {
1821 prologue_start = gdbarch_addr_bits_remove
1822 (gdbarch, return_value) - 8;
1823 prologue_end = prologue_start + 64; /* See above. */
1824 }
1825 }
1826
1827 if (prev_pc < prologue_end)
1828 prologue_end = prev_pc;
1829
1830 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1831}
1832
eb5492fa 1833static struct arm_prologue_cache *
a262aec2 1834arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1835{
eb5492fa
DJ
1836 int reg;
1837 struct arm_prologue_cache *cache;
1838 CORE_ADDR unwound_fp;
c5aa993b 1839
35d5d4ee 1840 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1841 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1842
a262aec2 1843 arm_scan_prologue (this_frame, cache);
848cfffb 1844
a262aec2 1845 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1846 if (unwound_fp == 0)
1847 return cache;
c906108c 1848
4be43953 1849 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1850
eb5492fa
DJ
1851 /* Calculate actual addresses of saved registers using offsets
1852 determined by arm_scan_prologue. */
a262aec2 1853 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1854 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1855 cache->saved_regs[reg].addr += cache->prev_sp;
1856
1857 return cache;
c906108c
SS
1858}
1859
c1ee9414
LM
1860/* Implementation of the stop_reason hook for arm_prologue frames. */
1861
1862static enum unwind_stop_reason
1863arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1864 void **this_cache)
1865{
1866 struct arm_prologue_cache *cache;
1867 CORE_ADDR pc;
1868
1869 if (*this_cache == NULL)
1870 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1871 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1872
1873 /* This is meant to halt the backtrace at "_start". */
1874 pc = get_frame_pc (this_frame);
1875 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1876 return UNWIND_OUTERMOST;
1877
1878 /* If we've hit a wall, stop. */
1879 if (cache->prev_sp == 0)
1880 return UNWIND_OUTERMOST;
1881
1882 return UNWIND_NO_REASON;
1883}
1884
eb5492fa
DJ
1885/* Our frame ID for a normal frame is the current function's starting PC
1886 and the caller's SP when we were called. */
c906108c 1887
148754e5 1888static void
a262aec2 1889arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1890 void **this_cache,
1891 struct frame_id *this_id)
c906108c 1892{
eb5492fa
DJ
1893 struct arm_prologue_cache *cache;
1894 struct frame_id id;
2c404490 1895 CORE_ADDR pc, func;
f079148d 1896
eb5492fa 1897 if (*this_cache == NULL)
a262aec2 1898 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1899 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1900
0e9e9abd
UW
1901 /* Use function start address as part of the frame ID. If we cannot
1902 identify the start address (due to missing symbol information),
1903 fall back to just using the current PC. */
c1ee9414 1904 pc = get_frame_pc (this_frame);
2c404490 1905 func = get_frame_func (this_frame);
0e9e9abd
UW
1906 if (!func)
1907 func = pc;
1908
eb5492fa 1909 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1910 *this_id = id;
c906108c
SS
1911}
1912
a262aec2
DJ
1913static struct value *
1914arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1915 void **this_cache,
a262aec2 1916 int prev_regnum)
24de872b 1917{
24568a2c 1918 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1919 struct arm_prologue_cache *cache;
1920
eb5492fa 1921 if (*this_cache == NULL)
a262aec2 1922 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1923 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1924
eb5492fa 1925 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1926 instead. The prologue may save PC, but it will point into this
1927 frame's prologue, not the next frame's resume location. Also
1928 strip the saved T bit. A valid LR may have the low bit set, but
1929 a valid PC never does. */
eb5492fa 1930 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1931 {
1932 CORE_ADDR lr;
1933
1934 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1935 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1936 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1937 }
24de872b 1938
eb5492fa 1939 /* SP is generally not saved to the stack, but this frame is
a262aec2 1940 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1941 The value was already reconstructed into PREV_SP. */
1942 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1943 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1944
b39cc962
DJ
1945 /* The CPSR may have been changed by the call instruction and by the
1946 called function. The only bit we can reconstruct is the T bit,
1947 by checking the low bit of LR as of the call. This is a reliable
1948 indicator of Thumb-ness except for some ARM v4T pre-interworking
1949 Thumb code, which could get away with a clear low bit as long as
1950 the called function did not use bx. Guess that all other
1951 bits are unchanged; the condition flags are presumably lost,
1952 but the processor status is likely valid. */
1953 if (prev_regnum == ARM_PS_REGNUM)
1954 {
1955 CORE_ADDR lr, cpsr;
9779414d 1956 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1957
1958 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1959 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1960 if (IS_THUMB_ADDR (lr))
9779414d 1961 cpsr |= t_bit;
b39cc962 1962 else
9779414d 1963 cpsr &= ~t_bit;
b39cc962
DJ
1964 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1965 }
1966
a262aec2
DJ
1967 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1968 prev_regnum);
eb5492fa
DJ
1969}
1970
1971struct frame_unwind arm_prologue_unwind = {
1972 NORMAL_FRAME,
c1ee9414 1973 arm_prologue_unwind_stop_reason,
eb5492fa 1974 arm_prologue_this_id,
a262aec2
DJ
1975 arm_prologue_prev_register,
1976 NULL,
1977 default_frame_sniffer
eb5492fa
DJ
1978};
1979
0e9e9abd
UW
1980/* Maintain a list of ARM exception table entries per objfile, similar to the
1981 list of mapping symbols. We only cache entries for standard ARM-defined
1982 personality routines; the cache will contain only the frame unwinding
1983 instructions associated with the entry (not the descriptors). */
1984
1985static const struct objfile_data *arm_exidx_data_key;
1986
1987struct arm_exidx_entry
1988{
1989 bfd_vma addr;
1990 gdb_byte *entry;
1991};
1992typedef struct arm_exidx_entry arm_exidx_entry_s;
1993DEF_VEC_O(arm_exidx_entry_s);
1994
1995struct arm_exidx_data
1996{
1997 VEC(arm_exidx_entry_s) **section_maps;
1998};
1999
2000static void
2001arm_exidx_data_free (struct objfile *objfile, void *arg)
2002{
9a3c8263 2003 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
2004 unsigned int i;
2005
2006 for (i = 0; i < objfile->obfd->section_count; i++)
2007 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2008}
2009
2010static inline int
2011arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2012 const struct arm_exidx_entry *rhs)
2013{
2014 return lhs->addr < rhs->addr;
2015}
2016
2017static struct obj_section *
2018arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2019{
2020 struct obj_section *osect;
2021
2022 ALL_OBJFILE_OSECTIONS (objfile, osect)
2023 if (bfd_get_section_flags (objfile->obfd,
2024 osect->the_bfd_section) & SEC_ALLOC)
2025 {
2026 bfd_vma start, size;
2027 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2028 size = bfd_get_section_size (osect->the_bfd_section);
2029
2030 if (start <= vma && vma < start + size)
2031 return osect;
2032 }
2033
2034 return NULL;
2035}
2036
2037/* Parse contents of exception table and exception index sections
2038 of OBJFILE, and fill in the exception table entry cache.
2039
2040 For each entry that refers to a standard ARM-defined personality
2041 routine, extract the frame unwinding instructions (from either
2042 the index or the table section). The unwinding instructions
2043 are normalized by:
2044 - extracting them from the rest of the table data
2045 - converting to host endianness
2046 - appending the implicit 0xb0 ("Finish") code
2047
2048 The extracted and normalized instructions are stored for later
2049 retrieval by the arm_find_exidx_entry routine. */
2050
2051static void
2052arm_exidx_new_objfile (struct objfile *objfile)
2053{
3bb47e8b 2054 struct cleanup *cleanups;
0e9e9abd
UW
2055 struct arm_exidx_data *data;
2056 asection *exidx, *extab;
2057 bfd_vma exidx_vma = 0, extab_vma = 0;
2058 bfd_size_type exidx_size = 0, extab_size = 0;
2059 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2060 LONGEST i;
2061
2062 /* If we've already touched this file, do nothing. */
2063 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2064 return;
3bb47e8b 2065 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2066
2067 /* Read contents of exception table and index. */
a5eda10c 2068 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2069 if (exidx)
2070 {
2071 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2072 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2073 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2074 make_cleanup (xfree, exidx_data);
2075
2076 if (!bfd_get_section_contents (objfile->obfd, exidx,
2077 exidx_data, 0, exidx_size))
2078 {
2079 do_cleanups (cleanups);
2080 return;
2081 }
2082 }
2083
2084 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2085 if (extab)
2086 {
2087 extab_vma = bfd_section_vma (objfile->obfd, extab);
2088 extab_size = bfd_get_section_size (extab);
224c3ddb 2089 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2090 make_cleanup (xfree, extab_data);
2091
2092 if (!bfd_get_section_contents (objfile->obfd, extab,
2093 extab_data, 0, extab_size))
2094 {
2095 do_cleanups (cleanups);
2096 return;
2097 }
2098 }
2099
2100 /* Allocate exception table data structure. */
2101 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2102 set_objfile_data (objfile, arm_exidx_data_key, data);
2103 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2104 objfile->obfd->section_count,
2105 VEC(arm_exidx_entry_s) *);
2106
2107 /* Fill in exception table. */
2108 for (i = 0; i < exidx_size / 8; i++)
2109 {
2110 struct arm_exidx_entry new_exidx_entry;
2111 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2112 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2113 bfd_vma addr = 0, word = 0;
2114 int n_bytes = 0, n_words = 0;
2115 struct obj_section *sec;
2116 gdb_byte *entry = NULL;
2117
2118 /* Extract address of start of function. */
2119 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2120 idx += exidx_vma + i * 8;
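	  /* The first word of an index entry is a prel31 value: bit 31
	     is reserved and the low 31 bits hold a signed offset
	     relative to the word itself.  The two statements above
	     sign-extend that offset and rebase it; for example, a
	     stored value of 0x7ffffffc sign-extends to -4, i.e. four
	     bytes before the entry.  */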
2121
2122 /* Find section containing function and compute section offset. */
2123 sec = arm_obj_section_from_vma (objfile, idx);
2124 if (sec == NULL)
2125 continue;
2126 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2127
2128 /* Determine address of exception table entry. */
2129 if (val == 1)
2130 {
2131 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2132 }
2133 else if ((val & 0xff000000) == 0x80000000)
2134 {
2135 /* Exception table entry embedded in .ARM.exidx
2136 -- must be short form. */
2137 word = val;
2138 n_bytes = 3;
2139 }
2140 else if (!(val & 0x80000000))
2141 {
2142 /* Exception table entry in .ARM.extab. */
2143 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2144 addr += exidx_vma + i * 8 + 4;
2145
2146 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2147 {
2148 word = bfd_h_get_32 (objfile->obfd,
2149 extab_data + addr - extab_vma);
2150 addr += 4;
2151
2152 if ((word & 0xff000000) == 0x80000000)
2153 {
2154 /* Short form. */
2155 n_bytes = 3;
2156 }
2157 else if ((word & 0xff000000) == 0x81000000
2158 || (word & 0xff000000) == 0x82000000)
2159 {
2160 /* Long form. */
2161 n_bytes = 2;
2162 n_words = ((word >> 16) & 0xff);
2163 }
2164 else if (!(word & 0x80000000))
2165 {
2166 bfd_vma pers;
2167 struct obj_section *pers_sec;
2168 int gnu_personality = 0;
2169
2170 /* Custom personality routine. */
2171 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2172 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2173
2174 /* Check whether we've got one of the variants of the
2175 GNU personality routines. */
2176 pers_sec = arm_obj_section_from_vma (objfile, pers);
2177 if (pers_sec)
2178 {
2179 static const char *personality[] =
2180 {
2181 "__gcc_personality_v0",
2182 "__gxx_personality_v0",
2183 "__gcj_personality_v0",
2184 "__gnu_objc_personality_v0",
2185 NULL
2186 };
2187
2188 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2189 int k;
2190
2191 for (k = 0; personality[k]; k++)
2192 if (lookup_minimal_symbol_by_pc_name
2193 (pc, personality[k], objfile))
2194 {
2195 gnu_personality = 1;
2196 break;
2197 }
2198 }
2199
2200 /* If so, the next word contains a word count in the high
2201 byte, followed by the same unwind instructions as the
2202 pre-defined forms. */
2203 if (gnu_personality
2204 && addr + 4 <= extab_vma + extab_size)
2205 {
2206 word = bfd_h_get_32 (objfile->obfd,
2207 extab_data + addr - extab_vma);
2208 addr += 4;
2209 n_bytes = 3;
2210 n_words = ((word >> 24) & 0xff);
2211 }
2212 }
2213 }
2214 }
2215
2216 /* Sanity check address. */
2217 if (n_words)
2218 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2219 n_words = n_bytes = 0;
2220
2221 /* The unwind instructions reside in WORD (only the N_BYTES least
2222 significant bytes are valid), followed by N_WORDS words in the
2223 extab section starting at ADDR. */
2224 if (n_bytes || n_words)
2225 {
224c3ddb
SM
2226 gdb_byte *p = entry
2227 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2228 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2229
2230 while (n_bytes--)
2231 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2232
2233 while (n_words--)
2234 {
2235 word = bfd_h_get_32 (objfile->obfd,
2236 extab_data + addr - extab_vma);
2237 addr += 4;
2238
2239 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2240 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2241 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2242 *p++ = (gdb_byte) (word & 0xff);
2243 }
2244
2245 /* Implied "Finish" to terminate the list. */
2246 *p++ = 0xb0;
2247 }
2248
2249 /* Push the entry onto the vector. Entries are guaranteed to
2250 appear in order of increasing addresses. */
2251 new_exidx_entry.addr = idx;
2252 new_exidx_entry.entry = entry;
2253 VEC_safe_push (arm_exidx_entry_s,
2254 data->section_maps[sec->the_bfd_section->index],
2255 &new_exidx_entry);
2256 }
2257
2258 do_cleanups (cleanups);
2259}
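
/* To illustrate the normalization above (values chosen for
   illustration, following the EHABI compact model): a short-form index
   word of 0x80a8b0b0 yields the instruction bytes 0xa8, 0xb0, 0xb0,
   plus the implicit trailing 0xb0.  When interpreted later by
   arm_exidx_fill_cache, 0xa8 means "pop {r4, lr}" and 0xb0 means
   "finish", so the cached entry describes a frame that saved only r4
   and lr.  */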
2260
2261/* Search for the exception table entry covering MEMADDR. If one is found,
2262 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2263 set *START to the start of the region covered by this entry. */
2264
2265static gdb_byte *
2266arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2267{
2268 struct obj_section *sec;
2269
2270 sec = find_pc_section (memaddr);
2271 if (sec != NULL)
2272 {
2273 struct arm_exidx_data *data;
2274 VEC(arm_exidx_entry_s) *map;
2275 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2276 unsigned int idx;
2277
9a3c8263
SM
2278 data = ((struct arm_exidx_data *)
2279 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2280 if (data != NULL)
2281 {
2282 map = data->section_maps[sec->the_bfd_section->index];
2283 if (!VEC_empty (arm_exidx_entry_s, map))
2284 {
2285 struct arm_exidx_entry *map_sym;
2286
2287 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2288 arm_compare_exidx_entries);
2289
2290 /* VEC_lower_bound finds the earliest ordered insertion
2291 point. If the following symbol starts at this exact
2292 address, we use that; otherwise, the preceding
2293 exception table entry covers this address. */
2294 if (idx < VEC_length (arm_exidx_entry_s, map))
2295 {
2296 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2297 if (map_sym->addr == map_key.addr)
2298 {
2299 if (start)
2300 *start = map_sym->addr + obj_section_addr (sec);
2301 return map_sym->entry;
2302 }
2303 }
2304
2305 if (idx > 0)
2306 {
2307 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2308 if (start)
2309 *start = map_sym->addr + obj_section_addr (sec);
2310 return map_sym->entry;
2311 }
2312 }
2313 }
2314 }
2315
2316 return NULL;
2317}
2318
2319/* Given the current frame THIS_FRAME, and its associated frame unwinding
2320 instruction list from the ARM exception table entry ENTRY, allocate and
2321 return a prologue cache structure describing how to unwind this frame.
2322
2323 Return NULL if the unwinding instruction list contains a "spare",
2324 "reserved" or "refuse to unwind" instruction as defined in section
2325 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2326 for the ARM Architecture" document. */
2327
2328static struct arm_prologue_cache *
2329arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2330{
2331 CORE_ADDR vsp = 0;
2332 int vsp_valid = 0;
2333
2334 struct arm_prologue_cache *cache;
2335 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2336 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2337
2338 for (;;)
2339 {
2340 gdb_byte insn;
2341
2342 /* Whenever we reload SP, we actually have to retrieve its
2343 actual value in the current frame. */
2344 if (!vsp_valid)
2345 {
2346 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2347 {
2348 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2349 vsp = get_frame_register_unsigned (this_frame, reg);
2350 }
2351 else
2352 {
2353 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2354 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2355 }
2356
2357 vsp_valid = 1;
2358 }
2359
2360 /* Decode next unwind instruction. */
2361 insn = *entry++;
2362
2363 if ((insn & 0xc0) == 0)
2364 {
2365 int offset = insn & 0x3f;
2366 vsp += (offset << 2) + 4;
2367 }
2368 else if ((insn & 0xc0) == 0x40)
2369 {
2370 int offset = insn & 0x3f;
2371 vsp -= (offset << 2) + 4;
2372 }
2373 else if ((insn & 0xf0) == 0x80)
2374 {
2375 int mask = ((insn & 0xf) << 8) | *entry++;
2376 int i;
2377
2378 /* The special case of an all-zero mask identifies
2379 "Refuse to unwind". We return NULL to fall back
2380 to the prologue analyzer. */
2381 if (mask == 0)
2382 return NULL;
2383
2384 /* Pop registers r4..r15 under mask. */
2385 for (i = 0; i < 12; i++)
2386 if (mask & (1 << i))
2387 {
2388 cache->saved_regs[4 + i].addr = vsp;
2389 vsp += 4;
2390 }
2391
2392 /* Special-case popping SP -- we need to reload vsp. */
2393 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2394 vsp_valid = 0;
2395 }
2396 else if ((insn & 0xf0) == 0x90)
2397 {
2398 int reg = insn & 0xf;
2399
2400 /* Reserved cases. */
2401 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2402 return NULL;
2403
2404 /* Set SP from another register and mark VSP for reload. */
2405 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2406 vsp_valid = 0;
2407 }
2408 else if ((insn & 0xf0) == 0xa0)
2409 {
2410 int count = insn & 0x7;
2411 int pop_lr = (insn & 0x8) != 0;
2412 int i;
2413
2414 /* Pop r4..r[4+count]. */
2415 for (i = 0; i <= count; i++)
2416 {
2417 cache->saved_regs[4 + i].addr = vsp;
2418 vsp += 4;
2419 }
2420
2421 /* If indicated by flag, pop LR as well. */
2422 if (pop_lr)
2423 {
2424 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2425 vsp += 4;
2426 }
2427 }
2428 else if (insn == 0xb0)
2429 {
2430 /* We could only have updated PC by popping into it; if so, it
2431 will show up as an address. Otherwise, copy LR into PC. */
2432 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2433 cache->saved_regs[ARM_PC_REGNUM]
2434 = cache->saved_regs[ARM_LR_REGNUM];
2435
2436 /* We're done. */
2437 break;
2438 }
2439 else if (insn == 0xb1)
2440 {
2441 int mask = *entry++;
2442 int i;
2443
2444 /* All-zero mask and mask >= 16 is "spare". */
2445 if (mask == 0 || mask >= 16)
2446 return NULL;
2447
2448 /* Pop r0..r3 under mask. */
2449 for (i = 0; i < 4; i++)
2450 if (mask & (1 << i))
2451 {
2452 cache->saved_regs[i].addr = vsp;
2453 vsp += 4;
2454 }
2455 }
2456 else if (insn == 0xb2)
2457 {
2458 ULONGEST offset = 0;
2459 unsigned shift = 0;
2460
2461 do
2462 {
2463 offset |= (*entry & 0x7f) << shift;
2464 shift += 7;
2465 }
2466 while (*entry++ & 0x80);
2467
2468 vsp += 0x204 + (offset << 2);
2469 }
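	  /* For example (byte values chosen for illustration): the
	     sequence 0x81 0x01 following the 0xb2 opcode decodes to
	     offset = 0x01 | (0x01 << 7) = 129, so vsp advances by
	     0x204 + (129 << 2) = 0x408 bytes.  */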
2470 else if (insn == 0xb3)
2471 {
2472 int start = *entry >> 4;
2473 int count = (*entry++) & 0xf;
2474 int i;
2475
2476 /* Only registers D0..D15 are valid here. */
2477 if (start + count >= 16)
2478 return NULL;
2479
2480 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2481 for (i = 0; i <= count; i++)
2482 {
2483 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2484 vsp += 8;
2485 }
2486
2487 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2488 vsp += 4;
2489 }
2490 else if ((insn & 0xf8) == 0xb8)
2491 {
2492 int count = insn & 0x7;
2493 int i;
2494
2495 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2496 for (i = 0; i <= count; i++)
2497 {
2498 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2499 vsp += 8;
2500 }
2501
2502 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2503 vsp += 4;
2504 }
2505 else if (insn == 0xc6)
2506 {
2507 int start = *entry >> 4;
2508 int count = (*entry++) & 0xf;
2509 int i;
2510
2511 /* Only registers WR0..WR15 are valid. */
2512 if (start + count >= 16)
2513 return NULL;
2514
2515 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2516 for (i = 0; i <= count; i++)
2517 {
2518 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2519 vsp += 8;
2520 }
2521 }
2522 else if (insn == 0xc7)
2523 {
2524 int mask = *entry++;
2525 int i;
2526
2527 /* All-zero mask and mask >= 16 is "spare". */
2528 if (mask == 0 || mask >= 16)
2529 return NULL;
2530
2531 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2532 for (i = 0; i < 4; i++)
2533 if (mask & (1 << i))
2534 {
2535 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2536 vsp += 4;
2537 }
2538 }
2539 else if ((insn & 0xf8) == 0xc0)
2540 {
2541 int count = insn & 0x7;
2542 int i;
2543
2544 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2545 for (i = 0; i <= count; i++)
2546 {
2547 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2548 vsp += 8;
2549 }
2550 }
2551 else if (insn == 0xc8)
2552 {
2553 int start = *entry >> 4;
2554 int count = (*entry++) & 0xf;
2555 int i;
2556
2557 /* Only registers D0..D31 are valid. */
2558 if (start + count >= 16)
2559 return NULL;
2560
2561 /* Pop VFP double-precision registers
2562 D[16+start]..D[16+start+count]. */
2563 for (i = 0; i <= count; i++)
2564 {
2565 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2566 vsp += 8;
2567 }
2568 }
2569 else if (insn == 0xc9)
2570 {
2571 int start = *entry >> 4;
2572 int count = (*entry++) & 0xf;
2573 int i;
2574
2575 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2576 for (i = 0; i <= count; i++)
2577 {
2578 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2579 vsp += 8;
2580 }
2581 }
2582 else if ((insn & 0xf8) == 0xd0)
2583 {
2584 int count = insn & 0x7;
2585 int i;
2586
2587 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2588 for (i = 0; i <= count; i++)
2589 {
2590 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2591 vsp += 8;
2592 }
2593 }
2594 else
2595 {
2596 /* Everything else is "spare". */
2597 return NULL;
2598 }
2599 }
2600
2601 /* If we restore SP from a register, assume this was the frame register.
2602 Otherwise just fall back to SP as frame register. */
2603 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2604 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2605 else
2606 cache->framereg = ARM_SP_REGNUM;
2607
2608 /* Determine offset to previous frame. */
2609 cache->framesize
2610 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2611
2612 /* We already got the previous SP. */
2613 cache->prev_sp = vsp;
2614
2615 return cache;
2616}
2617
2618/* Unwinding via ARM exception table entries. Note that the sniffer
2619 already computes a filled-in prologue cache, which is then used
2620 with the same arm_prologue_this_id and arm_prologue_prev_register
2621 routines also used for prologue-parsing based unwinding. */
2622
2623static int
2624arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2625 struct frame_info *this_frame,
2626 void **this_prologue_cache)
2627{
2628 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2629 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2630 CORE_ADDR addr_in_block, exidx_region, func_start;
2631 struct arm_prologue_cache *cache;
2632 gdb_byte *entry;
2633
2634 /* See if we have an ARM exception table entry covering this address. */
2635 addr_in_block = get_frame_address_in_block (this_frame);
2636 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2637 if (!entry)
2638 return 0;
2639
2640 /* The ARM exception table does not describe unwind information
2641 for arbitrary PC values, but is guaranteed to be correct only
2642 at call sites. We have to decide here whether we want to use
2643 ARM exception table information for this frame, or fall back
2644 to using prologue parsing. (Note that if we have DWARF CFI,
2645 this sniffer isn't even called -- CFI is always preferred.)
2646
2647 Before we make this decision, however, we check whether we
2648 actually have *symbol* information for the current frame.
2649 If not, prologue parsing would not work anyway, so we might
2650 as well use the exception table and hope for the best. */
2651 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2652 {
2653 int exc_valid = 0;
2654
2655 /* If the next frame is "normal", we are at a call site in this
2656 frame, so exception information is guaranteed to be valid. */
2657 if (get_next_frame (this_frame)
2658 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2659 exc_valid = 1;
2660
2661 /* We also assume exception information is valid if we're currently
2662 blocked in a system call. The system library is supposed to
d9311bfa
AT
2663 ensure this, so that e.g. pthread cancellation works. */
2664 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2665 {
7913a64c 2666 ULONGEST insn;
416dc9c6 2667
7913a64c
YQ
2668 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2669 2, byte_order_for_code, &insn)
d9311bfa
AT
2670 && (insn & 0xff00) == 0xdf00 /* svc */)
2671 exc_valid = 1;
0e9e9abd 2672 }
d9311bfa
AT
2673 else
2674 {
7913a64c 2675 ULONGEST insn;
416dc9c6 2676
7913a64c
YQ
2677 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2678 4, byte_order_for_code, &insn)
d9311bfa
AT
2679 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2680 exc_valid = 1;
2681 }
2682
0e9e9abd
UW
2683 /* Bail out if we don't know that exception information is valid. */
2684 if (!exc_valid)
2685 return 0;
2686
2687 /* The ARM exception index does not mark the *end* of the region
2688 covered by the entry, and some functions will not have any entry.
2689 To correctly recognize the end of the covered region, the linker
2690 should have inserted dummy records with a CANTUNWIND marker.
2691
2692 Unfortunately, current versions of GNU ld do not reliably do
2693 this, and thus we may have found an incorrect entry above.
2694 As a (temporary) sanity check, we only use the entry if it
2695 lies *within* the bounds of the function. Note that this check
2696 might reject perfectly valid entries that just happen to cover
2697 multiple functions; therefore this check ought to be removed
2698 once the linker is fixed. */
2699 if (func_start > exidx_region)
2700 return 0;
2701 }
2702
2703 /* Decode the list of unwinding instructions into a prologue cache.
2704 Note that this may fail due to e.g. a "refuse to unwind" code. */
2705 cache = arm_exidx_fill_cache (this_frame, entry);
2706 if (!cache)
2707 return 0;
2708
2709 *this_prologue_cache = cache;
2710 return 1;
2711}
2712
2713struct frame_unwind arm_exidx_unwind = {
2714 NORMAL_FRAME,
8fbca658 2715 default_frame_unwind_stop_reason,
0e9e9abd
UW
2716 arm_prologue_this_id,
2717 arm_prologue_prev_register,
2718 NULL,
2719 arm_exidx_unwind_sniffer
2720};
2721
779aa56f
YQ
2722static struct arm_prologue_cache *
2723arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2724{
2725 struct arm_prologue_cache *cache;
779aa56f
YQ
2726 int reg;
2727
2728 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2729 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2730
2731 /* Still rely on the offsets calculated from the prologue scan. */
2732 arm_scan_prologue (this_frame, cache);
2733
2734 /* Since we are in the epilogue, the SP has already been restored. */
2735 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2736
2737 /* Calculate actual addresses of saved registers using offsets
2738 determined by arm_scan_prologue. */
2739 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2740 if (trad_frame_addr_p (cache->saved_regs, reg))
2741 cache->saved_regs[reg].addr += cache->prev_sp;
2742
2743 return cache;
2744}
2745
2746/* Implementation of function hook 'this_id' in
2747 'struct frame_unwind' for epilogue unwinder. */
2748
2749static void
2750arm_epilogue_frame_this_id (struct frame_info *this_frame,
2751 void **this_cache,
2752 struct frame_id *this_id)
2753{
2754 struct arm_prologue_cache *cache;
2755 CORE_ADDR pc, func;
2756
2757 if (*this_cache == NULL)
2758 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2759 cache = (struct arm_prologue_cache *) *this_cache;
2760
2761 /* Use function start address as part of the frame ID. If we cannot
2762 identify the start address (due to missing symbol information),
2763 fall back to just using the current PC. */
2764 pc = get_frame_pc (this_frame);
2765 func = get_frame_func (this_frame);
fb3f3d25 2766 if (func == 0)
779aa56f
YQ
2767 func = pc;
2768
2769 (*this_id) = frame_id_build (cache->prev_sp, pc);
2770}
2771
2772/* Implementation of function hook 'prev_register' in
2773 'struct frame_unwind' for epilogue unwinder. */
2774
2775static struct value *
2776arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2777 void **this_cache, int regnum)
2778{
779aa56f
YQ
2779 if (*this_cache == NULL)
2780 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2781
2782 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2783}
2784
2785static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2786 CORE_ADDR pc);
2787static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2788 CORE_ADDR pc);
2789
2790/* Implementation of function hook 'sniffer' in
2791 'struct frame_unwind' for epilogue unwinder. */
2792
2793static int
2794arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2795 struct frame_info *this_frame,
2796 void **this_prologue_cache)
2797{
2798 if (frame_relative_level (this_frame) == 0)
2799 {
2800 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2801 CORE_ADDR pc = get_frame_pc (this_frame);
2802
2803 if (arm_frame_is_thumb (this_frame))
2804 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2805 else
2806 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2807 }
2808 else
2809 return 0;
2810}
2811
2812/* Frame unwinder from epilogue. */
2813
2814static const struct frame_unwind arm_epilogue_frame_unwind =
2815{
2816 NORMAL_FRAME,
2817 default_frame_unwind_stop_reason,
2818 arm_epilogue_frame_this_id,
2819 arm_epilogue_frame_prev_register,
2820 NULL,
2821 arm_epilogue_frame_sniffer,
2822};
2823
80d8d390
YQ
2824/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2825 trampoline, return the target PC. Otherwise return 0.
2826
2827 void call0a (char c, short s, int i, long l) {}
2828
2829 int main (void)
2830 {
2831 (*pointer_to_call0a) (c, s, i, l);
2832 }
2833
2834 Instead of calling a stub library function _call_via_xx (xx is
2835 the register name), GCC may inline the trampoline in the object
2836 file as below (register r2 has the address of call0a).
2837
2838 .global main
2839 .type main, %function
2840 ...
2841 bl .L1
2842 ...
2843 .size main, .-main
2844
2845 .L1:
2846 bx r2
2847
2848 The trampoline 'bx r2' doesn't belong to main. */
2849
2850static CORE_ADDR
2851arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2852{
2853 /* The heuristic for recognizing such a trampoline is that FRAME is
2854 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2855 if (arm_frame_is_thumb (frame))
2856 {
2857 gdb_byte buf[2];
2858
2859 if (target_read_memory (pc, buf, 2) == 0)
2860 {
2861 struct gdbarch *gdbarch = get_frame_arch (frame);
2862 enum bfd_endian byte_order_for_code
2863 = gdbarch_byte_order_for_code (gdbarch);
2864 uint16_t insn
2865 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2866
2867 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2868 {
2869 CORE_ADDR dest
2870 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2871
2872 /* Clear the LSB so that gdb core sets step-resume
2873 breakpoint at the right address. */
2874 return UNMAKE_THUMB_ADDR (dest);
2875 }
2876 }
2877 }
2878
2879 return 0;
2880}
2881
909cf6ea 2882static struct arm_prologue_cache *
a262aec2 2883arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2884{
909cf6ea 2885 struct arm_prologue_cache *cache;
909cf6ea 2886
35d5d4ee 2887 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2888 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2889
a262aec2 2890 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2891
2892 return cache;
2893}
2894
2895/* Our frame ID for a stub frame is the current SP and LR. */
2896
2897static void
a262aec2 2898arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2899 void **this_cache,
2900 struct frame_id *this_id)
2901{
2902 struct arm_prologue_cache *cache;
2903
2904 if (*this_cache == NULL)
a262aec2 2905 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2906 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2907
a262aec2 2908 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2909}
2910
a262aec2
DJ
2911static int
2912arm_stub_unwind_sniffer (const struct frame_unwind *self,
2913 struct frame_info *this_frame,
2914 void **this_prologue_cache)
909cf6ea 2915{
93d42b30 2916 CORE_ADDR addr_in_block;
948f8e3d 2917 gdb_byte dummy[4];
18d18ac8
YQ
2918 CORE_ADDR pc, start_addr;
2919 const char *name;
909cf6ea 2920
a262aec2 2921 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2922 pc = get_frame_pc (this_frame);
3e5d3a5a 2923 if (in_plt_section (addr_in_block)
fc36e839
DE
2924 /* We also use the stub unwinder if the target memory is unreadable
2925 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2926 || target_read_memory (pc, dummy, 4) != 0)
2927 return 1;
2928
2929 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2930 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2931 return 1;
909cf6ea 2932
a262aec2 2933 return 0;
909cf6ea
DJ
2934}
2935
a262aec2
DJ
2936struct frame_unwind arm_stub_unwind = {
2937 NORMAL_FRAME,
8fbca658 2938 default_frame_unwind_stop_reason,
a262aec2
DJ
2939 arm_stub_this_id,
2940 arm_prologue_prev_register,
2941 NULL,
2942 arm_stub_unwind_sniffer
2943};
2944
2ae28aa9
YQ
2945/* Put here the code to store, into CACHE->saved_regs, the addresses
2946 of the saved registers of frame described by THIS_FRAME. CACHE is
2947 returned. */
2948
2949static struct arm_prologue_cache *
2950arm_m_exception_cache (struct frame_info *this_frame)
2951{
2952 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2953 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2954 struct arm_prologue_cache *cache;
2955 CORE_ADDR unwound_sp;
2956 LONGEST xpsr;
2957
2958 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2959 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2960
2961 unwound_sp = get_frame_register_unsigned (this_frame,
2962 ARM_SP_REGNUM);
2963
2964 /* The hardware saves eight 32-bit words, comprising xPSR,
2965 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2966 "B1.5.6 Exception entry behavior" in
2967 "ARMv7-M Architecture Reference Manual". */
2968 cache->saved_regs[0].addr = unwound_sp;
2969 cache->saved_regs[1].addr = unwound_sp + 4;
2970 cache->saved_regs[2].addr = unwound_sp + 8;
2971 cache->saved_regs[3].addr = unwound_sp + 12;
2972 cache->saved_regs[12].addr = unwound_sp + 16;
2973 cache->saved_regs[14].addr = unwound_sp + 20;
2974 cache->saved_regs[15].addr = unwound_sp + 24;
2975 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2976
2977 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2978 aligner between the top of the 32-byte stack frame and the
2979 previous context's stack pointer. */
2980 cache->prev_sp = unwound_sp + 32;
2981 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2982 && (xpsr & (1 << 9)) != 0)
2983 cache->prev_sp += 4;
2984
2985 return cache;
2986}
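
/* A small worked example of the layout above (addresses invented for
   illustration): if the handler reads SP as 0x2000ff80, then R0..R3,
   R12, LR, the return address and xPSR were stacked at 0x2000ff80,
   0x2000ff84, ..., 0x2000ff9c, and the caller's SP is 0x2000ffa0 (or
   0x2000ffa4 if bit 9 of the stacked xPSR indicates the four-byte
   aligner).  */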
2987
2988/* Implementation of function hook 'this_id' in
2989 'struct frame_unwind'. */
2990
2991static void
2992arm_m_exception_this_id (struct frame_info *this_frame,
2993 void **this_cache,
2994 struct frame_id *this_id)
2995{
2996 struct arm_prologue_cache *cache;
2997
2998 if (*this_cache == NULL)
2999 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3000 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3001
3002 /* Our frame ID for an exception frame is the unwound SP and the current PC. */
3003 *this_id = frame_id_build (cache->prev_sp,
3004 get_frame_pc (this_frame));
3005}
3006
3007/* Implementation of function hook 'prev_register' in
3008 'struct frame_unwind'. */
3009
3010static struct value *
3011arm_m_exception_prev_register (struct frame_info *this_frame,
3012 void **this_cache,
3013 int prev_regnum)
3014{
2ae28aa9
YQ
3015 struct arm_prologue_cache *cache;
3016
3017 if (*this_cache == NULL)
3018 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3019 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3020
3021 /* The value was already reconstructed into PREV_SP. */
3022 if (prev_regnum == ARM_SP_REGNUM)
3023 return frame_unwind_got_constant (this_frame, prev_regnum,
3024 cache->prev_sp);
3025
3026 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3027 prev_regnum);
3028}
3029
3030/* Implementation of function hook 'sniffer' in
3031 'struct frame_unwind'. */
3032
3033static int
3034arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3035 struct frame_info *this_frame,
3036 void **this_prologue_cache)
3037{
3038 CORE_ADDR this_pc = get_frame_pc (this_frame);
3039
3040 /* No need to check is_m; this sniffer is only registered for
3041 M-profile architectures. */
3042
ca90e760
FH
3043 /* Check if exception frame returns to a magic PC value. */
3044 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3045}
3046
3047/* Frame unwinder for M-profile exceptions. */
3048
3049struct frame_unwind arm_m_exception_unwind =
3050{
3051 SIGTRAMP_FRAME,
3052 default_frame_unwind_stop_reason,
3053 arm_m_exception_this_id,
3054 arm_m_exception_prev_register,
3055 NULL,
3056 arm_m_exception_unwind_sniffer
3057};
3058
24de872b 3059static CORE_ADDR
a262aec2 3060arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3061{
3062 struct arm_prologue_cache *cache;
3063
eb5492fa 3064 if (*this_cache == NULL)
a262aec2 3065 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3066 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3067
4be43953 3068 return cache->prev_sp - cache->framesize;
24de872b
DJ
3069}
3070
eb5492fa
DJ
3071struct frame_base arm_normal_base = {
3072 &arm_prologue_unwind,
3073 arm_normal_frame_base,
3074 arm_normal_frame_base,
3075 arm_normal_frame_base
3076};
3077
a262aec2 3078/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3079 dummy frame. The frame ID's base needs to match the TOS value
3080 saved by save_dummy_frame_tos() and returned from
3081 arm_push_dummy_call, and the PC needs to match the dummy frame's
3082 breakpoint. */
c906108c 3083
eb5492fa 3084static struct frame_id
a262aec2 3085arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3086{
0963b4bd
MS
3087 return frame_id_build (get_frame_register_unsigned (this_frame,
3088 ARM_SP_REGNUM),
a262aec2 3089 get_frame_pc (this_frame));
eb5492fa 3090}
c3b4394c 3091
eb5492fa
DJ
3092/* Given THIS_FRAME, find the previous frame's resume PC (which will
3093 be used to construct the previous frame's ID, after looking up the
3094 containing function). */
c3b4394c 3095
eb5492fa
DJ
3096static CORE_ADDR
3097arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3098{
3099 CORE_ADDR pc;
3100 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3101 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3102}
3103
3104static CORE_ADDR
3105arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3106{
3107 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3108}
3109
b39cc962
DJ
3110static struct value *
3111arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3112 int regnum)
3113{
24568a2c 3114 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3115 CORE_ADDR lr, cpsr;
9779414d 3116 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3117
3118 switch (regnum)
3119 {
3120 case ARM_PC_REGNUM:
3121 /* The PC is normally copied from the return column, which
3122 describes saves of LR. However, that version may have an
3123 extra bit set to indicate Thumb state. The bit is not
3124 part of the PC. */
3125 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3126 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3127 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3128
3129 case ARM_PS_REGNUM:
3130 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3131 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3132 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3133 if (IS_THUMB_ADDR (lr))
9779414d 3134 cpsr |= t_bit;
b39cc962 3135 else
9779414d 3136 cpsr &= ~t_bit;
ca38c58e 3137 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3138
3139 default:
3140 internal_error (__FILE__, __LINE__,
3141 _("Unexpected register %d"), regnum);
3142 }
3143}
3144
3145static void
3146arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3147 struct dwarf2_frame_state_reg *reg,
3148 struct frame_info *this_frame)
3149{
3150 switch (regnum)
3151 {
3152 case ARM_PC_REGNUM:
3153 case ARM_PS_REGNUM:
3154 reg->how = DWARF2_FRAME_REG_FN;
3155 reg->loc.fn = arm_dwarf2_prev_register;
3156 break;
3157 case ARM_SP_REGNUM:
3158 reg->how = DWARF2_FRAME_REG_CFA;
3159 break;
3160 }
3161}
3162
c9cf6e20 3163/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3164
3165static int
c9cf6e20 3166thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3167{
3168 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3169 unsigned int insn, insn2;
3170 int found_return = 0, found_stack_adjust = 0;
3171 CORE_ADDR func_start, func_end;
3172 CORE_ADDR scan_pc;
3173 gdb_byte buf[4];
3174
3175 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3176 return 0;
3177
3178 /* The epilogue is a sequence of instructions along the following lines:
3179
3180 - add stack frame size to SP or FP
3181 - [if frame pointer used] restore SP from FP
3182 - restore registers from SP [may include PC]
3183 - a return-type instruction [if PC wasn't already restored]
3184
3185 In a first pass, we scan forward from the current PC and verify the
3186 instructions we find as compatible with this sequence, ending in a
3187 return instruction.
3188
3189 However, this is not sufficient to distinguish indirect function calls
3190 within a function from indirect tail calls in the epilogue in some cases.
3191 Therefore, if we didn't already find any SP-changing instruction during
3192 forward scan, we add a backward scanning heuristic to ensure we actually
3193 are in the epilogue. */
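  /* For instance (encodings given only for illustration), with PC at
     the "pop" of

	add sp, #8		; 0xb002
	pop {r4, pc}		; 0xbd10

     the forward scan accepts the pop as a return instruction, and the
     backward scan finds the SP adjustment immediately before PC.  */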
3194
3195 scan_pc = pc;
3196 while (scan_pc < func_end && !found_return)
3197 {
3198 if (target_read_memory (scan_pc, buf, 2))
3199 break;
3200
3201 scan_pc += 2;
3202 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3203
3204 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3205 found_return = 1;
3206 else if (insn == 0x46f7) /* mov pc, lr */
3207 found_return = 1;
540314bd 3208 else if (thumb_instruction_restores_sp (insn))
4024ca99 3209 {
b7576e5c 3210 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3211 found_return = 1;
3212 }
db24da6d 3213 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3214 {
3215 if (target_read_memory (scan_pc, buf, 2))
3216 break;
3217
3218 scan_pc += 2;
3219 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3220
3221 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3222 {
4024ca99
UW
3223 if (insn2 & 0x8000) /* <registers> include PC. */
3224 found_return = 1;
3225 }
3226 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3227 && (insn2 & 0x0fff) == 0x0b04)
3228 {
4024ca99
UW
3229 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3230 found_return = 1;
3231 }
3232 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3233 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3234 ;
4024ca99
UW
3235 else
3236 break;
3237 }
3238 else
3239 break;
3240 }
3241
3242 if (!found_return)
3243 return 0;
3244
3245 /* Since any instruction in the epilogue sequence, with the possible
3246 exception of return itself, updates the stack pointer, we need to
3247 scan backwards for at most one instruction. Try either a 16-bit or
3248 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3249 too much about false positives. */
4024ca99 3250
6b65d1b6
YQ
3251 if (pc - 4 < func_start)
3252 return 0;
3253 if (target_read_memory (pc - 4, buf, 4))
3254 return 0;
4024ca99 3255
6b65d1b6
YQ
3256 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3257 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3258
3259 if (thumb_instruction_restores_sp (insn2))
3260 found_stack_adjust = 1;
3261 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3262 found_stack_adjust = 1;
3263 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3264 && (insn2 & 0x0fff) == 0x0b04)
3265 found_stack_adjust = 1;
3266 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3267 && (insn2 & 0x0e00) == 0x0a00)
3268 found_stack_adjust = 1;
4024ca99
UW
3269
3270 return found_stack_adjust;
3271}
3272
4024ca99 3273static int
c58b006a 3274arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3275{
3276 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3277 unsigned int insn;
f303bc3e 3278 int found_return;
4024ca99
UW
3279 CORE_ADDR func_start, func_end;
3280
4024ca99
UW
3281 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3282 return 0;
3283
3284 /* We are in the epilogue if the previous instruction was a stack
3285 adjustment and the next instruction is a possible return (bx, mov
3286 pc, or pop). We could have to scan backwards to find the stack
3287 adjustment, or forwards to find the return, but this is a decent
3288 approximation. First scan forwards. */
3289
3290 found_return = 0;
3291 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3292 if (bits (insn, 28, 31) != INST_NV)
3293 {
3294 if ((insn & 0x0ffffff0) == 0x012fff10)
3295 /* BX. */
3296 found_return = 1;
3297 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3298 /* MOV PC. */
3299 found_return = 1;
3300 else if ((insn & 0x0fff0000) == 0x08bd0000
3301 && (insn & 0x0000c000) != 0)
3302 /* POP (LDMIA), including PC or LR. */
3303 found_return = 1;
3304 }
3305
3306 if (!found_return)
3307 return 0;
3308
3309 /* Scan backwards. This is just a heuristic, so do not worry about
3310 false positives from mode changes. */
3311
3312 if (pc < func_start + 4)
3313 return 0;
3314
3315 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3316 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3317 return 1;
3318
3319 return 0;
3320}
3321
c58b006a
YQ
3322/* Implement the stack_frame_destroyed_p gdbarch method. */
3323
3324static int
3325arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3326{
3327 if (arm_pc_is_thumb (gdbarch, pc))
3328 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3329 else
3330 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3331}
4024ca99 3332
2dd604e7
RE
3333/* When arguments must be pushed onto the stack, they go on in reverse
3334 order. The code below implements a FILO (stack) to do this. */
3335
3336struct stack_item
3337{
3338 int len;
3339 struct stack_item *prev;
7c543f7b 3340 gdb_byte *data;
2dd604e7
RE
3341};
3342
3343static struct stack_item *
df3b6708 3344push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3345{
3346 struct stack_item *si;
8d749320 3347 si = XNEW (struct stack_item);
7c543f7b 3348 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3349 si->len = len;
3350 si->prev = prev;
3351 memcpy (si->data, contents, len);
3352 return si;
3353}
3354
3355static struct stack_item *
3356pop_stack_item (struct stack_item *si)
3357{
3358 struct stack_item *dead = si;
3359 si = si->prev;
3360 xfree (dead->data);
3361 xfree (dead);
3362 return si;
3363}
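/* Usage sketch (illustrative values only, not taken from a real call):

     si = push_stack_item (NULL, buf_a, 4);
     si = push_stack_item (si, buf_b, 4);

   where buf_a and buf_b are hypothetical argument buffers.  Walking the
   list with pop_stack_item then yields buf_b before buf_a, so the item
   pushed last is written to memory first.  This is the FILO behaviour the
   stack write-back loop at the end of arm_push_dummy_call relies on, since
   arguments are laid out at decreasing addresses as SP is lowered.  */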
3364
2af48f68
PB
3365
3366/* Return the alignment (in bytes) of the given type. */
3367
3368static int
3369arm_type_align (struct type *t)
3370{
3371 int n;
3372 int align;
3373 int falign;
3374
3375 t = check_typedef (t);
3376 switch (TYPE_CODE (t))
3377 {
3378 default:
3379 /* Should never happen. */
3380 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3381 return 4;
3382
3383 case TYPE_CODE_PTR:
3384 case TYPE_CODE_ENUM:
3385 case TYPE_CODE_INT:
3386 case TYPE_CODE_FLT:
3387 case TYPE_CODE_SET:
3388 case TYPE_CODE_RANGE:
2af48f68
PB
3389 case TYPE_CODE_REF:
3390 case TYPE_CODE_CHAR:
3391 case TYPE_CODE_BOOL:
3392 return TYPE_LENGTH (t);
3393
3394 case TYPE_CODE_ARRAY:
c4312b19
YQ
3395 if (TYPE_VECTOR (t))
3396 {
3397	    /* Use the natural alignment for vector types (the same as for
3398	       the scalar element type), but the maximum alignment is 64 bits.  */
3399 if (TYPE_LENGTH (t) > 8)
3400 return 8;
3401 else
3402 return TYPE_LENGTH (t);
3403 }
3404 else
3405 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3406 case TYPE_CODE_COMPLEX:
2af48f68
PB
3407 return arm_type_align (TYPE_TARGET_TYPE (t));
3408
3409 case TYPE_CODE_STRUCT:
3410 case TYPE_CODE_UNION:
3411 align = 1;
3412 for (n = 0; n < TYPE_NFIELDS (t); n++)
3413 {
3414 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3415 if (falign > align)
3416 align = falign;
3417 }
3418 return align;
3419 }
3420}
3421
90445bd3
DJ
3422/* Possible base types for a candidate for passing and returning in
3423 VFP registers. */
3424
3425enum arm_vfp_cprc_base_type
3426{
3427 VFP_CPRC_UNKNOWN,
3428 VFP_CPRC_SINGLE,
3429 VFP_CPRC_DOUBLE,
3430 VFP_CPRC_VEC64,
3431 VFP_CPRC_VEC128
3432};
3433
3434/* The length of one element of base type B. */
3435
3436static unsigned
3437arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3438{
3439 switch (b)
3440 {
3441 case VFP_CPRC_SINGLE:
3442 return 4;
3443 case VFP_CPRC_DOUBLE:
3444 return 8;
3445 case VFP_CPRC_VEC64:
3446 return 8;
3447 case VFP_CPRC_VEC128:
3448 return 16;
3449 default:
3450 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3451 (int) b);
3452 }
3453}
3454
3455/* The character ('s', 'd' or 'q') for the type of VFP register used
3456 for passing base type B. */
3457
3458static int
3459arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3460{
3461 switch (b)
3462 {
3463 case VFP_CPRC_SINGLE:
3464 return 's';
3465 case VFP_CPRC_DOUBLE:
3466 return 'd';
3467 case VFP_CPRC_VEC64:
3468 return 'd';
3469 case VFP_CPRC_VEC128:
3470 return 'q';
3471 default:
3472 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3473 (int) b);
3474 }
3475}
3476
3477/* Determine whether T may be part of a candidate for passing and
3478 returning in VFP registers, ignoring the limit on the total number
3479 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3480 classification of the first valid component found; if it is not
3481 VFP_CPRC_UNKNOWN, all components must have the same classification
3482 as *BASE_TYPE. If it is found that T contains a type not permitted
3483 for passing and returning in VFP registers, a type differently
3484 classified from *BASE_TYPE, or two types differently classified
3485 from each other, return -1, otherwise return the total number of
3486 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3487 array). Vector types are not currently supported, matching the
3488 generic AAPCS support. */
90445bd3
DJ
3489
3490static int
3491arm_vfp_cprc_sub_candidate (struct type *t,
3492 enum arm_vfp_cprc_base_type *base_type)
3493{
3494 t = check_typedef (t);
3495 switch (TYPE_CODE (t))
3496 {
3497 case TYPE_CODE_FLT:
3498 switch (TYPE_LENGTH (t))
3499 {
3500 case 4:
3501 if (*base_type == VFP_CPRC_UNKNOWN)
3502 *base_type = VFP_CPRC_SINGLE;
3503 else if (*base_type != VFP_CPRC_SINGLE)
3504 return -1;
3505 return 1;
3506
3507 case 8:
3508 if (*base_type == VFP_CPRC_UNKNOWN)
3509 *base_type = VFP_CPRC_DOUBLE;
3510 else if (*base_type != VFP_CPRC_DOUBLE)
3511 return -1;
3512 return 1;
3513
3514 default:
3515 return -1;
3516 }
3517 break;
3518
817e0957
YQ
3519 case TYPE_CODE_COMPLEX:
3520 /* Arguments of complex T where T is one of the types float or
3521 double get treated as if they are implemented as:
3522
3523 struct complexT
3524 {
3525 T real;
3526 T imag;
5f52445b
YQ
3527 };
3528
3529 */
817e0957
YQ
3530 switch (TYPE_LENGTH (t))
3531 {
3532 case 8:
3533 if (*base_type == VFP_CPRC_UNKNOWN)
3534 *base_type = VFP_CPRC_SINGLE;
3535 else if (*base_type != VFP_CPRC_SINGLE)
3536 return -1;
3537 return 2;
3538
3539 case 16:
3540 if (*base_type == VFP_CPRC_UNKNOWN)
3541 *base_type = VFP_CPRC_DOUBLE;
3542 else if (*base_type != VFP_CPRC_DOUBLE)
3543 return -1;
3544 return 2;
3545
3546 default:
3547 return -1;
3548 }
3549 break;
3550
90445bd3
DJ
3551 case TYPE_CODE_ARRAY:
3552 {
c4312b19 3553 if (TYPE_VECTOR (t))
90445bd3 3554 {
c4312b19
YQ
3555	    /* A 64-bit or 128-bit containerized vector type is a VFP
3556	       CPRC.  */
3557 switch (TYPE_LENGTH (t))
3558 {
3559 case 8:
3560 if (*base_type == VFP_CPRC_UNKNOWN)
3561 *base_type = VFP_CPRC_VEC64;
3562 return 1;
3563 case 16:
3564 if (*base_type == VFP_CPRC_UNKNOWN)
3565 *base_type = VFP_CPRC_VEC128;
3566 return 1;
3567 default:
3568 return -1;
3569 }
3570 }
3571 else
3572 {
3573 int count;
3574 unsigned unitlen;
3575
3576 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3577 base_type);
3578 if (count == -1)
3579 return -1;
3580 if (TYPE_LENGTH (t) == 0)
3581 {
3582 gdb_assert (count == 0);
3583 return 0;
3584 }
3585 else if (count == 0)
3586 return -1;
3587 unitlen = arm_vfp_cprc_unit_length (*base_type);
3588 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3589 return TYPE_LENGTH (t) / unitlen;
90445bd3 3590 }
90445bd3
DJ
3591 }
3592 break;
3593
3594 case TYPE_CODE_STRUCT:
3595 {
3596 int count = 0;
3597 unsigned unitlen;
3598 int i;
3599 for (i = 0; i < TYPE_NFIELDS (t); i++)
3600 {
1040b979
YQ
3601 int sub_count = 0;
3602
3603 if (!field_is_static (&TYPE_FIELD (t, i)))
3604 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3605 base_type);
90445bd3
DJ
3606 if (sub_count == -1)
3607 return -1;
3608 count += sub_count;
3609 }
3610 if (TYPE_LENGTH (t) == 0)
3611 {
3612 gdb_assert (count == 0);
3613 return 0;
3614 }
3615 else if (count == 0)
3616 return -1;
3617 unitlen = arm_vfp_cprc_unit_length (*base_type);
3618 if (TYPE_LENGTH (t) != unitlen * count)
3619 return -1;
3620 return count;
3621 }
3622
3623 case TYPE_CODE_UNION:
3624 {
3625 int count = 0;
3626 unsigned unitlen;
3627 int i;
3628 for (i = 0; i < TYPE_NFIELDS (t); i++)
3629 {
3630 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3631 base_type);
3632 if (sub_count == -1)
3633 return -1;
3634 count = (count > sub_count ? count : sub_count);
3635 }
3636 if (TYPE_LENGTH (t) == 0)
3637 {
3638 gdb_assert (count == 0);
3639 return 0;
3640 }
3641 else if (count == 0)
3642 return -1;
3643 unitlen = arm_vfp_cprc_unit_length (*base_type);
3644 if (TYPE_LENGTH (t) != unitlen * count)
3645 return -1;
3646 return count;
3647 }
3648
3649 default:
3650 break;
3651 }
3652
3653 return -1;
3654}
3655
3656/* Determine whether T is a VFP co-processor register candidate (CPRC)
3657 if passed to or returned from a non-variadic function with the VFP
3658 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3659 *BASE_TYPE to the base type for T and *COUNT to the number of
3660 elements of that base type before returning. */
3661
3662static int
3663arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3664 int *count)
3665{
3666 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3667 int c = arm_vfp_cprc_sub_candidate (t, &b);
3668 if (c <= 0 || c > 4)
3669 return 0;
3670 *base_type = b;
3671 *count = c;
3672 return 1;
3673}
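/* Illustrative classifications implied by the rules above (hypothetical
   argument types, not an exhaustive list):

     struct { float x, y, z; }      -> base type VFP_CPRC_SINGLE, count 3
     _Complex double                -> base type VFP_CPRC_DOUBLE, count 2
     struct { double d[5]; }        -> count 5 exceeds 4, so not a CPRC
     struct { float f; double d; }  -> mixed base types, so not a CPRC  */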
3674
3675/* Return 1 if the VFP ABI should be used for passing arguments to and
3676 returning values from a function of type FUNC_TYPE, 0
3677 otherwise. */
3678
3679static int
3680arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3681{
3682 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3683 /* Variadic functions always use the base ABI. Assume that functions
3684 without debug info are not variadic. */
3685 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3686 return 0;
3687 /* The VFP ABI is only supported as a variant of AAPCS. */
3688 if (tdep->arm_abi != ARM_ABI_AAPCS)
3689 return 0;
3690 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3691}
3692
3693/* We currently only support passing parameters in integer registers, which
3694 conforms with GCC's default model, and VFP argument passing following
3695 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3696 we should probably support some of them based on the selected ABI. */
3697
3698static CORE_ADDR
7d9b040b 3699arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3700 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3701 struct value **args, CORE_ADDR sp, int struct_return,
3702 CORE_ADDR struct_addr)
2dd604e7 3703{
e17a4113 3704 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3705 int argnum;
3706 int argreg;
3707 int nstack;
3708 struct stack_item *si = NULL;
90445bd3
DJ
3709 int use_vfp_abi;
3710 struct type *ftype;
3711 unsigned vfp_regs_free = (1 << 16) - 1;
3712
3713 /* Determine the type of this function and whether the VFP ABI
3714 applies. */
3715 ftype = check_typedef (value_type (function));
3716 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3717 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3718 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3719
6a65450a
AC
3720 /* Set the return address. For the ARM, the return breakpoint is
3721 always at BP_ADDR. */
9779414d 3722 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3723 bp_addr |= 1;
6a65450a 3724 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3725
3726 /* Walk through the list of args and determine how large a temporary
3727 stack is required. Need to take care here as structs may be
7a9dd1b2 3728 passed on the stack, and we have to push them. */
2dd604e7
RE
3729 nstack = 0;
3730
3731 argreg = ARM_A1_REGNUM;
3732 nstack = 0;
3733
2dd604e7
RE
3734 /* The struct_return pointer occupies the first parameter
3735 passing register. */
3736 if (struct_return)
3737 {
3738 if (arm_debug)
5af949e3 3739 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3740 gdbarch_register_name (gdbarch, argreg),
5af949e3 3741 paddress (gdbarch, struct_addr));
2dd604e7
RE
3742 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3743 argreg++;
3744 }
3745
3746 for (argnum = 0; argnum < nargs; argnum++)
3747 {
3748 int len;
3749 struct type *arg_type;
3750 struct type *target_type;
3751 enum type_code typecode;
8c6363cf 3752 const bfd_byte *val;
2af48f68 3753 int align;
90445bd3
DJ
3754 enum arm_vfp_cprc_base_type vfp_base_type;
3755 int vfp_base_count;
3756 int may_use_core_reg = 1;
2dd604e7 3757
df407dfe 3758 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3759 len = TYPE_LENGTH (arg_type);
3760 target_type = TYPE_TARGET_TYPE (arg_type);
3761 typecode = TYPE_CODE (arg_type);
8c6363cf 3762 val = value_contents (args[argnum]);
2dd604e7 3763
2af48f68
PB
3764 align = arm_type_align (arg_type);
3765 /* Round alignment up to a whole number of words. */
3766 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3767 /* Different ABIs have different maximum alignments. */
3768 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3769 {
3770 /* The APCS ABI only requires word alignment. */
3771 align = INT_REGISTER_SIZE;
3772 }
3773 else
3774 {
3775 /* The AAPCS requires at most doubleword alignment. */
3776 if (align > INT_REGISTER_SIZE * 2)
3777 align = INT_REGISTER_SIZE * 2;
3778 }
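      /* Worked example (illustrative, not quoted from the ABI documents):
	 an argument whose most-aligned member is a double has a natural
	 alignment of 8.  Rounding to whole words leaves it at 8, so under
	 AAPCS it keeps doubleword alignment (and below must start in an
	 even-numbered core register), while under APCS it is reduced to
	 INT_REGISTER_SIZE, i.e. plain word alignment.  */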
3779
90445bd3
DJ
3780 if (use_vfp_abi
3781 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3782 &vfp_base_count))
3783 {
3784 int regno;
3785 int unit_length;
3786 int shift;
3787 unsigned mask;
3788
3789 /* Because this is a CPRC it cannot go in a core register or
3790 cause a core register to be skipped for alignment.
3791 Either it goes in VFP registers and the rest of this loop
3792 iteration is skipped for this argument, or it goes on the
3793 stack (and the stack alignment code is correct for this
3794 case). */
3795 may_use_core_reg = 0;
3796
3797 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3798 shift = unit_length / 4;
3799 mask = (1 << (shift * vfp_base_count)) - 1;
3800 for (regno = 0; regno < 16; regno += shift)
3801 if (((vfp_regs_free >> regno) & mask) == mask)
3802 break;
3803
3804 if (regno < 16)
3805 {
3806 int reg_char;
3807 int reg_scaled;
3808 int i;
3809
3810 vfp_regs_free &= ~(mask << regno);
3811 reg_scaled = regno / shift;
3812 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3813 for (i = 0; i < vfp_base_count; i++)
3814 {
3815 char name_buf[4];
3816 int regnum;
58d6951d
DJ
3817 if (reg_char == 'q')
3818 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3819 val + i * unit_length);
58d6951d
DJ
3820 else
3821 {
8c042590
PM
3822 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3823 reg_char, reg_scaled + i);
58d6951d
DJ
3824 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3825 strlen (name_buf));
3826 regcache_cooked_write (regcache, regnum,
3827 val + i * unit_length);
3828 }
90445bd3
DJ
3829 }
3830 continue;
3831 }
3832 else
3833 {
3834 /* This CPRC could not go in VFP registers, so all VFP
3835 registers are now marked as used. */
3836 vfp_regs_free = 0;
3837 }
3838 }
3839
2af48f68
PB
3840	      /* Push stack padding for doubleword alignment.  */
3841 if (nstack & (align - 1))
3842 {
3843 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3844 nstack += INT_REGISTER_SIZE;
3845 }
3846
3847 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3848 if (may_use_core_reg
3849 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3850 && align > INT_REGISTER_SIZE
3851 && argreg & 1)
3852 argreg++;
3853
2dd604e7
RE
3854 /* If the argument is a pointer to a function, and it is a
3855 Thumb function, create a LOCAL copy of the value and set
3856 the THUMB bit in it. */
3857 if (TYPE_CODE_PTR == typecode
3858 && target_type != NULL
f96b8fa0 3859 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3860 {
e17a4113 3861 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3862 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3863 {
224c3ddb 3864 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3865 store_unsigned_integer (copy, len, byte_order,
e17a4113 3866 MAKE_THUMB_ADDR (regval));
8c6363cf 3867 val = copy;
2dd604e7
RE
3868 }
3869 }
3870
3871 /* Copy the argument to general registers or the stack in
3872 register-sized pieces. Large arguments are split between
3873 registers and stack. */
3874 while (len > 0)
3875 {
f0c9063c 3876 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3877 CORE_ADDR regval
3878 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3879
90445bd3 3880 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3881 {
3882 /* The argument is being passed in a general purpose
3883 register. */
e17a4113 3884 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3885 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3886 if (arm_debug)
3887 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3888 argnum,
3889 gdbarch_register_name
2af46ca0 3890 (gdbarch, argreg),
f0c9063c 3891 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3892 regcache_cooked_write_unsigned (regcache, argreg, regval);
3893 argreg++;
3894 }
3895 else
3896 {
ef9bd0b8
YQ
3897 gdb_byte buf[INT_REGISTER_SIZE];
3898
3899 memset (buf, 0, sizeof (buf));
3900 store_unsigned_integer (buf, partial_len, byte_order, regval);
3901
2dd604e7
RE
3902 /* Push the arguments onto the stack. */
3903 if (arm_debug)
3904 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3905 argnum, nstack);
ef9bd0b8 3906 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3907 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3908 }
3909
3910 len -= partial_len;
3911 val += partial_len;
3912 }
3913 }
3914 /* If we have an odd number of words to push, then decrement the stack
3915 by one word now, so first stack argument will be dword aligned. */
3916 if (nstack & 4)
3917 sp -= 4;
3918
3919 while (si)
3920 {
3921 sp -= si->len;
3922 write_memory (sp, si->data, si->len);
3923 si = pop_stack_item (si);
3924 }
3925
3926	  /* Finally, update the SP register.  */
3927 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3928
3929 return sp;
3930}
3931
f53f0d0b
PB
3932
3933/* Always align the frame to an 8-byte boundary. This is required on
3934 some platforms and harmless on the rest. */
3935
3936static CORE_ADDR
3937arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3938{
3939 /* Align the stack to eight bytes. */
3940 return sp & ~ (CORE_ADDR) 7;
3941}
3942
c906108c 3943static void
12b27276 3944print_fpu_flags (struct ui_file *file, int flags)
c906108c 3945{
c5aa993b 3946 if (flags & (1 << 0))
12b27276 3947 fputs_filtered ("IVO ", file);
c5aa993b 3948 if (flags & (1 << 1))
12b27276 3949 fputs_filtered ("DVZ ", file);
c5aa993b 3950 if (flags & (1 << 2))
12b27276 3951 fputs_filtered ("OFL ", file);
c5aa993b 3952 if (flags & (1 << 3))
12b27276 3953 fputs_filtered ("UFL ", file);
c5aa993b 3954 if (flags & (1 << 4))
12b27276
WN
3955 fputs_filtered ("INX ", file);
3956 fputc_filtered ('\n', file);
c906108c
SS
3957}
3958
5e74b15c
RE
3959/* Print interesting information about the floating point processor
3960 (if present) or emulator. */
34e8f22d 3961static void
d855c300 3962arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3963 struct frame_info *frame, const char *args)
c906108c 3964{
9c9acae0 3965 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3966 int type;
3967
3968 type = (status >> 24) & 127;
edefbb7c 3969 if (status & (1 << 31))
12b27276 3970 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3971 else
12b27276 3972 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3973 /* i18n: [floating point unit] mask */
12b27276
WN
3974 fputs_filtered (_("mask: "), file);
3975 print_fpu_flags (file, status >> 16);
edefbb7c 3976 /* i18n: [floating point unit] flags */
12b27276
WN
3977 fputs_filtered (_("flags: "), file);
3978 print_fpu_flags (file, status);
c906108c
SS
3979}
3980
27067745
UW
3981/* Construct the ARM extended floating point type. */
3982static struct type *
3983arm_ext_type (struct gdbarch *gdbarch)
3984{
3985 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3986
3987 if (!tdep->arm_ext_type)
3988 tdep->arm_ext_type
e9bb382b 3989 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3990 floatformats_arm_ext);
3991
3992 return tdep->arm_ext_type;
3993}
3994
58d6951d
DJ
3995static struct type *
3996arm_neon_double_type (struct gdbarch *gdbarch)
3997{
3998 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3999
4000 if (tdep->neon_double_type == NULL)
4001 {
4002 struct type *t, *elem;
4003
4004 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4005 TYPE_CODE_UNION);
4006 elem = builtin_type (gdbarch)->builtin_uint8;
4007 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4008 elem = builtin_type (gdbarch)->builtin_uint16;
4009 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4010 elem = builtin_type (gdbarch)->builtin_uint32;
4011 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4012 elem = builtin_type (gdbarch)->builtin_uint64;
4013 append_composite_type_field (t, "u64", elem);
4014 elem = builtin_type (gdbarch)->builtin_float;
4015 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4016 elem = builtin_type (gdbarch)->builtin_double;
4017 append_composite_type_field (t, "f64", elem);
4018
4019 TYPE_VECTOR (t) = 1;
4020 TYPE_NAME (t) = "neon_d";
4021 tdep->neon_double_type = t;
4022 }
4023
4024 return tdep->neon_double_type;
4025}
4026
4027/* FIXME: The vector types are not correctly ordered on big-endian
4028 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4029 bits of d0 - regardless of what unit size is being held in d0. So
4030 the offset of the first uint8 in d0 is 7, but the offset of the
4031 first float is 4. This code works as-is for little-endian
4032 targets. */
4033
4034static struct type *
4035arm_neon_quad_type (struct gdbarch *gdbarch)
4036{
4037 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4038
4039 if (tdep->neon_quad_type == NULL)
4040 {
4041 struct type *t, *elem;
4042
4043 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4044 TYPE_CODE_UNION);
4045 elem = builtin_type (gdbarch)->builtin_uint8;
4046 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4047 elem = builtin_type (gdbarch)->builtin_uint16;
4048 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4049 elem = builtin_type (gdbarch)->builtin_uint32;
4050 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4051 elem = builtin_type (gdbarch)->builtin_uint64;
4052 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4053 elem = builtin_type (gdbarch)->builtin_float;
4054 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4055 elem = builtin_type (gdbarch)->builtin_double;
4056 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4057
4058 TYPE_VECTOR (t) = 1;
4059 TYPE_NAME (t) = "neon_q";
4060 tdep->neon_quad_type = t;
4061 }
4062
4063 return tdep->neon_quad_type;
4064}
4065
34e8f22d
RE
4066/* Return the GDB type object for the "standard" data type of data in
4067 register N. */
4068
4069static struct type *
7a5ea0d4 4070arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4071{
58d6951d
DJ
4072 int num_regs = gdbarch_num_regs (gdbarch);
4073
4074 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4075 && regnum >= num_regs && regnum < num_regs + 32)
4076 return builtin_type (gdbarch)->builtin_float;
4077
4078 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4079 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4080 return arm_neon_quad_type (gdbarch);
4081
4082 /* If the target description has register information, we are only
4083 in this function so that we can override the types of
4084 double-precision registers for NEON. */
4085 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4086 {
4087 struct type *t = tdesc_register_type (gdbarch, regnum);
4088
4089 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4090 && TYPE_CODE (t) == TYPE_CODE_FLT
4091 && gdbarch_tdep (gdbarch)->have_neon)
4092 return arm_neon_double_type (gdbarch);
4093 else
4094 return t;
4095 }
4096
34e8f22d 4097 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4098 {
4099 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4100 return builtin_type (gdbarch)->builtin_void;
4101
4102 return arm_ext_type (gdbarch);
4103 }
e4c16157 4104 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4105 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4106 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4107 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4108 else if (regnum >= ARRAY_SIZE (arm_register_names))
4109 /* These registers are only supported on targets which supply
4110 an XML description. */
df4df182 4111 return builtin_type (gdbarch)->builtin_int0;
032758dc 4112 else
df4df182 4113 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4114}
4115
ff6f572f
DJ
4116/* Map a DWARF register REGNUM onto the appropriate GDB register
4117 number. */
4118
4119static int
d3f73121 4120arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4121{
4122 /* Core integer regs. */
4123 if (reg >= 0 && reg <= 15)
4124 return reg;
4125
4126 /* Legacy FPA encoding. These were once used in a way which
4127 overlapped with VFP register numbering, so their use is
4128 discouraged, but GDB doesn't support the ARM toolchain
4129 which used them for VFP. */
4130 if (reg >= 16 && reg <= 23)
4131 return ARM_F0_REGNUM + reg - 16;
4132
4133 /* New assignments for the FPA registers. */
4134 if (reg >= 96 && reg <= 103)
4135 return ARM_F0_REGNUM + reg - 96;
4136
4137 /* WMMX register assignments. */
4138 if (reg >= 104 && reg <= 111)
4139 return ARM_WCGR0_REGNUM + reg - 104;
4140
4141 if (reg >= 112 && reg <= 127)
4142 return ARM_WR0_REGNUM + reg - 112;
4143
4144 if (reg >= 192 && reg <= 199)
4145 return ARM_WC0_REGNUM + reg - 192;
4146
58d6951d
DJ
4147 /* VFP v2 registers. A double precision value is actually
4148 in d1 rather than s2, but the ABI only defines numbering
4149 for the single precision registers. This will "just work"
4150 in GDB for little endian targets (we'll read eight bytes,
4151 starting in s0 and then progressing to s1), but will be
4152 reversed on big endian targets with VFP. This won't
4153 be a problem for the new Neon quad registers; you're supposed
4154 to use DW_OP_piece for those. */
4155 if (reg >= 64 && reg <= 95)
4156 {
4157 char name_buf[4];
4158
8c042590 4159 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4160 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4161 strlen (name_buf));
4162 }
4163
4164 /* VFP v3 / Neon registers. This range is also used for VFP v2
4165 registers, except that it now describes d0 instead of s0. */
4166 if (reg >= 256 && reg <= 287)
4167 {
4168 char name_buf[4];
4169
8c042590 4170 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4171 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4172 strlen (name_buf));
4173 }
4174
ff6f572f
DJ
4175 return -1;
4176}
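/* A few sample mappings implied by the ranges above (illustrative only):
   DWARF register 0 maps to r0 and 15 to the PC; 96 maps to the first FPA
   register (ARM_F0_REGNUM); 112 maps to the first iWMMXt data register
   (ARM_WR0_REGNUM); 64 is looked up as the user register "s0" and 256 as
   "d0".  Anything outside the recognised ranges yields -1.  */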
4177
26216b98
AC
4178/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4179static int
e7faf938 4180arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4181{
4182 int reg = regnum;
e7faf938 4183 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4184
ff6f572f
DJ
4185 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4186 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4187
4188 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4189 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4190
4191 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4192 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4193
26216b98
AC
4194 if (reg < NUM_GREGS)
4195 return SIM_ARM_R0_REGNUM + reg;
4196 reg -= NUM_GREGS;
4197
4198 if (reg < NUM_FREGS)
4199 return SIM_ARM_FP0_REGNUM + reg;
4200 reg -= NUM_FREGS;
4201
4202 if (reg < NUM_SREGS)
4203 return SIM_ARM_FPS_REGNUM + reg;
4204 reg -= NUM_SREGS;
4205
edefbb7c 4206 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4207}
34e8f22d 4208
a37b3cc0
AC
4209/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4210 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4211   It is thought that this is the floating-point register format on
4212 little-endian systems. */
c906108c 4213
ed9a39eb 4214static void
b508a996 4215convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4216 void *dbl, int endianess)
c906108c 4217{
a37b3cc0 4218 DOUBLEST d;
be8626e0
MD
4219
4220 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4221 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4222 else
4223 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4224 ptr, &d);
b508a996 4225 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4226}
4227
34e8f22d 4228static void
be8626e0
MD
4229convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4230 int endianess)
c906108c 4231{
a37b3cc0 4232 DOUBLEST d;
be8626e0 4233
b508a996 4234 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4235 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4236 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4237 else
4238 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4239 &d, dbl);
c906108c 4240}
ed9a39eb 4241
d9311bfa
AT
4242/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4243 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4244 NULL if an error occurs. BUF is freed. */
c906108c 4245
d9311bfa
AT
4246static gdb_byte *
4247extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4248 int old_len, int new_len)
4249{
4250 gdb_byte *new_buf;
4251 int bytes_to_read = new_len - old_len;
c906108c 4252
d9311bfa
AT
4253 new_buf = (gdb_byte *) xmalloc (new_len);
4254 memcpy (new_buf + bytes_to_read, buf, old_len);
4255 xfree (buf);
198cd59d 4256 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
d9311bfa
AT
4257 {
4258 xfree (new_buf);
4259 return NULL;
c906108c 4260 }
d9311bfa 4261 return new_buf;
c906108c
SS
4262}
4263
d9311bfa
AT
4264/* An IT block is at most the 2-byte IT instruction followed by
4265 four 4-byte instructions. The furthest back we must search to
4266 find an IT block that affects the current instruction is thus
4267 2 + 3 * 4 == 14 bytes. */
4268#define MAX_IT_BLOCK_PREFIX 14
177321bd 4269
d9311bfa
AT
4270/* Use a quick scan if there are more than this many bytes of
4271 code. */
4272#define IT_SCAN_THRESHOLD 32
177321bd 4273
d9311bfa
AT
4274/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4275 A breakpoint in an IT block may not be hit, depending on the
4276 condition flags. */
ad527d2e 4277static CORE_ADDR
d9311bfa 4278arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4279{
d9311bfa
AT
4280 gdb_byte *buf;
4281 char map_type;
4282 CORE_ADDR boundary, func_start;
4283 int buf_len;
4284 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4285 int i, any, last_it, last_it_count;
177321bd 4286
d9311bfa
AT
4287 /* If we are using BKPT breakpoints, none of this is necessary. */
4288 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4289 return bpaddr;
177321bd 4290
d9311bfa
AT
4291 /* ARM mode does not have this problem. */
4292 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4293 return bpaddr;
177321bd 4294
d9311bfa
AT
4295 /* We are setting a breakpoint in Thumb code that could potentially
4296 contain an IT block. The first step is to find how much Thumb
4297 code there is; we do not need to read outside of known Thumb
4298 sequences. */
4299 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4300 if (map_type == 0)
4301 /* Thumb-2 code must have mapping symbols to have a chance. */
4302 return bpaddr;
9dca5578 4303
d9311bfa 4304 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4305
d9311bfa
AT
4306 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4307 && func_start > boundary)
4308 boundary = func_start;
9dca5578 4309
d9311bfa
AT
4310 /* Search for a candidate IT instruction. We have to do some fancy
4311 footwork to distinguish a real IT instruction from the second
4312 half of a 32-bit instruction, but there is no need for that if
4313 there's no candidate. */
325fac50 4314 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4315 if (buf_len == 0)
4316 /* No room for an IT instruction. */
4317 return bpaddr;
c906108c 4318
d9311bfa 4319 buf = (gdb_byte *) xmalloc (buf_len);
198cd59d 4320 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
d9311bfa
AT
4321 return bpaddr;
4322 any = 0;
4323 for (i = 0; i < buf_len; i += 2)
c906108c 4324 {
d9311bfa
AT
4325 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4326 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4327 {
d9311bfa
AT
4328 any = 1;
4329 break;
25b41d01 4330 }
c906108c 4331 }
d9311bfa
AT
4332
4333 if (any == 0)
c906108c 4334 {
d9311bfa
AT
4335 xfree (buf);
4336 return bpaddr;
f9d67f43
DJ
4337 }
4338
4339 /* OK, the code bytes before this instruction contain at least one
4340 halfword which resembles an IT instruction. We know that it's
4341 Thumb code, but there are still two possibilities. Either the
4342 halfword really is an IT instruction, or it is the second half of
4343 a 32-bit Thumb instruction. The only way we can tell is to
4344 scan forwards from a known instruction boundary. */
4345 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4346 {
4347 int definite;
4348
4349 /* There's a lot of code before this instruction. Start with an
4350 optimistic search; it's easy to recognize halfwords that can
4351 not be the start of a 32-bit instruction, and use that to
4352 lock on to the instruction boundaries. */
4353 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4354 if (buf == NULL)
4355 return bpaddr;
4356 buf_len = IT_SCAN_THRESHOLD;
4357
4358 definite = 0;
4359 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4360 {
4361 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4362 if (thumb_insn_size (inst1) == 2)
4363 {
4364 definite = 1;
4365 break;
4366 }
4367 }
4368
4369 /* At this point, if DEFINITE, BUF[I] is the first place we
4370 are sure that we know the instruction boundaries, and it is far
4371 enough from BPADDR that we could not miss an IT instruction
4372 affecting BPADDR. If ! DEFINITE, give up - start from a
4373 known boundary. */
4374 if (! definite)
4375 {
0963b4bd
MS
4376 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4377 bpaddr - boundary);
f9d67f43
DJ
4378 if (buf == NULL)
4379 return bpaddr;
4380 buf_len = bpaddr - boundary;
4381 i = 0;
4382 }
4383 }
4384 else
4385 {
4386 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4387 if (buf == NULL)
4388 return bpaddr;
4389 buf_len = bpaddr - boundary;
4390 i = 0;
4391 }
4392
4393 /* Scan forwards. Find the last IT instruction before BPADDR. */
4394 last_it = -1;
4395 last_it_count = 0;
4396 while (i < buf_len)
4397 {
4398 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4399 last_it_count--;
4400 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4401 {
4402 last_it = i;
4403 if (inst1 & 0x0001)
4404 last_it_count = 4;
4405 else if (inst1 & 0x0002)
4406 last_it_count = 3;
4407 else if (inst1 & 0x0004)
4408 last_it_count = 2;
4409 else
4410 last_it_count = 1;
4411 }
4412 i += thumb_insn_size (inst1);
4413 }
4414
4415 xfree (buf);
4416
4417 if (last_it == -1)
4418 /* There wasn't really an IT instruction after all. */
4419 return bpaddr;
4420
4421 if (last_it_count < 1)
4422 /* It was too far away. */
4423 return bpaddr;
4424
4425 /* This really is a trouble spot. Move the breakpoint to the IT
4426 instruction. */
4427 return bpaddr - buf_len + last_it;
4428}
4429
cca44b1b 4430/* ARM displaced stepping support.
c906108c 4431
cca44b1b 4432 Generally ARM displaced stepping works as follows:
c906108c 4433
cca44b1b 4434 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4435 arm_process_displaced_insn. Depending on the type of instruction, it is
4436 then copied to a scratch location, possibly in a modified form. The
4437 copy_* set of functions performs such modification, as necessary. A
4438 breakpoint is placed after the modified instruction in the scratch space
4439 to return control to GDB. Note in particular that instructions which
4440 modify the PC will no longer do so after modification.
c5aa993b 4441
cca44b1b
JB
4442 2. The instruction is single-stepped, by setting the PC to the scratch
4443 location address, and resuming. Control returns to GDB when the
4444 breakpoint is hit.
c5aa993b 4445
cca44b1b
JB
4446 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4447 function used for the current instruction. This function's job is to
4448 put the CPU/memory state back to what it would have been if the
4449 instruction had been executed unmodified in its original location. */
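/* For instance, a PC-relative instruction such as "pld [pc, #8]" copied
   verbatim to the scratch pad would reference the wrong address.  The
   corresponding copy_* routine (see install_preload below) instead saves a
   scratch register, materialises the original PC value in it, rewrites the
   instruction to use that register, and lets the cleanup_* routine restore
   the register once the single-step has completed.  (Illustrative summary;
   the individual copy_* routines document their own rewrites.)  */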
c5aa993b 4450
cca44b1b
JB
4451/* NOP instruction (mov r0, r0). */
4452#define ARM_NOP 0xe1a00000
34518530 4453#define THUMB_NOP 0x4600
cca44b1b
JB
4454
4455/* Helper for register reads for displaced stepping. In particular, this
4456 returns the PC as it would be seen by the instruction at its original
4457 location. */
4458
4459ULONGEST
36073a92
YQ
4460displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4461 int regno)
cca44b1b
JB
4462{
4463 ULONGEST ret;
36073a92 4464 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4465
bf9f652a 4466 if (regno == ARM_PC_REGNUM)
cca44b1b 4467 {
4db71c0b
YQ
4468 /* Compute pipeline offset:
4469 - When executing an ARM instruction, PC reads as the address of the
4470 current instruction plus 8.
4471 - When executing a Thumb instruction, PC reads as the address of the
4472 current instruction plus 4. */
4473
36073a92 4474 if (!dsc->is_thumb)
4db71c0b
YQ
4475 from += 8;
4476 else
4477 from += 4;
4478
cca44b1b
JB
4479 if (debug_displaced)
4480 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4481 (unsigned long) from);
4482 return (ULONGEST) from;
cca44b1b 4483 }
c906108c 4484 else
cca44b1b
JB
4485 {
4486 regcache_cooked_read_unsigned (regs, regno, &ret);
4487 if (debug_displaced)
4488 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4489 regno, (unsigned long) ret);
4490 return ret;
4491 }
c906108c
SS
4492}
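/* Illustrative reads (hypothetical addresses): for an ARM-mode instruction
   whose original address is 0x8000, displaced_read_reg on the PC returns
   0x8008; for a Thumb instruction at the same address it returns 0x8004.
   Reads of any other register come straight from the regcache.  */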
4493
cca44b1b
JB
4494static int
4495displaced_in_arm_mode (struct regcache *regs)
4496{
4497 ULONGEST ps;
9779414d 4498 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4499
cca44b1b 4500 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4501
9779414d 4502 return (ps & t_bit) == 0;
cca44b1b 4503}
66e810cd 4504
cca44b1b 4505/* Write to the PC as from a branch instruction. */
c906108c 4506
cca44b1b 4507static void
36073a92
YQ
4508branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4509 ULONGEST val)
c906108c 4510{
36073a92 4511 if (!dsc->is_thumb)
cca44b1b
JB
4512 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4513 architecture versions < 6. */
0963b4bd
MS
4514 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4515 val & ~(ULONGEST) 0x3);
cca44b1b 4516 else
0963b4bd
MS
4517 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4518 val & ~(ULONGEST) 0x1);
cca44b1b 4519}
66e810cd 4520
cca44b1b
JB
4521/* Write to the PC as from a branch-exchange instruction. */
4522
4523static void
4524bx_write_pc (struct regcache *regs, ULONGEST val)
4525{
4526 ULONGEST ps;
9779414d 4527 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4528
4529 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4530
4531 if ((val & 1) == 1)
c906108c 4532 {
9779414d 4533 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4534 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4535 }
4536 else if ((val & 2) == 0)
4537 {
9779414d 4538 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4539 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4540 }
4541 else
4542 {
cca44b1b
JB
4543 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4544 mode, align dest to 4 bytes). */
4545 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4546 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4547 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4548 }
4549}
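/* Illustrative destinations (hypothetical values): bx_write_pc with
   val == 0x8001 selects Thumb state and sets the PC to 0x8000; with
   val == 0x8000 it selects ARM state and leaves the PC at 0x8000; with
   val == 0x8002 it hits the unpredictable case, warns, and falls back to
   ARM state with the PC forced down to 0x8000.  */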
ed9a39eb 4550
cca44b1b 4551/* Write to the PC as if from a load instruction. */
ed9a39eb 4552
34e8f22d 4553static void
36073a92
YQ
4554load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4555 ULONGEST val)
ed9a39eb 4556{
cca44b1b
JB
4557 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4558 bx_write_pc (regs, val);
4559 else
36073a92 4560 branch_write_pc (regs, dsc, val);
cca44b1b 4561}
be8626e0 4562
cca44b1b
JB
4563/* Write to the PC as if from an ALU instruction. */
4564
4565static void
36073a92
YQ
4566alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4567 ULONGEST val)
cca44b1b 4568{
36073a92 4569 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4570 bx_write_pc (regs, val);
4571 else
36073a92 4572 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4573}
4574
4575/* Helper for writing to registers for displaced stepping. Writing to the PC
4576   has varying effects depending on the instruction which does the write:
4577 this is controlled by the WRITE_PC argument. */
4578
4579void
4580displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4581 int regno, ULONGEST val, enum pc_write_style write_pc)
4582{
bf9f652a 4583 if (regno == ARM_PC_REGNUM)
08216dd7 4584 {
cca44b1b
JB
4585 if (debug_displaced)
4586 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4587 (unsigned long) val);
4588 switch (write_pc)
08216dd7 4589 {
cca44b1b 4590 case BRANCH_WRITE_PC:
36073a92 4591 branch_write_pc (regs, dsc, val);
08216dd7
RE
4592 break;
4593
cca44b1b
JB
4594 case BX_WRITE_PC:
4595 bx_write_pc (regs, val);
4596 break;
4597
4598 case LOAD_WRITE_PC:
36073a92 4599 load_write_pc (regs, dsc, val);
cca44b1b
JB
4600 break;
4601
4602 case ALU_WRITE_PC:
36073a92 4603 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4604 break;
4605
4606 case CANNOT_WRITE_PC:
4607 warning (_("Instruction wrote to PC in an unexpected way when "
4608 "single-stepping"));
08216dd7
RE
4609 break;
4610
4611 default:
97b9747c
JB
4612 internal_error (__FILE__, __LINE__,
4613 _("Invalid argument to displaced_write_reg"));
08216dd7 4614 }
b508a996 4615
cca44b1b 4616 dsc->wrote_to_pc = 1;
b508a996 4617 }
ed9a39eb 4618 else
b508a996 4619 {
cca44b1b
JB
4620 if (debug_displaced)
4621 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4622 regno, (unsigned long) val);
4623 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4624 }
34e8f22d
RE
4625}
4626
cca44b1b
JB
4627/* This function is used to concisely determine if an instruction INSN
4628 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4629 corresponding fields of BITMASK set to 0b1111. The function
4630   returns 1 if any of these fields in INSN reference the PC
4631 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4632
4633static int
cca44b1b 4634insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4635{
cca44b1b 4636 uint32_t lowbit = 1;
67255d04 4637
cca44b1b
JB
4638 while (bitmask != 0)
4639 {
4640 uint32_t mask;
44e1a9eb 4641
cca44b1b
JB
4642 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4643 ;
67255d04 4644
cca44b1b
JB
4645 if (!lowbit)
4646 break;
67255d04 4647
cca44b1b 4648 mask = lowbit * 0xf;
67255d04 4649
cca44b1b
JB
4650 if ((insn & mask) == mask)
4651 return 1;
4652
4653 bitmask &= ~mask;
67255d04
RE
4654 }
4655
cca44b1b
JB
4656 return 0;
4657}
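/* Example uses from the copy routines below (the masks are the ones they
   actually pass): insn_references_pc (insn, 0x000f0000ul) is nonzero
   exactly when the Rn field in bits 16-19 of INSN is 0b1111, i.e. the PC,
   while 0x000f000ful additionally checks the Rm field in bits 0-3.  */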
2af48f68 4658
cca44b1b
JB
4659/* The simplest copy function. Many instructions have the same effect no
4660 matter what address they are executed at: in those cases, use this. */
67255d04 4661
cca44b1b 4662static int
7ff120b4
YQ
4663arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4664 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4665{
4666 if (debug_displaced)
4667 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4668 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4669 iname);
67255d04 4670
cca44b1b 4671 dsc->modinsn[0] = insn;
67255d04 4672
cca44b1b
JB
4673 return 0;
4674}
4675
34518530
YQ
4676static int
4677thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4678 uint16_t insn2, const char *iname,
4679 struct displaced_step_closure *dsc)
4680{
4681 if (debug_displaced)
4682 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4683 "opcode/class '%s' unmodified\n", insn1, insn2,
4684 iname);
4685
4686 dsc->modinsn[0] = insn1;
4687 dsc->modinsn[1] = insn2;
4688 dsc->numinsns = 2;
4689
4690 return 0;
4691}
4692
4693/* Copy a 16-bit Thumb instruction (a Thumb-1 or 16-bit Thumb-2 encoding)
4694   without any modification.  */
4695static int
615234c1 4696thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4697 const char *iname,
4698 struct displaced_step_closure *dsc)
4699{
4700 if (debug_displaced)
4701 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4702 "opcode/class '%s' unmodified\n", insn,
4703 iname);
4704
4705 dsc->modinsn[0] = insn;
4706
4707 return 0;
4708}
4709
cca44b1b
JB
4710/* Preload instructions with immediate offset. */
4711
4712static void
6e39997a 4713cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4714 struct regcache *regs, struct displaced_step_closure *dsc)
4715{
4716 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4717 if (!dsc->u.preload.immed)
4718 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4719}
4720
7ff120b4
YQ
4721static void
4722install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4723 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4724{
cca44b1b 4725 ULONGEST rn_val;
cca44b1b
JB
4726 /* Preload instructions:
4727
4728 {pli/pld} [rn, #+/-imm]
4729 ->
4730 {pli/pld} [r0, #+/-imm]. */
4731
36073a92
YQ
4732 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4733 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4734 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4735 dsc->u.preload.immed = 1;
4736
cca44b1b 4737 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4738}
4739
cca44b1b 4740static int
7ff120b4 4741arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4742 struct displaced_step_closure *dsc)
4743{
4744 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4745
7ff120b4
YQ
4746 if (!insn_references_pc (insn, 0x000f0000ul))
4747 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4748
4749 if (debug_displaced)
4750 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4751 (unsigned long) insn);
4752
7ff120b4
YQ
4753 dsc->modinsn[0] = insn & 0xfff0ffff;
4754
4755 install_preload (gdbarch, regs, dsc, rn);
4756
4757 return 0;
4758}
4759
34518530
YQ
4760static int
4761thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4762 struct regcache *regs, struct displaced_step_closure *dsc)
4763{
4764 unsigned int rn = bits (insn1, 0, 3);
4765 unsigned int u_bit = bit (insn1, 7);
4766 int imm12 = bits (insn2, 0, 11);
4767 ULONGEST pc_val;
4768
4769 if (rn != ARM_PC_REGNUM)
4770 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4771
4772   /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4773 PLD (literal) Encoding T1. */
4774 if (debug_displaced)
4775 fprintf_unfiltered (gdb_stdlog,
4776 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4777 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4778 imm12);
4779
4780 if (!u_bit)
4781 imm12 = -1 * imm12;
4782
4783 /* Rewrite instruction {pli/pld} PC imm12 into:
4784 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4785
4786 {pli/pld} [r0, r1]
4787
4788 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4789
4790 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4791 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4792
4793 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4794
4795 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4796 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4797 dsc->u.preload.immed = 0;
4798
4799 /* {pli/pld} [r0, r1] */
4800 dsc->modinsn[0] = insn1 & 0xfff0;
4801 dsc->modinsn[1] = 0xf001;
4802 dsc->numinsns = 2;
4803
4804 dsc->cleanup = &cleanup_preload;
4805 return 0;
4806}
4807
7ff120b4
YQ
4808/* Preload instructions with register offset. */
4809
4810static void
4811install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4812 struct displaced_step_closure *dsc, unsigned int rn,
4813 unsigned int rm)
4814{
4815 ULONGEST rn_val, rm_val;
4816
cca44b1b
JB
4817 /* Preload register-offset instructions:
4818
4819 {pli/pld} [rn, rm {, shift}]
4820 ->
4821 {pli/pld} [r0, r1 {, shift}]. */
4822
36073a92
YQ
4823 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4824 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4825 rn_val = displaced_read_reg (regs, dsc, rn);
4826 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4827 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4828 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4829 dsc->u.preload.immed = 0;
4830
cca44b1b 4831 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4832}
4833
4834static int
4835arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4836 struct regcache *regs,
4837 struct displaced_step_closure *dsc)
4838{
4839 unsigned int rn = bits (insn, 16, 19);
4840 unsigned int rm = bits (insn, 0, 3);
4841
4842
4843 if (!insn_references_pc (insn, 0x000f000ful))
4844 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4845
4846 if (debug_displaced)
4847 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4848 (unsigned long) insn);
4849
4850 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4851
7ff120b4 4852 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4853 return 0;
4854}
4855
4856/* Copy/cleanup coprocessor load and store instructions. */
4857
4858static void
6e39997a 4859cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4860 struct regcache *regs,
4861 struct displaced_step_closure *dsc)
4862{
36073a92 4863 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4864
4865 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4866
4867 if (dsc->u.ldst.writeback)
4868 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4869}
4870
7ff120b4
YQ
4871static void
4872install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4873 struct displaced_step_closure *dsc,
4874 int writeback, unsigned int rn)
cca44b1b 4875{
cca44b1b 4876 ULONGEST rn_val;
cca44b1b 4877
cca44b1b
JB
4878 /* Coprocessor load/store instructions:
4879
4880 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4881 ->
4882 {stc/stc2} [r0, #+/-imm].
4883
4884 ldc/ldc2 are handled identically. */
4885
36073a92
YQ
4886 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4887 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4888 /* PC should be 4-byte aligned. */
4889 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4890 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4891
7ff120b4 4892 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4893 dsc->u.ldst.rn = rn;
4894
7ff120b4
YQ
4895 dsc->cleanup = &cleanup_copro_load_store;
4896}
4897
4898static int
4899arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4900 struct regcache *regs,
4901 struct displaced_step_closure *dsc)
4902{
4903 unsigned int rn = bits (insn, 16, 19);
4904
4905 if (!insn_references_pc (insn, 0x000f0000ul))
4906 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4907
4908 if (debug_displaced)
4909 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4910 "load/store insn %.8lx\n", (unsigned long) insn);
4911
cca44b1b
JB
4912 dsc->modinsn[0] = insn & 0xfff0ffff;
4913
7ff120b4 4914 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4915
4916 return 0;
4917}
4918
34518530
YQ
4919static int
4920thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4921 uint16_t insn2, struct regcache *regs,
4922 struct displaced_step_closure *dsc)
4923{
4924 unsigned int rn = bits (insn1, 0, 3);
4925
4926 if (rn != ARM_PC_REGNUM)
4927 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4928 "copro load/store", dsc);
4929
4930 if (debug_displaced)
4931 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4932 "load/store insn %.4x%.4x\n", insn1, insn2);
4933
4934 dsc->modinsn[0] = insn1 & 0xfff0;
4935 dsc->modinsn[1] = insn2;
4936 dsc->numinsns = 2;
4937
4938   /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4939      do not support writeback, so pass 0.  */
4940 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4941
4942 return 0;
4943}
4944
cca44b1b
JB
4945/* Clean up branch instructions (actually perform the branch, by setting
4946 PC). */
4947
4948static void
6e39997a 4949cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4950 struct displaced_step_closure *dsc)
4951{
36073a92 4952 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4953 int branch_taken = condition_true (dsc->u.branch.cond, status);
4954 enum pc_write_style write_pc = dsc->u.branch.exchange
4955 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4956
4957 if (!branch_taken)
4958 return;
4959
4960 if (dsc->u.branch.link)
4961 {
8c8dba6d
YQ
4962 /* The value of LR should be the next insn of current one. In order
4963	 not to confuse logic handling later insn `bx lr', if current insn mode
4964 is Thumb, the bit 0 of LR value should be set to 1. */
4965 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4966
4967 if (dsc->is_thumb)
4968 next_insn_addr |= 0x1;
4969
4970 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4971 CANNOT_WRITE_PC);
cca44b1b
JB
4972 }
4973
bf9f652a 4974 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4975}
4976
4977/* Copy B/BL/BLX instructions with immediate destinations. */
4978
7ff120b4
YQ
4979static void
4980install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4981 struct displaced_step_closure *dsc,
4982 unsigned int cond, int exchange, int link, long offset)
4983{
4984 /* Implement "BL<cond> <label>" as:
4985
4986 Preparation: cond <- instruction condition
4987 Insn: mov r0, r0 (nop)
4988 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4989
4990 B<cond> similar, but don't set r14 in cleanup. */
4991
4992 dsc->u.branch.cond = cond;
4993 dsc->u.branch.link = link;
4994 dsc->u.branch.exchange = exchange;
4995
2b16b2e3
YQ
4996 dsc->u.branch.dest = dsc->insn_addr;
4997 if (link && exchange)
4998 /* For BLX, offset is computed from the Align (PC, 4). */
4999 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5000
7ff120b4 5001 if (dsc->is_thumb)
2b16b2e3 5002 dsc->u.branch.dest += 4 + offset;
7ff120b4 5003 else
2b16b2e3 5004 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
5005
5006 dsc->cleanup = &cleanup_branch;
5007}
cca44b1b 5008static int
7ff120b4
YQ
5009arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5010 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5011{
5012 unsigned int cond = bits (insn, 28, 31);
5013 int exchange = (cond == 0xf);
5014 int link = exchange || bit (insn, 24);
cca44b1b
JB
5015 long offset;
5016
5017 if (debug_displaced)
5018 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5019 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5020 (unsigned long) insn);
cca44b1b
JB
5021 if (exchange)
5022 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5023 then arrange the switch into Thumb mode. */
5024 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5025 else
5026 offset = bits (insn, 0, 23) << 2;
5027
5028 if (bit (offset, 25))
5029 offset = offset | ~0x3ffffff;
5030
cca44b1b
JB
5031 dsc->modinsn[0] = ARM_NOP;
5032
7ff120b4 5033 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5034 return 0;
5035}
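/* Worked example (illustrative encoding, not a specific program): an ARM
   "bl" whose 24-bit immediate field holds 0x000010 yields
   offset = 0x10 << 2 = 64 above; install_b_bl_blx then records a branch
   destination of insn_addr + 8 + 64, the +8 accounting for the ARM
   pipeline offset, and cleanup_branch performs the actual jump (and the LR
   write) once the nop in the scratch pad has been stepped.  */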
5036
34518530
YQ
5037static int
5038thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5039 uint16_t insn2, struct regcache *regs,
5040 struct displaced_step_closure *dsc)
5041{
5042 int link = bit (insn2, 14);
5043 int exchange = link && !bit (insn2, 12);
5044 int cond = INST_AL;
5045 long offset = 0;
5046 int j1 = bit (insn2, 13);
5047 int j2 = bit (insn2, 11);
5048 int s = sbits (insn1, 10, 10);
5049 int i1 = !(j1 ^ bit (insn1, 10));
5050 int i2 = !(j2 ^ bit (insn1, 10));
5051
5052 if (!link && !exchange) /* B */
5053 {
5054 offset = (bits (insn2, 0, 10) << 1);
5055 if (bit (insn2, 12)) /* Encoding T4 */
5056 {
5057 offset |= (bits (insn1, 0, 9) << 12)
5058 | (i2 << 22)
5059 | (i1 << 23)
5060 | (s << 24);
5061 cond = INST_AL;
5062 }
5063 else /* Encoding T3 */
5064 {
5065 offset |= (bits (insn1, 0, 5) << 12)
5066 | (j1 << 18)
5067 | (j2 << 19)
5068 | (s << 20);
5069 cond = bits (insn1, 6, 9);
5070 }
5071 }
5072 else
5073 {
5074 offset = (bits (insn1, 0, 9) << 12);
5075 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5076 offset |= exchange ?
5077 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5078 }
5079
5080 if (debug_displaced)
5081 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5082 "%.4x %.4x with offset %.8lx\n",
5083 link ? (exchange) ? "blx" : "bl" : "b",
5084 insn1, insn2, offset);
5085
5086 dsc->modinsn[0] = THUMB_NOP;
5087
5088 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5089 return 0;
5090}
5091
5092/* Copy Thumb B instructions. */
5093static int
615234c1 5094thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5095 struct displaced_step_closure *dsc)
5096{
5097 unsigned int cond = 0;
5098 int offset = 0;
5099 unsigned short bit_12_15 = bits (insn, 12, 15);
5100 CORE_ADDR from = dsc->insn_addr;
5101
5102 if (bit_12_15 == 0xd)
5103 {
5104 /* offset = SignExtend (imm8:0, 32) */
5105 offset = sbits ((insn << 1), 0, 8);
5106 cond = bits (insn, 8, 11);
5107 }
5108 else if (bit_12_15 == 0xe) /* Encoding T2 */
5109 {
5110 offset = sbits ((insn << 1), 0, 11);
5111 cond = INST_AL;
5112 }
5113
5114 if (debug_displaced)
5115 fprintf_unfiltered (gdb_stdlog,
5116 "displaced: copying b immediate insn %.4x "
5117 "with offset %d\n", insn, offset);
5118
5119 dsc->u.branch.cond = cond;
5120 dsc->u.branch.link = 0;
5121 dsc->u.branch.exchange = 0;
5122 dsc->u.branch.dest = from + 4 + offset;
5123
5124 dsc->modinsn[0] = THUMB_NOP;
5125
5126 dsc->cleanup = &cleanup_branch;
5127
5128 return 0;
5129}
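/* As an illustration with the Thumb self-branch 0xe7fe (encoding T2,
   imm11 = 0x7fe): sbits ((insn << 1), 0, 11) yields -4, so the branch
   destination is from + 4 - 4 = from, matching the original "b ."
   semantics.  */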
5130
cca44b1b
JB
5131/* Copy BX/BLX with register-specified destinations. */
5132
7ff120b4
YQ
5133static void
5134install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5135 struct displaced_step_closure *dsc, int link,
5136 unsigned int cond, unsigned int rm)
cca44b1b 5137{
cca44b1b
JB
5138 /* Implement "{BX,BLX}<cond> <reg>" as:
5139
5140 Preparation: cond <- instruction condition
5141 Insn: mov r0, r0 (nop)
5142 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5143
5144 Don't set r14 in cleanup for BX. */
5145
36073a92 5146 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5147
5148 dsc->u.branch.cond = cond;
5149 dsc->u.branch.link = link;
cca44b1b 5150
7ff120b4 5151 dsc->u.branch.exchange = 1;
cca44b1b
JB
5152
5153 dsc->cleanup = &cleanup_branch;
7ff120b4 5154}
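/* Note that because dsc->u.branch.exchange is set here, cleanup_branch
   writes the destination using BX_WRITE_PC, so bit 0 of the value read
   from Rm selects the resulting execution state (1 = Thumb, 0 = ARM),
   just as the original BX/BLX would.  */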
cca44b1b 5155
7ff120b4
YQ
5156static int
5157arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5158 struct regcache *regs, struct displaced_step_closure *dsc)
5159{
5160 unsigned int cond = bits (insn, 28, 31);
5161 /* BX: x12xxx1x
5162 BLX: x12xxx3x. */
5163 int link = bit (insn, 5);
5164 unsigned int rm = bits (insn, 0, 3);
5165
5166 if (debug_displaced)
5167 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5168 (unsigned long) insn);
5169
5170 dsc->modinsn[0] = ARM_NOP;
5171
5172 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5173 return 0;
5174}
5175
34518530
YQ
5176static int
5177thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5178 struct regcache *regs,
5179 struct displaced_step_closure *dsc)
5180{
5181 int link = bit (insn, 7);
5182 unsigned int rm = bits (insn, 3, 6);
5183
5184 if (debug_displaced)
5185 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5186 (unsigned short) insn);
5187
5188 dsc->modinsn[0] = THUMB_NOP;
5189
5190 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5191
5192 return 0;
5193}
5194
5195
0963b4bd 5196/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5197
5198static void
6e39997a 5199cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5200 struct regcache *regs, struct displaced_step_closure *dsc)
5201{
36073a92 5202 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5203 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5204 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5205 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5206}
5207
5208static int
7ff120b4
YQ
5209arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5210 struct displaced_step_closure *dsc)
cca44b1b
JB
5211{
5212 unsigned int rn = bits (insn, 16, 19);
5213 unsigned int rd = bits (insn, 12, 15);
5214 unsigned int op = bits (insn, 21, 24);
5215 int is_mov = (op == 0xd);
5216 ULONGEST rd_val, rn_val;
cca44b1b
JB
5217
5218 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5219 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5220
5221 if (debug_displaced)
5222 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5223 "%.8lx\n", is_mov ? "move" : "ALU",
5224 (unsigned long) insn);
5225
5226 /* Instruction is of form:
5227
5228 <op><cond> rd, [rn,] #imm
5229
5230 Rewrite as:
5231
5232 Preparation: tmp1, tmp2 <- r0, r1;
5233 r0, r1 <- rd, rn
5234 Insn: <op><cond> r0, r1, #imm
5235 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5236 */
5237
36073a92
YQ
5238 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5239 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5240 rn_val = displaced_read_reg (regs, dsc, rn);
5241 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5242 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5243 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5244 dsc->rd = rd;
5245
5246 if (is_mov)
5247 dsc->modinsn[0] = insn & 0xfff00fff;
5248 else
5249 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5250
5251 dsc->cleanup = &cleanup_alu_imm;
5252
5253 return 0;
5254}
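/* Worked example (register numbers are arbitrary): for "add r3, pc, #4"
   the preparation copies r3 into r0 and the PC value (from + 8) into r1,
   the executed copy becomes "add r0, r1, #4" (the rd and rn fields are
   rewritten to 0 and 1), and cleanup_alu_imm moves the result from r0
   into r3 before restoring r0 and r1.  */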
5255
34518530
YQ
5256static int
5257thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5258 uint16_t insn2, struct regcache *regs,
5259 struct displaced_step_closure *dsc)
5260{
5261 unsigned int op = bits (insn1, 5, 8);
5262 unsigned int rn, rm, rd;
5263 ULONGEST rd_val, rn_val;
5264
5265 rn = bits (insn1, 0, 3); /* Rn */
5266 rm = bits (insn2, 0, 3); /* Rm */
5267 rd = bits (insn2, 8, 11); /* Rd */
5268
5269 /* This routine is only called for the MOV instruction. */
5270 gdb_assert (op == 0x2 && rn == 0xf);
5271
5272 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5273 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5274
5275 if (debug_displaced)
5276 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5277 "ALU", insn1, insn2);
5278
5279 /* Instruction is of form:
5280
5281 <op><cond> rd, [rn,] #imm
5282
5283 Rewrite as:
5284
5285 Preparation: tmp1, tmp2 <- r0, r1;
5286 r0, r1 <- rd, rn
5287 Insn: <op><cond> r0, r1, #imm
5288 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5289 */
5290
5291 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5292 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5293 rn_val = displaced_read_reg (regs, dsc, rn);
5294 rd_val = displaced_read_reg (regs, dsc, rd);
5295 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5296 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5297 dsc->rd = rd;
5298
5299 dsc->modinsn[0] = insn1;
5300 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5301 dsc->numinsns = 2;
5302
5303 dsc->cleanup = &cleanup_alu_imm;
5304
5305 return 0;
5306}
5307
cca44b1b
JB
5308/* Copy/cleanup arithmetic/logic insns with register RHS. */
5309
5310static void
6e39997a 5311cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5312 struct regcache *regs, struct displaced_step_closure *dsc)
5313{
5314 ULONGEST rd_val;
5315 int i;
5316
36073a92 5317 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5318
5319 for (i = 0; i < 3; i++)
5320 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5321
5322 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5323}
5324
7ff120b4
YQ
5325static void
5326install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5327 struct displaced_step_closure *dsc,
5328 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5329{
cca44b1b 5330 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5331
cca44b1b
JB
5332 /* Instruction is of form:
5333
5334 <op><cond> rd, [rn,] rm [, <shift>]
5335
5336 Rewrite as:
5337
5338 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5339 r0, r1, r2 <- rd, rn, rm
ef713951 5340 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5341 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5342 */
5343
36073a92
YQ
5344 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5345 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5346 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5347 rd_val = displaced_read_reg (regs, dsc, rd);
5348 rn_val = displaced_read_reg (regs, dsc, rn);
5349 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5350 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5351 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5352 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5353 dsc->rd = rd;
5354
7ff120b4
YQ
5355 dsc->cleanup = &cleanup_alu_reg;
5356}
5357
5358static int
5359arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5360 struct displaced_step_closure *dsc)
5361{
5362 unsigned int op = bits (insn, 21, 24);
5363 int is_mov = (op == 0xd);
5364
5365 if (!insn_references_pc (insn, 0x000ff00ful))
5366 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5367
5368 if (debug_displaced)
5369 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5370 is_mov ? "move" : "ALU", (unsigned long) insn);
5371
cca44b1b
JB
5372 if (is_mov)
5373 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5374 else
5375 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5376
7ff120b4
YQ
5377 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5378 bits (insn, 0, 3));
cca44b1b
JB
5379 return 0;
5380}
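/* Worked example (registers are arbitrary): for "mov pc, r7" the
   preparation copies r7 into r2, the executed copy becomes "mov r0, r2",
   and cleanup_alu_reg writes the result into the PC using ALU_WRITE_PC
   semantics before restoring r0-r2.  */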
5381
34518530
YQ
5382static int
5383thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5384 struct regcache *regs,
5385 struct displaced_step_closure *dsc)
5386{
ef713951 5387 unsigned rm, rd;
34518530 5388
ef713951
YQ
5389 rm = bits (insn, 3, 6);
5390 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5391
ef713951 5392 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5393 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5394
5395 if (debug_displaced)
ef713951
YQ
5396 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5397 (unsigned short) insn);
34518530 5398
ef713951 5399 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5400
ef713951 5401 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5402
5403 return 0;
5404}
5405
cca44b1b
JB
5406/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5407
5408static void
6e39997a 5409cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5410 struct regcache *regs,
5411 struct displaced_step_closure *dsc)
5412{
36073a92 5413 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5414 int i;
5415
5416 for (i = 0; i < 4; i++)
5417 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5418
5419 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5420}
5421
7ff120b4
YQ
5422static void
5423install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5424 struct displaced_step_closure *dsc,
5425 unsigned int rd, unsigned int rn, unsigned int rm,
5426 unsigned rs)
cca44b1b 5427{
7ff120b4 5428 int i;
cca44b1b 5429 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5430
cca44b1b
JB
5431 /* Instruction is of form:
5432
5433 <op><cond> rd, [rn,] rm, <shift> rs
5434
5435 Rewrite as:
5436
5437 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5438 r0, r1, r2, r3 <- rd, rn, rm, rs
5439 Insn: <op><cond> r0, r1, r2, <shift> r3
5440 Cleanup: tmp5 <- r0
5441 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5442 rd <- tmp5
5443 */
5444
5445 for (i = 0; i < 4; i++)
36073a92 5446 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5447
36073a92
YQ
5448 rd_val = displaced_read_reg (regs, dsc, rd);
5449 rn_val = displaced_read_reg (regs, dsc, rn);
5450 rm_val = displaced_read_reg (regs, dsc, rm);
5451 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5452 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5453 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5454 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5455 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5456 dsc->rd = rd;
7ff120b4
YQ
5457 dsc->cleanup = &cleanup_alu_shifted_reg;
5458}
5459
5460static int
5461arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5462 struct regcache *regs,
5463 struct displaced_step_closure *dsc)
5464{
5465 unsigned int op = bits (insn, 21, 24);
5466 int is_mov = (op == 0xd);
5467 unsigned int rd, rn, rm, rs;
5468
5469 if (!insn_references_pc (insn, 0x000fff0ful))
5470 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5471
5472 if (debug_displaced)
5473 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5474 "%.8lx\n", is_mov ? "move" : "ALU",
5475 (unsigned long) insn);
5476
5477 rn = bits (insn, 16, 19);
5478 rm = bits (insn, 0, 3);
5479 rs = bits (insn, 8, 11);
5480 rd = bits (insn, 12, 15);
cca44b1b
JB
5481
5482 if (is_mov)
5483 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5484 else
5485 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5486
7ff120b4 5487 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5488
5489 return 0;
5490}
5491
5492/* Clean up load instructions. */
5493
5494static void
6e39997a 5495cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5496 struct displaced_step_closure *dsc)
5497{
5498 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5499
36073a92 5500 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5501 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5502 rt_val2 = displaced_read_reg (regs, dsc, 1);
5503 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5504
5505 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5506 if (dsc->u.ldst.xfersize > 4)
5507 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5508 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5509 if (!dsc->u.ldst.immed)
5510 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5511
5512 /* Handle register writeback. */
5513 if (dsc->u.ldst.writeback)
5514 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5515 /* Put result in right place. */
5516 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5517 if (dsc->u.ldst.xfersize == 8)
5518 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5519}
5520
5521/* Clean up store instructions. */
5522
5523static void
6e39997a 5524cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5525 struct displaced_step_closure *dsc)
5526{
36073a92 5527 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5528
5529 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5530 if (dsc->u.ldst.xfersize > 4)
5531 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5532 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5533 if (!dsc->u.ldst.immed)
5534 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5535 if (!dsc->u.ldst.restore_r4)
5536 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5537
5538 /* Writeback. */
5539 if (dsc->u.ldst.writeback)
5540 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5541}
5542
5543/* Copy "extra" load/store instructions. These are halfword/doubleword
5544 transfers, which have a different encoding to byte/word transfers. */
5545
5546static int
550dc4e2 5547arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5548 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5549{
5550 unsigned int op1 = bits (insn, 20, 24);
5551 unsigned int op2 = bits (insn, 5, 6);
5552 unsigned int rt = bits (insn, 12, 15);
5553 unsigned int rn = bits (insn, 16, 19);
5554 unsigned int rm = bits (insn, 0, 3);
5555 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5556 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5557 int immed = (op1 & 0x4) != 0;
5558 int opcode;
5559 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5560
5561 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5562 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5563
5564 if (debug_displaced)
5565 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5566 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5567 (unsigned long) insn);
5568
5569 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5570
5571 if (opcode < 0)
5572 internal_error (__FILE__, __LINE__,
5573 _("copy_extra_ld_st: instruction decode error"));
5574
36073a92
YQ
5575 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5576 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5577 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5578 if (!immed)
36073a92 5579 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5580
36073a92 5581 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5582 if (bytesize[opcode] == 8)
36073a92
YQ
5583 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5584 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5585 if (!immed)
36073a92 5586 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5587
5588 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5589 if (bytesize[opcode] == 8)
5590 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5591 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5592 if (!immed)
5593 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5594
5595 dsc->rd = rt;
5596 dsc->u.ldst.xfersize = bytesize[opcode];
5597 dsc->u.ldst.rn = rn;
5598 dsc->u.ldst.immed = immed;
5599 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5600 dsc->u.ldst.restore_r4 = 0;
5601
5602 if (immed)
5603 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5604 ->
5605 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5606 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5607 else
5608 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5609 ->
5610 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5611 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5612
5613 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5614
5615 return 0;
5616}
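/* Worked example (register and immediate are arbitrary): for
   "ldrh r1, [pc, #8]" the preparation copies the PC value (from + 8)
   into r2, the executed copy becomes "ldrh r0, [r2, #8]", and
   cleanup_load then moves the loaded halfword from r0 into r1 and
   restores the scratch registers.  */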
5617
0f6f04ba 5618/* Copy byte/halfword/word loads and stores. */
cca44b1b 5619
7ff120b4 5620static void
0f6f04ba
YQ
5621install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5622 struct displaced_step_closure *dsc, int load,
5623 int immed, int writeback, int size, int usermode,
5624 int rt, int rm, int rn)
cca44b1b 5625{
cca44b1b 5626 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5627
36073a92
YQ
5628 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5629 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5630 if (!immed)
36073a92 5631 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5632 if (!load)
36073a92 5633 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5634
36073a92
YQ
5635 rt_val = displaced_read_reg (regs, dsc, rt);
5636 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5637 if (!immed)
36073a92 5638 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5639
5640 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5641 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5642 if (!immed)
5643 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5644 dsc->rd = rt;
0f6f04ba 5645 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5646 dsc->u.ldst.rn = rn;
5647 dsc->u.ldst.immed = immed;
7ff120b4 5648 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5649
5650 /* To write PC we can do:
5651
494e194e
YQ
5652 Before this sequence of instructions:
5653 r0 is the PC value read from displaced_read_reg, so r0 = from + 8;
5654 r2 is the Rn value read from displaced_read_reg.
5655
5656 Insn1: push {pc} Write address of STR instruction + offset on stack
5657 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5658 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5659 = addr(Insn1) + offset - addr(Insn3) - 8
5660 = offset - 16
5661 Insn4: add r4, r4, #8 r4 = offset - 8
5662 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5663 = from + offset
5664 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5665
5666 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5667 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5668 of this can be found in Section "Saving from r15" in
5669 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5670
7ff120b4
YQ
5671 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5672}
5673
34518530
YQ
5674
5675static int
5676thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5677 uint16_t insn2, struct regcache *regs,
5678 struct displaced_step_closure *dsc, int size)
5679{
5680 unsigned int u_bit = bit (insn1, 7);
5681 unsigned int rt = bits (insn2, 12, 15);
5682 int imm12 = bits (insn2, 0, 11);
5683 ULONGEST pc_val;
5684
5685 if (debug_displaced)
5686 fprintf_unfiltered (gdb_stdlog,
5687 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5688 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5689 imm12);
5690
5691 if (!u_bit)
5692 imm12 = -1 * imm12;
5693
5694 /* Rewrite instruction LDR Rt imm12 into:
5695
5696 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5697
5698 LDR R0, [R2, R3]
5699
5700 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5701
5702
5703 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5704 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5705 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5706
5707 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5708
5709 pc_val = pc_val & 0xfffffffc;
5710
5711 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5712 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5713
5714 dsc->rd = rt;
5715
5716 dsc->u.ldst.xfersize = size;
5717 dsc->u.ldst.immed = 0;
5718 dsc->u.ldst.writeback = 0;
5719 dsc->u.ldst.restore_r4 = 0;
5720
5721 /* LDR R0, [R2, R3] */
5722 dsc->modinsn[0] = 0xf852;
5723 dsc->modinsn[1] = 0x3;
5724 dsc->numinsns = 2;
5725
5726 dsc->cleanup = &cleanup_load;
5727
5728 return 0;
5729}
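/* Worked example (register and offset are arbitrary): for
   "ldr.w r5, [pc, #-8]" the preparation loads r2 with Align (from + 4, 4)
   and r3 with -8, the executed copy is "ldr.w r0, [r2, r3]", and
   cleanup_load moves the result into r5 before restoring r0, r2 and r3.  */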
5730
5731static int
5732thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5733 uint16_t insn2, struct regcache *regs,
5734 struct displaced_step_closure *dsc,
5735 int writeback, int immed)
5736{
5737 unsigned int rt = bits (insn2, 12, 15);
5738 unsigned int rn = bits (insn1, 0, 3);
5739 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5740 /* In LDR (register), there is also a register Rm, which is not allowed to
5741 be PC, so we don't have to check it. */
5742
5743 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5744 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5745 dsc);
5746
5747 if (debug_displaced)
5748 fprintf_unfiltered (gdb_stdlog,
5749 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5750 rt, rn, insn1, insn2);
5751
5752 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5753 0, rt, rm, rn);
5754
5755 dsc->u.ldst.restore_r4 = 0;
5756
5757 if (immed)
5758 /* ldr[b]<cond> rt, [rn, #imm], etc.
5759 ->
5760 ldr[b]<cond> r0, [r2, #imm]. */
5761 {
5762 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5763 dsc->modinsn[1] = insn2 & 0x0fff;
5764 }
5765 else
5766 /* ldr[b]<cond> rt, [rn, rm], etc.
5767 ->
5768 ldr[b]<cond> r0, [r2, r3]. */
5769 {
5770 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5771 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5772 }
5773
5774 dsc->numinsns = 2;
5775
5776 return 0;
5777}
5778
5779
7ff120b4
YQ
5780static int
5781arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5782 struct regcache *regs,
5783 struct displaced_step_closure *dsc,
0f6f04ba 5784 int load, int size, int usermode)
7ff120b4
YQ
5785{
5786 int immed = !bit (insn, 25);
5787 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5788 unsigned int rt = bits (insn, 12, 15);
5789 unsigned int rn = bits (insn, 16, 19);
5790 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5791
5792 if (!insn_references_pc (insn, 0x000ff00ful))
5793 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5794
5795 if (debug_displaced)
5796 fprintf_unfiltered (gdb_stdlog,
5797 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5798 load ? (size == 1 ? "ldrb" : "ldr")
5799 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5800 rt, rn,
5801 (unsigned long) insn);
5802
0f6f04ba
YQ
5803 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5804 usermode, rt, rm, rn);
7ff120b4 5805
bf9f652a 5806 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5807 {
5808 dsc->u.ldst.restore_r4 = 0;
5809
5810 if (immed)
5811 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5812 ->
5813 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5814 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5815 else
5816 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5817 ->
5818 {ldr,str}[b]<cond> r0, [r2, r3]. */
5819 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5820 }
5821 else
5822 {
5823 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5824 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5825 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5826 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5827 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5828 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5829 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5830
5831 /* As above. */
5832 if (immed)
5833 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5834 else
5835 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5836
cca44b1b
JB
5837 dsc->numinsns = 6;
5838 }
5839
5840 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5841
5842 return 0;
5843}
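/* As a concrete instance of the r4 scratch sequence above: on a core
   where STR stores the PC as the instruction address plus 8, the
   push/pop/sub/add steps leave r4 = 0 and r0 = from + 8, so the value
   finally written to memory is exactly what the original
   "str pc, [...]" at FROM would have stored.  */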
5844
5845/* Cleanup LDM instructions with fully-populated register list. This is an
5846 unfortunate corner case: it's impossible to implement correctly by modifying
5847 the instruction. The issue is as follows: we have an instruction,
5848
5849 ldm rN, {r0-r15}
5850
5851 which we must rewrite to avoid loading PC. A possible solution would be to
5852 do the load in two halves, something like (with suitable cleanup
5853 afterwards):
5854
5855 mov r8, rN
5856 ldm[id][ab] r8!, {r0-r7}
5857 str r7, <temp>
5858 ldm[id][ab] r8, {r7-r14}
5859 <bkpt>
5860
5861 but at present there's no suitable place for <temp>, since the scratch space
5862 is overwritten before the cleanup routine is called. For now, we simply
5863 emulate the instruction. */
5864
5865static void
5866cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5867 struct displaced_step_closure *dsc)
5868{
cca44b1b
JB
5869 int inc = dsc->u.block.increment;
5870 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5871 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5872 uint32_t regmask = dsc->u.block.regmask;
5873 int regno = inc ? 0 : 15;
5874 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5875 int exception_return = dsc->u.block.load && dsc->u.block.user
5876 && (regmask & 0x8000) != 0;
36073a92 5877 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5878 int do_transfer = condition_true (dsc->u.block.cond, status);
5879 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5880
5881 if (!do_transfer)
5882 return;
5883
5884 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5885 sensible we can do here. Complain loudly. */
5886 if (exception_return)
5887 error (_("Cannot single-step exception return"));
5888
5889 /* We don't handle any stores here for now. */
5890 gdb_assert (dsc->u.block.load != 0);
5891
5892 if (debug_displaced)
5893 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5894 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5895 dsc->u.block.increment ? "inc" : "dec",
5896 dsc->u.block.before ? "before" : "after");
5897
5898 while (regmask)
5899 {
5900 uint32_t memword;
5901
5902 if (inc)
bf9f652a 5903 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5904 regno++;
5905 else
5906 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5907 regno--;
5908
5909 xfer_addr += bump_before;
5910
5911 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5912 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5913
5914 xfer_addr += bump_after;
5915
5916 regmask &= ~(1 << regno);
5917 }
5918
5919 if (dsc->u.block.writeback)
5920 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5921 CANNOT_WRITE_PC);
5922}
5923
5924/* Clean up an STM which included the PC in the register list. */
5925
5926static void
5927cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5928 struct displaced_step_closure *dsc)
5929{
36073a92 5930 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5931 int store_executed = condition_true (dsc->u.block.cond, status);
5932 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5933 CORE_ADDR stm_insn_addr;
5934 uint32_t pc_val;
5935 long offset;
5936 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5937
5938 /* If condition code fails, there's nothing else to do. */
5939 if (!store_executed)
5940 return;
5941
5942 if (dsc->u.block.increment)
5943 {
5944 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5945
5946 if (dsc->u.block.before)
5947 pc_stored_at += 4;
5948 }
5949 else
5950 {
5951 pc_stored_at = dsc->u.block.xfer_addr;
5952
5953 if (dsc->u.block.before)
5954 pc_stored_at -= 4;
5955 }
5956
5957 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5958 stm_insn_addr = dsc->scratch_base;
5959 offset = pc_val - stm_insn_addr;
5960
5961 if (debug_displaced)
5962 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5963 "STM instruction\n", offset);
5964
5965 /* Rewrite the stored PC to the proper value for the non-displaced original
5966 instruction. */
5967 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5968 dsc->insn_addr + offset);
5969}
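/* In other words, whatever PC store offset the hardware actually used
   (typically 8 or 12) is recovered by subtracting the scratch address
   from the stored value, and the same offset is then applied to the
   original instruction address, so the memory image ends up as if the
   STM had run in place.  */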
5970
5971/* Clean up an LDM which includes the PC in the register list. We clumped all
5972 the registers in the transferred list into a contiguous range r0...rX (to
5973 avoid loading PC directly and losing control of the debugged program), so we
5974 must undo that here. */
5975
5976static void
6e39997a 5977cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5978 struct regcache *regs,
5979 struct displaced_step_closure *dsc)
5980{
36073a92 5981 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5982 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5983 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5984 unsigned int regs_loaded = bitcount (mask);
5985 unsigned int num_to_shuffle = regs_loaded, clobbered;
5986
5987 /* The method employed here will fail if the register list is fully populated
5988 (we need to avoid loading PC directly). */
5989 gdb_assert (num_to_shuffle < 16);
5990
5991 if (!load_executed)
5992 return;
5993
5994 clobbered = (1 << num_to_shuffle) - 1;
5995
5996 while (num_to_shuffle > 0)
5997 {
5998 if ((mask & (1 << write_reg)) != 0)
5999 {
6000 unsigned int read_reg = num_to_shuffle - 1;
6001
6002 if (read_reg != write_reg)
6003 {
36073a92 6004 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
6005 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6006 if (debug_displaced)
6007 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6008 "loaded register r%d to r%d\n"), read_reg,
6009 write_reg);
6010 }
6011 else if (debug_displaced)
6012 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6013 "r%d already in the right place\n"),
6014 write_reg);
6015
6016 clobbered &= ~(1 << write_reg);
6017
6018 num_to_shuffle--;
6019 }
6020
6021 write_reg--;
6022 }
6023
6024 /* Restore any registers we scribbled over. */
6025 for (write_reg = 0; clobbered != 0; write_reg++)
6026 {
6027 if ((clobbered & (1 << write_reg)) != 0)
6028 {
6029 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6030 CANNOT_WRITE_PC);
6031 if (debug_displaced)
6032 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6033 "clobbered register r%d\n"), write_reg);
6034 clobbered &= ~(1 << write_reg);
6035 }
6036 }
6037
6038 /* Perform register writeback manually. */
6039 if (dsc->u.block.writeback)
6040 {
6041 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6042
6043 if (dsc->u.block.increment)
6044 new_rn_val += regs_loaded * 4;
6045 else
6046 new_rn_val -= regs_loaded * 4;
6047
6048 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6049 CANNOT_WRITE_PC);
6050 }
6051}
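/* Worked example (registers are arbitrary): for an original
   "ldm r0, {r4, r7, pc}" the copy routine loads the three words into
   r0-r2; the loop above then writes pc <- r2 (via LOAD_WRITE_PC),
   r7 <- r1 and r4 <- r0, restores the clobbered r0-r2 from dsc->tmp[],
   and finally applies any writeback to the base register by hand.  */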
6052
6053/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6054 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6055
6056static int
7ff120b4
YQ
6057arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6058 struct regcache *regs,
6059 struct displaced_step_closure *dsc)
cca44b1b
JB
6060{
6061 int load = bit (insn, 20);
6062 int user = bit (insn, 22);
6063 int increment = bit (insn, 23);
6064 int before = bit (insn, 24);
6065 int writeback = bit (insn, 21);
6066 int rn = bits (insn, 16, 19);
cca44b1b 6067
0963b4bd
MS
6068 /* Block transfers which don't mention PC can be run directly
6069 out-of-line. */
bf9f652a 6070 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6071 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6072
bf9f652a 6073 if (rn == ARM_PC_REGNUM)
cca44b1b 6074 {
0963b4bd
MS
6075 warning (_("displaced: Unpredictable LDM or STM with "
6076 "base register r15"));
7ff120b4 6077 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6078 }
6079
6080 if (debug_displaced)
6081 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6082 "%.8lx\n", (unsigned long) insn);
6083
36073a92 6084 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6085 dsc->u.block.rn = rn;
6086
6087 dsc->u.block.load = load;
6088 dsc->u.block.user = user;
6089 dsc->u.block.increment = increment;
6090 dsc->u.block.before = before;
6091 dsc->u.block.writeback = writeback;
6092 dsc->u.block.cond = bits (insn, 28, 31);
6093
6094 dsc->u.block.regmask = insn & 0xffff;
6095
6096 if (load)
6097 {
6098 if ((insn & 0xffff) == 0xffff)
6099 {
6100 /* LDM with a fully-populated register list. This case is
6101 particularly tricky. Implement for now by fully emulating the
6102 instruction (which might not behave perfectly in all cases, but
6103 these instructions should be rare enough for that not to matter
6104 too much). */
6105 dsc->modinsn[0] = ARM_NOP;
6106
6107 dsc->cleanup = &cleanup_block_load_all;
6108 }
6109 else
6110 {
6111 /* LDM of a list of registers which includes PC. Implement by
6112 rewriting the list of registers to be transferred into a
6113 contiguous chunk r0...rX before doing the transfer, then shuffling
6114 registers into the correct places in the cleanup routine. */
6115 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6116 unsigned int num_in_list = bitcount (regmask), new_regmask;
6117 unsigned int i;
cca44b1b
JB
6118
6119 for (i = 0; i < num_in_list; i++)
36073a92 6120 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6121
6122 /* Writeback makes things complicated. We need to avoid clobbering
6123 the base register with one of the registers in our modified
6124 register list, but just using a different register can't work in
6125 all cases, e.g.:
6126
6127 ldm r14!, {r0-r13,pc}
6128
6129 which would need to be rewritten as:
6130
6131 ldm rN!, {r0-r14}
6132
6133 but that can't work, because there's no free register for N.
6134
6135 Solve this by turning off the writeback bit, and emulating
6136 writeback manually in the cleanup routine. */
6137
6138 if (writeback)
6139 insn &= ~(1 << 21);
6140
6141 new_regmask = (1 << num_in_list) - 1;
6142
6143 if (debug_displaced)
6144 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6145 "{..., pc}: original reg list %.4x, modified "
6146 "list %.4x\n"), rn, writeback ? "!" : "",
6147 (int) insn & 0xffff, new_regmask);
6148
6149 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6150
6151 dsc->cleanup = &cleanup_block_load_pc;
6152 }
6153 }
6154 else
6155 {
6156 /* STM of a list of registers which includes PC. Run the instruction
6157 as-is, but out of line: this will store the wrong value for the PC,
6158 so we must manually fix up the memory in the cleanup routine.
6159 Doing things this way has the advantage that we can auto-detect
6160 the offset of the PC write (which is architecture-dependent) in
6161 the cleanup routine. */
6162 dsc->modinsn[0] = insn;
6163
6164 dsc->cleanup = &cleanup_block_store_pc;
6165 }
6166
6167 return 0;
6168}
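/* Worked example (registers are arbitrary): "ldm r6!, {r4, r7, pc}" is
   rewritten as "ldm r6, {r0, r1, r2}" with the writeback bit cleared;
   cleanup_block_load_pc then shuffles the loaded values into r4, r7 and
   the PC, and performs the writeback to r6 manually.  */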
6169
34518530
YQ
6170static int
6171thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6172 struct regcache *regs,
6173 struct displaced_step_closure *dsc)
cca44b1b 6174{
34518530
YQ
6175 int rn = bits (insn1, 0, 3);
6176 int load = bit (insn1, 4);
6177 int writeback = bit (insn1, 5);
cca44b1b 6178
34518530
YQ
6179 /* Block transfers which don't mention PC can be run directly
6180 out-of-line. */
6181 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6182 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6183
34518530
YQ
6184 if (rn == ARM_PC_REGNUM)
6185 {
6186 warning (_("displaced: Unpredictable LDM or STM with "
6187 "base register r15"));
6188 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6189 "unpredictable ldm/stm", dsc);
6190 }
cca44b1b
JB
6191
6192 if (debug_displaced)
34518530
YQ
6193 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6194 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6195
34518530
YQ
6196 /* Clear bit 13, since it should always be zero. */
6197 dsc->u.block.regmask = (insn2 & 0xdfff);
6198 dsc->u.block.rn = rn;
cca44b1b 6199
34518530
YQ
6200 dsc->u.block.load = load;
6201 dsc->u.block.user = 0;
6202 dsc->u.block.increment = bit (insn1, 7);
6203 dsc->u.block.before = bit (insn1, 8);
6204 dsc->u.block.writeback = writeback;
6205 dsc->u.block.cond = INST_AL;
6206 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6207
34518530
YQ
6208 if (load)
6209 {
6210 if (dsc->u.block.regmask == 0xffff)
6211 {
6212 /* This cannot happen: bit 13 was cleared above, so the list is never full. */
6213 gdb_assert (0);
6214 }
6215 else
6216 {
6217 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6218 unsigned int num_in_list = bitcount (regmask), new_regmask;
6219 unsigned int i;
34518530
YQ
6220
6221 for (i = 0; i < num_in_list; i++)
6222 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6223
6224 if (writeback)
6225 insn1 &= ~(1 << 5);
6226
6227 new_regmask = (1 << num_in_list) - 1;
6228
6229 if (debug_displaced)
6230 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6231 "{..., pc}: original reg list %.4x, modified "
6232 "list %.4x\n"), rn, writeback ? "!" : "",
6233 (int) dsc->u.block.regmask, new_regmask);
6234
6235 dsc->modinsn[0] = insn1;
6236 dsc->modinsn[1] = (new_regmask & 0xffff);
6237 dsc->numinsns = 2;
6238
6239 dsc->cleanup = &cleanup_block_load_pc;
6240 }
6241 }
6242 else
6243 {
6244 dsc->modinsn[0] = insn1;
6245 dsc->modinsn[1] = insn2;
6246 dsc->numinsns = 2;
6247 dsc->cleanup = &cleanup_block_store_pc;
6248 }
6249 return 0;
6250}
6251
d9311bfa
AT
6252/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6253 This is used to avoid a dependency on BFD's bfd_endian enum. */
6254
6255ULONGEST
6256arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6257 int byte_order)
6258{
5f2dfcfd
AT
6259 return read_memory_unsigned_integer (memaddr, len,
6260 (enum bfd_endian) byte_order);
d9311bfa
AT
6261}
6262
6263/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6264
6265CORE_ADDR
6266arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6267 CORE_ADDR val)
6268{
6269 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6270}
6271
6272/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6273
e7cf25a8 6274static CORE_ADDR
553cb527 6275arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6276{
d9311bfa
AT
6277 return 0;
6278}
6279
6280/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6281
6282int
6283arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6284{
6285 return arm_is_thumb (self->regcache);
6286}
6287
6288 /* arm_software_single_step() is called just before we want to resume the
6289 inferior, if we want to single-step it but there is no hardware or
6290 kernel single-step support. We find the possible targets of the
6291 coming instruction and breakpoint them. */
6292
93f9a11f 6293VEC (CORE_ADDR) *
f5ea389a 6294arm_software_single_step (struct regcache *regcache)
d9311bfa 6295{
d9311bfa 6296 struct gdbarch *gdbarch = get_regcache_arch (regcache);
d9311bfa
AT
6297 struct arm_get_next_pcs next_pcs_ctx;
6298 CORE_ADDR pc;
6299 int i;
6300 VEC (CORE_ADDR) *next_pcs = NULL;
6301 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6302
6303 arm_get_next_pcs_ctor (&next_pcs_ctx,
6304 &arm_get_next_pcs_ops,
6305 gdbarch_byte_order (gdbarch),
6306 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6307 0,
d9311bfa
AT
6308 regcache);
6309
4d18591b 6310 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa
AT
6311
6312 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
771da62d
YQ
6313 {
6314 pc = gdbarch_addr_bits_remove (gdbarch, pc);
0bc5d801 6315 VEC_replace (CORE_ADDR, next_pcs, i, pc);
771da62d 6316 }
d9311bfa 6317
93f9a11f 6318 discard_cleanups (old_chain);
d9311bfa 6319
93f9a11f 6320 return next_pcs;
d9311bfa
AT
6321}
6322
34518530
YQ
6323/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6324 for Linux, where some SVC instructions must be treated specially. */
6325
6326static void
6327cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6328 struct displaced_step_closure *dsc)
6329{
6330 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6331
6332 if (debug_displaced)
6333 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6334 "%.8lx\n", (unsigned long) resume_addr);
6335
6336 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6337}
6338
6339
6340/* Common copy routine for svc instruction. */
6341
6342static int
6343install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6344 struct displaced_step_closure *dsc)
6345{
6346 /* Preparation: none.
6347 Insn: unmodified svc.
6348 Cleanup: pc <- insn_addr + insn_size. */
6349
6350 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6351 instruction. */
6352 dsc->wrote_to_pc = 1;
6353
6354 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6355 if (dsc->u.svc.copy_svc_os)
6356 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6357 else
6358 {
6359 dsc->cleanup = &cleanup_svc;
6360 return 0;
6361 }
34518530
YQ
6362}
6363
6364static int
6365arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6366 struct regcache *regs, struct displaced_step_closure *dsc)
6367{
6368
6369 if (debug_displaced)
6370 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6371 (unsigned long) insn);
6372
6373 dsc->modinsn[0] = insn;
6374
6375 return install_svc (gdbarch, regs, dsc);
6376}
6377
6378static int
6379thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6380 struct regcache *regs, struct displaced_step_closure *dsc)
6381{
6382
6383 if (debug_displaced)
6384 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6385 insn);
bd18283a 6386
34518530
YQ
6387 dsc->modinsn[0] = insn;
6388
6389 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6390}
6391
6392/* Copy undefined instructions. */
6393
6394static int
7ff120b4
YQ
6395arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6396 struct displaced_step_closure *dsc)
cca44b1b
JB
6397{
6398 if (debug_displaced)
0963b4bd
MS
6399 fprintf_unfiltered (gdb_stdlog,
6400 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6401 (unsigned long) insn);
6402
6403 dsc->modinsn[0] = insn;
6404
6405 return 0;
6406}
6407
34518530
YQ
6408static int
6409thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6410 struct displaced_step_closure *dsc)
6411{
6412
6413 if (debug_displaced)
6414 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6415 "%.4x %.4x\n", (unsigned short) insn1,
6416 (unsigned short) insn2);
6417
6418 dsc->modinsn[0] = insn1;
6419 dsc->modinsn[1] = insn2;
6420 dsc->numinsns = 2;
6421
6422 return 0;
6423}
6424
cca44b1b
JB
6425/* Copy unpredictable instructions. */
6426
6427static int
7ff120b4
YQ
6428arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6429 struct displaced_step_closure *dsc)
cca44b1b
JB
6430{
6431 if (debug_displaced)
6432 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6433 "%.8lx\n", (unsigned long) insn);
6434
6435 dsc->modinsn[0] = insn;
6436
6437 return 0;
6438}
6439
6440/* The decode_* functions are instruction decoding helpers. They mostly follow
6441 the presentation in the ARM ARM. */
6442
6443static int
7ff120b4
YQ
6444arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6445 struct regcache *regs,
6446 struct displaced_step_closure *dsc)
cca44b1b
JB
6447{
6448 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6449 unsigned int rn = bits (insn, 16, 19);
6450
6451 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6452 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6453 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6454 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6455 else if ((op1 & 0x60) == 0x20)
7ff120b4 6456 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6457 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6458 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6459 dsc);
cca44b1b 6460 else if ((op1 & 0x77) == 0x41)
7ff120b4 6461 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6462 else if ((op1 & 0x77) == 0x45)
7ff120b4 6463 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6464 else if ((op1 & 0x77) == 0x51)
6465 {
6466 if (rn != 0xf)
7ff120b4 6467 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6468 else
7ff120b4 6469 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6470 }
6471 else if ((op1 & 0x77) == 0x55)
7ff120b4 6472 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6473 else if (op1 == 0x57)
6474 switch (op2)
6475 {
7ff120b4
YQ
6476 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6477 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6478 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6479 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6480 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6481 }
6482 else if ((op1 & 0x63) == 0x43)
7ff120b4 6483 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6484 else if ((op2 & 0x1) == 0x0)
6485 switch (op1 & ~0x80)
6486 {
6487 case 0x61:
7ff120b4 6488 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6489 case 0x65:
7ff120b4 6490 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6491 case 0x71: case 0x75:
6492 /* pld/pldw reg. */
7ff120b4 6493 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6494 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6495 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6496 default:
7ff120b4 6497 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6498 }
6499 else
7ff120b4 6500 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6501}
6502
6503static int
7ff120b4
YQ
6504arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6505 struct regcache *regs,
6506 struct displaced_step_closure *dsc)
cca44b1b
JB
6507{
6508 if (bit (insn, 27) == 0)
7ff120b4 6509 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6510 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6511 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6512 {
6513 case 0x0: case 0x2:
7ff120b4 6514 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6515
6516 case 0x1: case 0x3:
7ff120b4 6517 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6518
6519 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6520 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6521
6522 case 0x8:
6523 switch ((insn & 0xe00000) >> 21)
6524 {
6525 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6526 /* stc/stc2. */
7ff120b4 6527 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6528
6529 case 0x2:
7ff120b4 6530 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6531
6532 default:
7ff120b4 6533 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6534 }
6535
6536 case 0x9:
6537 {
6538 int rn_f = (bits (insn, 16, 19) == 0xf);
6539 switch ((insn & 0xe00000) >> 21)
6540 {
6541 case 0x1: case 0x3:
6542 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6543 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6544 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6545
6546 case 0x2:
7ff120b4 6547 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6548
6549 case 0x4: case 0x5: case 0x6: case 0x7:
6550 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6551 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6552 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6553
6554 default:
7ff120b4 6555 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6556 }
6557 }
6558
6559 case 0xa:
7ff120b4 6560 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6561
6562 case 0xb:
6563 if (bits (insn, 16, 19) == 0xf)
6564 /* ldc/ldc2 lit. */
7ff120b4 6565 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6566 else
7ff120b4 6567 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6568
6569 case 0xc:
6570 if (bit (insn, 4))
7ff120b4 6571 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6572 else
7ff120b4 6573 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6574
6575 case 0xd:
6576 if (bit (insn, 4))
7ff120b4 6577 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6578 else
7ff120b4 6579 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6580
6581 default:
7ff120b4 6582 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6583 }
6584}
6585
6586/* Decode miscellaneous instructions in dp/misc encoding space. */
6587
6588static int
7ff120b4
YQ
6589arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6590 struct regcache *regs,
6591 struct displaced_step_closure *dsc)
cca44b1b
JB
6592{
6593 unsigned int op2 = bits (insn, 4, 6);
6594 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6595
6596 switch (op2)
6597 {
6598 case 0x0:
7ff120b4 6599 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6600
6601 case 0x1:
6602 if (op == 0x1) /* bx. */
7ff120b4 6603 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6604 else if (op == 0x3)
7ff120b4 6605 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6606 else
7ff120b4 6607 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6608
6609 case 0x2:
6610 if (op == 0x1)
6611 /* Not really supported. */
7ff120b4 6612 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6613 else
7ff120b4 6614 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6615
6616 case 0x3:
6617 if (op == 0x1)
7ff120b4 6618 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6619 regs, dsc); /* blx register. */
cca44b1b 6620 else
7ff120b4 6621 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6622
6623 case 0x5:
7ff120b4 6624 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6625
6626 case 0x7:
6627 if (op == 0x1)
7ff120b4 6628 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6629 else if (op == 0x3)
6630 /* Not really supported. */
7ff120b4 6631 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6632
6633 default:
7ff120b4 6634 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6635 }
6636}
6637
6638static int
7ff120b4
YQ
6639arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6640 struct regcache *regs,
6641 struct displaced_step_closure *dsc)
cca44b1b
JB
6642{
6643 if (bit (insn, 25))
6644 switch (bits (insn, 20, 24))
6645 {
6646 case 0x10:
7ff120b4 6647 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6648
6649 case 0x14:
7ff120b4 6650 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6651
6652 case 0x12: case 0x16:
7ff120b4 6653 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6654
6655 default:
7ff120b4 6656 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6657 }
6658 else
6659 {
6660 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6661
6662 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6663 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6664 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6665 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6666 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6667 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6668 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6669 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6670 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6671 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6672 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6673 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6674 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6675 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6676 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6677 dsc);
cca44b1b
JB
6678 }
6679
6680 /* Should be unreachable. */
6681 return 1;
6682}
6683
6684static int
7ff120b4
YQ
6685arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6686 struct regcache *regs,
6687 struct displaced_step_closure *dsc)
cca44b1b
JB
6688{
6689 int a = bit (insn, 25), b = bit (insn, 4);
6690 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6691
6692 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6693 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6694 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6695 else if ((!a && (op1 & 0x17) == 0x02)
6696 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6697 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6698 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6699 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6700 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6701 else if ((!a && (op1 & 0x17) == 0x03)
6702 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6703 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6704 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6705 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6706 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6707 else if ((!a && (op1 & 0x17) == 0x06)
6708 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6709 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6710 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6711 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6712 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6713 else if ((!a && (op1 & 0x17) == 0x07)
6714 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6715 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6716
6717 /* Should be unreachable. */
6718 return 1;
6719}
6720
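/* Decode an ARM media instruction (parallel add/subtract, pack/unpack,
   saturate/reverse and bit-field operations) for displaced stepping.  */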
6721static int
6722arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6723 struct displaced_step_closure *dsc)
6724{
6725 switch (bits (insn, 20, 24))
6726 {
6727 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6728 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6729
6730 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6731 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6732
6733 case 0x08: case 0x09: case 0x0a: case 0x0b:
6734 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6735 return arm_copy_unmodified (gdbarch, insn,
6736 "decode/pack/unpack/saturate/reverse", dsc);
6737
6738 case 0x18:
6739 if (bits (insn, 5, 7) == 0) /* op2. */
6740 {
6741 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6742 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6743 else
7ff120b4 6744 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6745 }
6746 else
7ff120b4 6747 return arm_copy_undef (gdbarch, insn, dsc);
6748
6749 case 0x1a: case 0x1b:
6750 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6751 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6752 else
7ff120b4 6753 return arm_copy_undef (gdbarch, insn, dsc);
6754
6755 case 0x1c: case 0x1d:
6756 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6757 {
6758 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6759 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6760 else
7ff120b4 6761 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6762 }
6763 else
7ff120b4 6764 return arm_copy_undef (gdbarch, insn, dsc);
6765
6766 case 0x1e: case 0x1f:
6767 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6768 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6769 else
7ff120b4 6770 return arm_copy_undef (gdbarch, insn, dsc);
6771 }
6772
6773 /* Should be unreachable. */
6774 return 1;
6775}
6776
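/* Decode branch, branch-with-link and block data transfer (LDM/STM)
   instructions.  */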
6777static int
615234c1 6778arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6779 struct regcache *regs,
6780 struct displaced_step_closure *dsc)
6781{
6782 if (bit (insn, 25))
7ff120b4 6783 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6784 else
7ff120b4 6785 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6786}
6787
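/* Decode an ARM VFP/Neon extension register load/store instruction.  */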
6788static int
6789arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6790 struct regcache *regs,
6791 struct displaced_step_closure *dsc)
6792{
6793 unsigned int opcode = bits (insn, 20, 24);
6794
6795 switch (opcode)
6796 {
6797 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6798 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6799
6800 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6801 case 0x12: case 0x16:
7ff120b4 6802 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6803
6804 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6805 case 0x13: case 0x17:
7ff120b4 6806 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6807
6808 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6809 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6810 /* Note: no writeback for these instructions. Bit 25 will always be
6811 zero though (via caller), so the following works OK. */
7ff120b4 6812 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6813 }
6814
6815 /* Should be unreachable. */
6816 return 1;
6817}
6818
6819/* Decode shifted register instructions. */
6820
6821static int
6822thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6823 uint16_t insn2, struct regcache *regs,
6824 struct displaced_step_closure *dsc)
6825{
6826 /* PC is only allowed to be used in instruction MOV. */
6827
6828 unsigned int op = bits (insn1, 5, 8);
6829 unsigned int rn = bits (insn1, 0, 3);
6830
6831 if (op == 0x2 && rn == 0xf) /* MOV */
6832 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6833 else
6834 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6835 "dp (shift reg)", dsc);
6836}
6837
6838
6839/* Decode extension register load/store. Exactly the same as
6840 arm_decode_ext_reg_ld_st. */
6841
6842static int
6843thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6844 uint16_t insn2, struct regcache *regs,
6845 struct displaced_step_closure *dsc)
6846{
6847 unsigned int opcode = bits (insn1, 4, 8);
6848
6849 switch (opcode)
6850 {
6851 case 0x04: case 0x05:
6852 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6853 "vfp/neon vmov", dsc);
6854
6855 case 0x08: case 0x0c: /* 01x00 */
6856 case 0x0a: case 0x0e: /* 01x10 */
6857 case 0x12: case 0x16: /* 10x10 */
6858 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6859 "vfp/neon vstm/vpush", dsc);
6860
6861 case 0x09: case 0x0d: /* 01x01 */
6862 case 0x0b: case 0x0f: /* 01x11 */
6863 case 0x13: case 0x17: /* 10x11 */
6864 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6865 "vfp/neon vldm/vpop", dsc);
6866
6867 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6868 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6869 "vstr", dsc);
6870 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6871 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6872 }
6873
6874 /* Should be unreachable. */
6875 return 1;
6876}
6877
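/* Decode an ARM supervisor call or coprocessor instruction.  */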
cca44b1b 6878static int
12545665 6879arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6880 struct regcache *regs, struct displaced_step_closure *dsc)
6881{
6882 unsigned int op1 = bits (insn, 20, 25);
6883 int op = bit (insn, 4);
6884 unsigned int coproc = bits (insn, 8, 11);
6885
6886 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6887 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6888 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6889 && (coproc & 0xe) != 0xa)
6890 /* stc/stc2. */
7ff120b4 6891 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6892 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6893 && (coproc & 0xe) != 0xa)
6894 /* ldc/ldc2 imm/lit. */
7ff120b4 6895 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6896 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6897 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6898 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6899 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6900 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6901 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6902 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6903 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6904 else if ((op1 & 0x30) == 0x20 && !op)
6905 {
6906 if ((coproc & 0xe) == 0xa)
7ff120b4 6907 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6908 else
7ff120b4 6909 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6910 }
6911 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6912 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6913 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6914 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6915 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6916 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6917 else if ((op1 & 0x30) == 0x30)
7ff120b4 6918 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6919 else
7ff120b4 6920 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6921}
6922
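/* Decode a Thumb-2 supervisor call or coprocessor instruction.  */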
6923static int
6924thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6925 uint16_t insn2, struct regcache *regs,
6926 struct displaced_step_closure *dsc)
6927{
6928 unsigned int coproc = bits (insn2, 8, 11);
6929 unsigned int bit_5_8 = bits (insn1, 5, 8);
6930 unsigned int bit_9 = bit (insn1, 9);
6931 unsigned int bit_4 = bit (insn1, 4);
6932
6933 if (bit_9 == 0)
6934 {
6935 if (bit_5_8 == 2)
6936 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6937 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6938 dsc);
6939 else if (bit_5_8 == 0) /* UNDEFINED. */
6940 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6941 else
6942 {
 6943	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
6944 if ((coproc & 0xe) == 0xa)
6945 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6946 dsc);
6947 else /* coproc is not 101x. */
6948 {
6949 if (bit_4 == 0) /* STC/STC2. */
6950 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6951 "stc/stc2", dsc);
 6952	    else /* LDC/LDC2 {literal, immediate}.  */
6953 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6954 regs, dsc);
6955 }
6956 }
6957 }
6958 else
6959 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6960
6961 return 0;
6962}
6963
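/* Common preparation step for the ADR (PC-relative address) copy routines
   below: load the original PC value into Rd.  */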
6964static void
6965install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6966 struct displaced_step_closure *dsc, int rd)
6967{
6968 /* ADR Rd, #imm
6969
6970 Rewrite as:
6971
6972 Preparation: Rd <- PC
6973 Insn: ADD Rd, #imm
6974 Cleanup: Null.
6975 */
6976
6977 /* Rd <- PC */
6978 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6979 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6980}
6981
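/* Copy a 16-bit Thumb PC-relative address calculation (ADR).  */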
6982static int
6983thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6984 struct displaced_step_closure *dsc,
6985 int rd, unsigned int imm)
6986{
6987
6988 /* Encoding T2: ADDS Rd, #imm */
6989 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6990
6991 install_pc_relative (gdbarch, regs, dsc, rd);
6992
6993 return 0;
6994}
6995
6996static int
6997thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6998 struct regcache *regs,
6999 struct displaced_step_closure *dsc)
7000{
7001 unsigned int rd = bits (insn, 8, 10);
7002 unsigned int imm8 = bits (insn, 0, 7);
7003
7004 if (debug_displaced)
7005 fprintf_unfiltered (gdb_stdlog,
7006 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7007 rd, imm8, insn);
7008
7009 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7010}
7011
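/* Copy a 32-bit Thumb PC-relative address calculation (ADR.W).  */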
7012static int
7013thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7014 uint16_t insn2, struct regcache *regs,
7015 struct displaced_step_closure *dsc)
7016{
7017 unsigned int rd = bits (insn2, 8, 11);
 7018	  /* The immediate bits occupy the same positions in ADR, ADD and SUB, so
 7019	     extract the raw immediate encoding rather than computing its value.  When
 7020	     generating the ADD or SUB instruction, the immediate can then simply be
 7021	     ORed into the encoding.  */
7022 unsigned int imm_3_8 = insn2 & 0x70ff;
7023 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7024
7025 if (debug_displaced)
7026 fprintf_unfiltered (gdb_stdlog,
7027 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7028 rd, imm_i, imm_3_8, insn1, insn2);
7029
7030 if (bit (insn1, 7)) /* Encoding T2 */
7031 {
 7032	      /* Encoding T2: SUB Rd, Rd, #imm */
7033 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7034 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7035 }
7036 else /* Encoding T3 */
7037 {
7038 /* Encoding T3: ADD Rd, Rd, #imm */
7039 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7040 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7041 }
7042 dsc->numinsns = 2;
7043
7044 install_pc_relative (gdbarch, regs, dsc, rd);
7045
7046 return 0;
7047}
7048
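/* Copy a 16-bit Thumb LDR (literal) instruction.  */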
7049static int
615234c1 7050thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7051 struct regcache *regs,
7052 struct displaced_step_closure *dsc)
7053{
7054 unsigned int rt = bits (insn1, 8, 10);
7055 unsigned int pc;
7056 int imm8 = (bits (insn1, 0, 7) << 2);
7057
7058 /* LDR Rd, #imm8
7059
 7060	     Rewrite as:
7061
7062 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7063
7064 Insn: LDR R0, [R2, R3];
7065 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7066
7067 if (debug_displaced)
7068 fprintf_unfiltered (gdb_stdlog,
7069 "displaced: copying thumb ldr r%d [pc #%d]\n"
7070 , rt, imm8);
7071
7072 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7073 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7074 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7075 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7076 /* The assembler calculates the required value of the offset from the
7077 Align(PC,4) value of this instruction to the label. */
7078 pc = pc & 0xfffffffc;
7079
7080 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7081 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7082
7083 dsc->rd = rt;
7084 dsc->u.ldst.xfersize = 4;
7085 dsc->u.ldst.rn = 0;
7086 dsc->u.ldst.immed = 0;
7087 dsc->u.ldst.writeback = 0;
7088 dsc->u.ldst.restore_r4 = 0;
7089
7090 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7091
7092 dsc->cleanup = &cleanup_load;
7093
7094 return 0;
7095}
7096
 7097/* Copy Thumb cbnz/cbz instruction.  */
7098
7099static int
7100thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7101 struct regcache *regs,
7102 struct displaced_step_closure *dsc)
7103{
7104 int non_zero = bit (insn1, 11);
7105 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7106 CORE_ADDR from = dsc->insn_addr;
7107 int rn = bits (insn1, 0, 2);
7108 int rn_val = displaced_read_reg (regs, dsc, rn);
7109
7110 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
 7111	  /* CBNZ and CBZ do not affect the condition flags.  If the condition is
 7112	     true, set it to INST_AL so that cleanup_branch knows the branch is taken;
 7113	     otherwise leave it alone and cleanup_branch will do nothing.  */
7114 if (dsc->u.branch.cond)
7115 {
7116 dsc->u.branch.cond = INST_AL;
7117 dsc->u.branch.dest = from + 4 + imm5;
7118 }
7119 else
7120 dsc->u.branch.dest = from + 2;
7121
7122 dsc->u.branch.link = 0;
7123 dsc->u.branch.exchange = 0;
7124
7125 if (debug_displaced)
7126 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7127 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7128 rn, rn_val, insn1, dsc->u.branch.dest);
7129
7130 dsc->modinsn[0] = THUMB_NOP;
7131
7132 dsc->cleanup = &cleanup_branch;
7133 return 0;
7134}
7135
 7136/* Copy Table Branch Byte/Halfword.  */
7137static int
7138thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7139 uint16_t insn2, struct regcache *regs,
7140 struct displaced_step_closure *dsc)
7141{
7142 ULONGEST rn_val, rm_val;
7143 int is_tbh = bit (insn2, 4);
7144 CORE_ADDR halfwords = 0;
7145 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7146
7147 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7148 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7149
7150 if (is_tbh)
7151 {
7152 gdb_byte buf[2];
7153
7154 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7155 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7156 }
7157 else
7158 {
7159 gdb_byte buf[1];
7160
7161 target_read_memory (rn_val + rm_val, buf, 1);
7162 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7163 }
7164
7165 if (debug_displaced)
7166 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7167 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7168 (unsigned int) rn_val, (unsigned int) rm_val,
7169 (unsigned int) halfwords);
7170
7171 dsc->u.branch.cond = INST_AL;
7172 dsc->u.branch.link = 0;
7173 dsc->u.branch.exchange = 0;
7174 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7175
7176 dsc->cleanup = &cleanup_branch;
7177
7178 return 0;
7179}
7180
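/* Cleanup for a 16-bit Thumb POP with a full register list: the loaded PC
   value was left in r7, so move it into PC and restore r7 and r8.  */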
7181static void
7182cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7183 struct displaced_step_closure *dsc)
7184{
7185 /* PC <- r7 */
7186 int val = displaced_read_reg (regs, dsc, 7);
7187 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7188
7189 /* r7 <- r8 */
7190 val = displaced_read_reg (regs, dsc, 8);
7191 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7192
7193 /* r8 <- tmp[0] */
7194 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7195
7196}
7197
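/* Copy a 16-bit Thumb POP instruction whose register list includes PC.  */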
7198static int
615234c1 7199thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7200 struct regcache *regs,
7201 struct displaced_step_closure *dsc)
7202{
7203 dsc->u.block.regmask = insn1 & 0x00ff;
7204
7205 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7206 to :
7207
7208 (1) register list is full, that is, r0-r7 are used.
7209 Prepare: tmp[0] <- r8
7210
7211 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7212 MOV r8, r7; Move value of r7 to r8;
7213 POP {r7}; Store PC value into r7.
7214
7215 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7216
7217 (2) register list is not full, supposing there are N registers in
7218 register list (except PC, 0 <= N <= 7).
7219 Prepare: for each i, 0 - N, tmp[i] <- ri.
7220
7221 POP {r0, r1, ...., rN};
7222
7223 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7224 from tmp[] properly.
7225 */
7226 if (debug_displaced)
7227 fprintf_unfiltered (gdb_stdlog,
7228 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7229 dsc->u.block.regmask, insn1);
7230
7231 if (dsc->u.block.regmask == 0xff)
7232 {
7233 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7234
7235 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7236 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7237 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7238
7239 dsc->numinsns = 3;
7240 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7241 }
7242 else
7243 {
7244 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7245 unsigned int i;
7246 unsigned int new_regmask;
7247
7248 for (i = 0; i < num_in_list + 1; i++)
7249 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7250
7251 new_regmask = (1 << (num_in_list + 1)) - 1;
7252
7253 if (debug_displaced)
7254 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7255 "{..., pc}: original reg list %.4x,"
7256 " modified list %.4x\n"),
7257 (int) dsc->u.block.regmask, new_regmask);
7258
7259 dsc->u.block.regmask |= 0x8000;
7260 dsc->u.block.writeback = 0;
7261 dsc->u.block.cond = INST_AL;
7262
7263 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7264
7265 dsc->cleanup = &cleanup_block_load_pc;
7266 }
7267
7268 return 0;
7269}
7270
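/* Decode and copy a 16-bit Thumb instruction for displaced stepping.  */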
7271static void
7272thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7273 struct regcache *regs,
7274 struct displaced_step_closure *dsc)
7275{
7276 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7277 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7278 int err = 0;
7279
7280 /* 16-bit thumb instructions. */
7281 switch (op_bit_12_15)
7282 {
 7283    /* Shift (immediate), add, subtract, move and compare.  */
7284 case 0: case 1: case 2: case 3:
7285 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7286 "shift/add/sub/mov/cmp",
7287 dsc);
7288 break;
7289 case 4:
7290 switch (op_bit_10_11)
7291 {
7292 case 0: /* Data-processing */
7293 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7294 "data-processing",
7295 dsc);
7296 break;
7297 case 1: /* Special data instructions and branch and exchange. */
7298 {
7299 unsigned short op = bits (insn1, 7, 9);
7300 if (op == 6 || op == 7) /* BX or BLX */
7301 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7302 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7303 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7304 else
7305 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7306 dsc);
7307 }
7308 break;
7309 default: /* LDR (literal) */
7310 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7311 }
7312 break;
7313 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7314 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7315 break;
7316 case 10:
7317 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7318 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7319 else /* Generate SP-relative address */
7320 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7321 break;
7322 case 11: /* Misc 16-bit instructions */
7323 {
7324 switch (bits (insn1, 8, 11))
7325 {
7326 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7327 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7328 break;
7329 case 12: case 13: /* POP */
7330 if (bit (insn1, 8)) /* PC is in register list. */
7331 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7332 else
7333 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7334 break;
7335 case 15: /* If-Then, and hints */
7336 if (bits (insn1, 0, 3))
7337 /* If-Then makes up to four following instructions conditional.
 7338	       The IT instruction itself is not conditional, so handle it as a
7339 common unmodified instruction. */
7340 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7341 dsc);
7342 else
7343 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7344 break;
7345 default:
7346 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7347 }
7348 }
7349 break;
7350 case 12:
7351 if (op_bit_10_11 < 2) /* Store multiple registers */
7352 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7353 else /* Load multiple registers */
7354 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7355 break;
7356 case 13: /* Conditional branch and supervisor call */
7357 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7358 err = thumb_copy_b (gdbarch, insn1, dsc);
7359 else
7360 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7361 break;
7362 case 14: /* Unconditional branch */
7363 err = thumb_copy_b (gdbarch, insn1, dsc);
7364 break;
7365 default:
7366 err = 1;
7367 }
7368
7369 if (err)
7370 internal_error (__FILE__, __LINE__,
7371 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7372}
7373
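/* Decode 32-bit Thumb load instructions and memory hints (PLD/PLI).  */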
7374static int
7375decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7376 uint16_t insn1, uint16_t insn2,
7377 struct regcache *regs,
7378 struct displaced_step_closure *dsc)
7379{
7380 int rt = bits (insn2, 12, 15);
7381 int rn = bits (insn1, 0, 3);
7382 int op1 = bits (insn1, 7, 8);
7383
7384 switch (bits (insn1, 5, 6))
7385 {
7386 case 0: /* Load byte and memory hints */
7387 if (rt == 0xf) /* PLD/PLI */
7388 {
7389 if (rn == 0xf)
7390 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7391 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7392 else
7393 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7394 "pli/pld", dsc);
7395 }
7396 else
7397 {
7398 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7399 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7400 1);
7401 else
7402 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7403 "ldrb{reg, immediate}/ldrbt",
7404 dsc);
7405 }
7406
7407 break;
7408 case 1: /* Load halfword and memory hints. */
7409 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7410 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7411 "pld/unalloc memhint", dsc);
7412 else
7413 {
7414 if (rn == 0xf)
7415 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7416 2);
7417 else
7418 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7419 "ldrh/ldrht", dsc);
7420 }
7421 break;
7422 case 2: /* Load word */
7423 {
7424 int insn2_bit_8_11 = bits (insn2, 8, 11);
7425
7426 if (rn == 0xf)
7427 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7428 else if (op1 == 0x1) /* Encoding T3 */
7429 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7430 0, 1);
7431 else /* op1 == 0x0 */
7432 {
7433 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7434 /* LDR (immediate) */
7435 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7436 dsc, bit (insn2, 8), 1);
7437 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7438 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7439 "ldrt", dsc);
7440 else
7441 /* LDR (register) */
7442 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7443 dsc, 0, 0);
7444 }
7445 break;
7446 }
7447 default:
7448 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7449 break;
7450 }
7451 return 0;
7452}
7453
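/* Decode and copy a 32-bit Thumb instruction for displaced stepping.  */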
7454static void
7455thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7456 uint16_t insn2, struct regcache *regs,
7457 struct displaced_step_closure *dsc)
7458{
7459 int err = 0;
7460 unsigned short op = bit (insn2, 15);
7461 unsigned int op1 = bits (insn1, 11, 12);
7462
7463 switch (op1)
7464 {
7465 case 1:
7466 {
7467 switch (bits (insn1, 9, 10))
7468 {
7469 case 0:
7470 if (bit (insn1, 6))
7471 {
 7472	      /* Load/store {dual, exclusive}, table branch.  */
7473 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7474 && bits (insn2, 5, 7) == 0)
7475 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7476 dsc);
7477 else
 7478	      /* PC is not allowed to be used in load/store {dual, exclusive}
 7479		 instructions.  */
7480 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7481 "load/store dual/ex", dsc);
7482 }
7483 else /* load/store multiple */
7484 {
7485 switch (bits (insn1, 7, 8))
7486 {
7487 case 0: case 3: /* SRS, RFE */
7488 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7489 "srs/rfe", dsc);
7490 break;
7491 case 1: case 2: /* LDM/STM/PUSH/POP */
7492 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7493 break;
7494 }
7495 }
7496 break;
7497
7498 case 1:
7499 /* Data-processing (shift register). */
7500 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7501 dsc);
7502 break;
7503 default: /* Coprocessor instructions. */
7504 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7505 break;
7506 }
7507 break;
7508 }
7509 case 2: /* op1 = 2 */
7510 if (op) /* Branch and misc control. */
7511 {
7512 if (bit (insn2, 14) /* BLX/BL */
7513 || bit (insn2, 12) /* Unconditional branch */
7514 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7515 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7516 else
7517 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7518 "misc ctrl", dsc);
7519 }
7520 else
7521 {
7522 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7523 {
7524 int op = bits (insn1, 4, 8);
7525 int rn = bits (insn1, 0, 3);
7526 if ((op == 0 || op == 0xa) && rn == 0xf)
7527 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7528 regs, dsc);
7529 else
7530 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7531 "dp/pb", dsc);
7532 }
 7533	  else /* Data processing (modified immediate) */
7534 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7535 "dp/mi", dsc);
7536 }
7537 break;
7538 case 3: /* op1 = 3 */
7539 switch (bits (insn1, 9, 10))
7540 {
7541 case 0:
7542 if (bit (insn1, 4))
7543 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7544 regs, dsc);
7545 else /* NEON Load/Store and Store single data item */
7546 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7547 "neon elt/struct load/store",
7548 dsc);
7549 break;
7550 case 1: /* op1 = 3, bits (9, 10) == 1 */
7551 switch (bits (insn1, 7, 8))
7552 {
7553 case 0: case 1: /* Data processing (register) */
7554 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7555 "dp(reg)", dsc);
7556 break;
7557 case 2: /* Multiply and absolute difference */
7558 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7559 "mul/mua/diff", dsc);
7560 break;
7561 case 3: /* Long multiply and divide */
7562 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7563 "lmul/lmua", dsc);
7564 break;
7565 }
7566 break;
7567 default: /* Coprocessor instructions */
7568 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7569 break;
7570 }
7571 break;
7572 default:
7573 err = 1;
7574 }
7575
7576 if (err)
7577 internal_error (__FILE__, __LINE__,
7578 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7579
7580}
7581
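/* Read the Thumb instruction at FROM and copy it (as one 16-bit or one
   32-bit instruction) into the displaced stepping closure DSC.  */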
7582static void
7583thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7584 struct regcache *regs,
7585 struct displaced_step_closure *dsc)
7586{
7587 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7588 uint16_t insn1
7589 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7590
7591 if (debug_displaced)
7592 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7593 "at %.8lx\n", insn1, (unsigned long) from);
7594
7595 dsc->is_thumb = 1;
7596 dsc->insn_size = thumb_insn_size (insn1);
7597 if (thumb_insn_size (insn1) == 4)
7598 {
7599 uint16_t insn2
7600 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7601 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7602 }
7603 else
7604 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7605}
7606
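/* Copy the instruction at FROM into the displaced stepping closure DSC,
   dispatching on ARM or Thumb mode and on the instruction class.  */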
cca44b1b 7607void
7608arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7609 CORE_ADDR to, struct regcache *regs,
7610 struct displaced_step_closure *dsc)
7611{
7612 int err = 0;
7613 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7614 uint32_t insn;
7615
7616 /* Most displaced instructions use a 1-instruction scratch space, so set this
7617 here and override below if/when necessary. */
7618 dsc->numinsns = 1;
7619 dsc->insn_addr = from;
7620 dsc->scratch_base = to;
7621 dsc->cleanup = NULL;
7622 dsc->wrote_to_pc = 0;
7623
b434a28f 7624 if (!displaced_in_arm_mode (regs))
12545665 7625 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7626
7627 dsc->is_thumb = 0;
7628 dsc->insn_size = 4;
7629 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7630 if (debug_displaced)
7631 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7632 "at %.8lx\n", (unsigned long) insn,
7633 (unsigned long) from);
7634
cca44b1b 7635 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7636 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7637 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7638 {
7639 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7640 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7641 break;
7642
7643 case 0x4: case 0x5: case 0x6:
7ff120b4 7644 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7645 break;
7646
7647 case 0x7:
7ff120b4 7648 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7649 break;
7650
7651 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7652 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7653 break;
7654
7655 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7656 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7657 break;
7658 }
7659
7660 if (err)
7661 internal_error (__FILE__, __LINE__,
7662 _("arm_process_displaced_insn: Instruction decode error"));
7663}
7664
7665/* Actually set up the scratch space for a displaced instruction. */
7666
7667void
7668arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7669 CORE_ADDR to, struct displaced_step_closure *dsc)
7670{
7671 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7672 unsigned int i, len, offset;
cca44b1b 7673 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7674 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7675 const gdb_byte *bkp_insn;
cca44b1b 7676
4db71c0b 7677 offset = 0;
cca44b1b
JB
7678 /* Poke modified instruction(s). */
7679 for (i = 0; i < dsc->numinsns; i++)
7680 {
7681 if (debug_displaced)
4db71c0b
YQ
7682 {
7683 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7684 if (size == 4)
7685 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7686 dsc->modinsn[i]);
7687 else if (size == 2)
7688 fprintf_unfiltered (gdb_stdlog, "%.4x",
7689 (unsigned short)dsc->modinsn[i]);
7690
7691 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7692 (unsigned long) to + offset);
7693
7694 }
7695 write_memory_unsigned_integer (to + offset, size,
7696 byte_order_for_code,
cca44b1b 7697 dsc->modinsn[i]);
4db71c0b
YQ
7698 offset += size;
7699 }
7700
7701 /* Choose the correct breakpoint instruction. */
7702 if (dsc->is_thumb)
7703 {
7704 bkp_insn = tdep->thumb_breakpoint;
7705 len = tdep->thumb_breakpoint_size;
7706 }
7707 else
7708 {
7709 bkp_insn = tdep->arm_breakpoint;
7710 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7711 }
7712
7713 /* Put breakpoint afterwards. */
4db71c0b 7714 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7715
7716 if (debug_displaced)
7717 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7718 paddress (gdbarch, from), paddress (gdbarch, to));
7719}
7720
cca44b1b
JB
7721/* Entry point for cleaning things up after a displaced instruction has been
7722 single-stepped. */
7723
7724void
7725arm_displaced_step_fixup (struct gdbarch *gdbarch,
7726 struct displaced_step_closure *dsc,
7727 CORE_ADDR from, CORE_ADDR to,
7728 struct regcache *regs)
7729{
7730 if (dsc->cleanup)
7731 dsc->cleanup (gdbarch, regs, dsc);
7732
7733 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7734 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7735 dsc->insn_addr + dsc->insn_size);
7736
cca44b1b
JB
7737}
7738
7739#include "bfd-in2.h"
7740#include "libcoff.h"
7741
7742static int
7743gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7744{
e47ad6c0
YQ
7745 gdb_disassembler *di
7746 = static_cast<gdb_disassembler *>(info->application_data);
7747 struct gdbarch *gdbarch = di->arch ();
9779414d
DJ
7748
7749 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7750 {
7751 static asymbol *asym;
7752 static combined_entry_type ce;
7753 static struct coff_symbol_struct csym;
7754 static struct bfd fake_bfd;
7755 static bfd_target fake_target;
7756
7757 if (csym.native == NULL)
7758 {
7759 /* Create a fake symbol vector containing a Thumb symbol.
7760 This is solely so that the code in print_insn_little_arm()
7761 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7762 the presence of a Thumb symbol and switch to decoding
7763 Thumb instructions. */
7764
7765 fake_target.flavour = bfd_target_coff_flavour;
7766 fake_bfd.xvec = &fake_target;
7767 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7768 csym.native = &ce;
7769 csym.symbol.the_bfd = &fake_bfd;
7770 csym.symbol.name = "fake";
7771 asym = (asymbol *) & csym;
7772 }
7773
7774 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7775 info->symbols = &asym;
7776 }
7777 else
7778 info->symbols = NULL;
7779
7780 if (info->endian == BFD_ENDIAN_BIG)
7781 return print_insn_big_arm (memaddr, info);
7782 else
7783 return print_insn_little_arm (memaddr, info);
7784}
7785
7786/* The following define instruction sequences that will cause ARM
7787 cpu's to take an undefined instruction trap. These are used to
7788 signal a breakpoint to GDB.
7789
7790 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7791 modes. A different instruction is required for each mode. The ARM
7792 cpu's can also be big or little endian. Thus four different
7793 instructions are needed to support all cases.
7794
7795 Note: ARMv4 defines several new instructions that will take the
7796 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7797 not in fact add the new instructions. The new undefined
7798 instructions in ARMv4 are all instructions that had no defined
7799 behaviour in earlier chips. There is no guarantee that they will
 7800    raise an exception, but they may be treated as NOPs.  In practice, it
 7801    may only be safe to rely on instructions matching:
7802
7803 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7804 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7805 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7806
 7807    Even this may only be true if the condition predicate is true.  The
cca44b1b
JB
7808 following use a condition predicate of ALWAYS so it is always TRUE.
7809
7810 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7811 and NetBSD all use a software interrupt rather than an undefined
 7812    instruction to force a trap.  This can be handled by the
7813 abi-specific code during establishment of the gdbarch vector. */
7814
7815#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7816#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7817#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7818#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7819
948f8e3d
PA
7820static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7821static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7822static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7823static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7824
cd6c3b4f
YQ
7825/* Implement the breakpoint_kind_from_pc gdbarch method. */
7826
d19280ad
YQ
7827static int
7828arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
cca44b1b
JB
7829{
7830 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7831 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7832
9779414d 7833 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7834 {
7835 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7836
7837 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7838 check whether we are replacing a 32-bit instruction. */
7839 if (tdep->thumb2_breakpoint != NULL)
7840 {
7841 gdb_byte buf[2];
d19280ad 7842
177321bd
DJ
7843 if (target_read_memory (*pcptr, buf, 2) == 0)
7844 {
7845 unsigned short inst1;
d19280ad 7846
177321bd 7847 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7848 if (thumb_insn_size (inst1) == 4)
d19280ad 7849 return ARM_BP_KIND_THUMB2;
177321bd
DJ
7850 }
7851 }
7852
d19280ad 7853 return ARM_BP_KIND_THUMB;
cca44b1b
JB
7854 }
7855 else
d19280ad
YQ
7856 return ARM_BP_KIND_ARM;
7857
7858}
7859
cd6c3b4f
YQ
7860/* Implement the sw_breakpoint_from_kind gdbarch method. */
7861
d19280ad
YQ
7862static const gdb_byte *
7863arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7864{
7865 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7866
7867 switch (kind)
cca44b1b 7868 {
d19280ad
YQ
7869 case ARM_BP_KIND_ARM:
7870 *size = tdep->arm_breakpoint_size;
cca44b1b 7871 return tdep->arm_breakpoint;
d19280ad
YQ
7872 case ARM_BP_KIND_THUMB:
7873 *size = tdep->thumb_breakpoint_size;
7874 return tdep->thumb_breakpoint;
7875 case ARM_BP_KIND_THUMB2:
7876 *size = tdep->thumb2_breakpoint_size;
7877 return tdep->thumb2_breakpoint;
7878 default:
7879 gdb_assert_not_reached ("unexpected arm breakpoint kind");
cca44b1b
JB
7880 }
7881}
7882
833b7ab5
YQ
7883/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7884
7885static int
7886arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7887 struct regcache *regcache,
7888 CORE_ADDR *pcptr)
7889{
7890 gdb_byte buf[4];
7891
7892 /* Check the memory pointed by PC is readable. */
7893 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7894 {
7895 struct arm_get_next_pcs next_pcs_ctx;
7896 CORE_ADDR pc;
7897 int i;
7898 VEC (CORE_ADDR) *next_pcs = NULL;
7899 struct cleanup *old_chain
7900 = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
7901
7902 arm_get_next_pcs_ctor (&next_pcs_ctx,
7903 &arm_get_next_pcs_ops,
7904 gdbarch_byte_order (gdbarch),
7905 gdbarch_byte_order_for_code (gdbarch),
7906 0,
7907 regcache);
7908
7909 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7910
 7911       /* If MEMADDR is the next instruction of the current PC, do the
 7912	  software single-step computation, and determine the Thumb mode
 7913	  from the destination address.  */
7914 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
7915 {
7916 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7917 {
7918 do_cleanups (old_chain);
7919
7920 if (IS_THUMB_ADDR (pc))
7921 {
7922 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7923 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7924 }
7925 else
7926 return ARM_BP_KIND_ARM;
7927 }
7928 }
7929
7930 do_cleanups (old_chain);
7931 }
7932
7933 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7934}
7935
cca44b1b
JB
7936/* Extract from an array REGBUF containing the (raw) register state a
7937 function return value of type TYPE, and copy that, in virtual
7938 format, into VALBUF. */
7939
7940static void
7941arm_extract_return_value (struct type *type, struct regcache *regs,
7942 gdb_byte *valbuf)
7943{
7944 struct gdbarch *gdbarch = get_regcache_arch (regs);
7945 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7946
7947 if (TYPE_CODE_FLT == TYPE_CODE (type))
7948 {
7949 switch (gdbarch_tdep (gdbarch)->fp_model)
7950 {
7951 case ARM_FLOAT_FPA:
7952 {
7953 /* The value is in register F0 in internal format. We need to
7954 extract the raw value and then convert it to the desired
7955 internal type. */
7956 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7957
7958 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7959 convert_from_extended (floatformat_from_type (type), tmpbuf,
7960 valbuf, gdbarch_byte_order (gdbarch));
7961 }
7962 break;
7963
7964 case ARM_FLOAT_SOFT_FPA:
7965 case ARM_FLOAT_SOFT_VFP:
7966 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7967 not using the VFP ABI code. */
7968 case ARM_FLOAT_VFP:
7969 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7970 if (TYPE_LENGTH (type) > 4)
7971 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7972 valbuf + INT_REGISTER_SIZE);
7973 break;
7974
7975 default:
0963b4bd
MS
7976 internal_error (__FILE__, __LINE__,
7977 _("arm_extract_return_value: "
7978 "Floating point model not supported"));
cca44b1b
JB
7979 break;
7980 }
7981 }
7982 else if (TYPE_CODE (type) == TYPE_CODE_INT
7983 || TYPE_CODE (type) == TYPE_CODE_CHAR
7984 || TYPE_CODE (type) == TYPE_CODE_BOOL
7985 || TYPE_CODE (type) == TYPE_CODE_PTR
7986 || TYPE_CODE (type) == TYPE_CODE_REF
7987 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7988 {
b021a221
MS
7989 /* If the type is a plain integer, then the access is
7990 straight-forward. Otherwise we have to play around a bit
7991 more. */
cca44b1b
JB
7992 int len = TYPE_LENGTH (type);
7993 int regno = ARM_A1_REGNUM;
7994 ULONGEST tmp;
7995
7996 while (len > 0)
7997 {
7998 /* By using store_unsigned_integer we avoid having to do
7999 anything special for small big-endian values. */
8000 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8001 store_unsigned_integer (valbuf,
8002 (len > INT_REGISTER_SIZE
8003 ? INT_REGISTER_SIZE : len),
8004 byte_order, tmp);
8005 len -= INT_REGISTER_SIZE;
8006 valbuf += INT_REGISTER_SIZE;
8007 }
8008 }
8009 else
8010 {
8011 /* For a structure or union the behaviour is as if the value had
8012 been stored to word-aligned memory and then loaded into
8013 registers with 32-bit load instruction(s). */
8014 int len = TYPE_LENGTH (type);
8015 int regno = ARM_A1_REGNUM;
8016 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8017
8018 while (len > 0)
8019 {
8020 regcache_cooked_read (regs, regno++, tmpbuf);
8021 memcpy (valbuf, tmpbuf,
8022 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8023 len -= INT_REGISTER_SIZE;
8024 valbuf += INT_REGISTER_SIZE;
8025 }
8026 }
8027}
8028
8029
8030/* Will a function return an aggregate type in memory or in a
8031 register? Return 0 if an aggregate type can be returned in a
8032 register, 1 if it must be returned in memory. */
8033
8034static int
8035arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8036{
cca44b1b
JB
8037 enum type_code code;
8038
f168693b 8039 type = check_typedef (type);
cca44b1b 8040
b13c8ab2
YQ
8041 /* Simple, non-aggregate types (ie not including vectors and
8042 complex) are always returned in a register (or registers). */
8043 code = TYPE_CODE (type);
8044 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8045 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8046 return 0;
cca44b1b 8047
c4312b19
YQ
8048 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8049 {
8050 /* Vector values should be returned using ARM registers if they
8051 are not over 16 bytes. */
8052 return (TYPE_LENGTH (type) > 16);
8053 }
8054
b13c8ab2 8055 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 8056 {
b13c8ab2
YQ
8057 /* The AAPCS says all aggregates not larger than a word are returned
8058 in a register. */
8059 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8060 return 0;
8061
cca44b1b
JB
8062 return 1;
8063 }
b13c8ab2
YQ
8064 else
8065 {
8066 int nRc;
cca44b1b 8067
b13c8ab2
YQ
8068 /* All aggregate types that won't fit in a register must be returned
8069 in memory. */
8070 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8071 return 1;
cca44b1b 8072
b13c8ab2
YQ
8073 /* In the ARM ABI, "integer" like aggregate types are returned in
8074 registers. For an aggregate type to be integer like, its size
8075 must be less than or equal to INT_REGISTER_SIZE and the
8076 offset of each addressable subfield must be zero. Note that bit
8077 fields are not addressable, and all addressable subfields of
8078 unions always start at offset zero.
cca44b1b 8079
b13c8ab2
YQ
8080 This function is based on the behaviour of GCC 2.95.1.
8081 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8082
b13c8ab2
YQ
8083 Note: All versions of GCC before GCC 2.95.2 do not set up the
8084 parameters correctly for a function returning the following
8085 structure: struct { float f;}; This should be returned in memory,
8086 not a register. Richard Earnshaw sent me a patch, but I do not
8087 know of any way to detect if a function like the above has been
8088 compiled with the correct calling convention. */
8089
8090 /* Assume all other aggregate types can be returned in a register.
8091 Run a check for structures, unions and arrays. */
8092 nRc = 0;
67255d04 8093
b13c8ab2
YQ
8094 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8095 {
8096 int i;
8097 /* Need to check if this struct/union is "integer" like. For
8098 this to be true, its size must be less than or equal to
8099 INT_REGISTER_SIZE and the offset of each addressable
8100 subfield must be zero. Note that bit fields are not
8101 addressable, and unions always start at offset zero. If any
8102 of the subfields is a floating point type, the struct/union
8103 cannot be an integer type. */
8104
8105 /* For each field in the object, check:
8106 1) Is it FP? --> yes, nRc = 1;
8107 2) Is it addressable (bitpos != 0) and
8108 not packed (bitsize == 0)?
8109 --> yes, nRc = 1
8110 */
8111
8112 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8113 {
b13c8ab2
YQ
8114 enum type_code field_type_code;
8115
8116 field_type_code
8117 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8118 i)));
8119
8120 /* Is it a floating point type field? */
8121 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8122 {
8123 nRc = 1;
8124 break;
8125 }
b13c8ab2
YQ
8126
8127 /* If bitpos != 0, then we have to care about it. */
8128 if (TYPE_FIELD_BITPOS (type, i) != 0)
8129 {
8130 /* Bitfields are not addressable. If the field bitsize is
8131 zero, then the field is not packed. Hence it cannot be
8132 a bitfield or any other packed type. */
8133 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8134 {
8135 nRc = 1;
8136 break;
8137 }
8138 }
67255d04
RE
8139 }
8140 }
67255d04 8141
b13c8ab2
YQ
8142 return nRc;
8143 }
67255d04
RE
8144}
8145
34e8f22d
RE
8146/* Write into appropriate registers a function return value of type
8147 TYPE, given in virtual format. */
8148
8149static void
b508a996 8150arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8151 const gdb_byte *valbuf)
34e8f22d 8152{
be8626e0 8153 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8154 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8155
34e8f22d
RE
8156 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8157 {
e362b510 8158 gdb_byte buf[MAX_REGISTER_SIZE];
34e8f22d 8159
be8626e0 8160 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8161 {
8162 case ARM_FLOAT_FPA:
8163
be8626e0
MD
8164 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8165 gdbarch_byte_order (gdbarch));
b508a996 8166 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8167 break;
8168
fd50bc42 8169 case ARM_FLOAT_SOFT_FPA:
08216dd7 8170 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8171 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8172 not using the VFP ABI code. */
8173 case ARM_FLOAT_VFP:
b508a996
RE
8174 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8175 if (TYPE_LENGTH (type) > 4)
8176 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8177 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8178 break;
8179
8180 default:
9b20d036
MS
8181 internal_error (__FILE__, __LINE__,
8182 _("arm_store_return_value: Floating "
8183 "point model not supported"));
08216dd7
RE
8184 break;
8185 }
34e8f22d 8186 }
b508a996
RE
8187 else if (TYPE_CODE (type) == TYPE_CODE_INT
8188 || TYPE_CODE (type) == TYPE_CODE_CHAR
8189 || TYPE_CODE (type) == TYPE_CODE_BOOL
8190 || TYPE_CODE (type) == TYPE_CODE_PTR
8191 || TYPE_CODE (type) == TYPE_CODE_REF
8192 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8193 {
8194 if (TYPE_LENGTH (type) <= 4)
8195 {
8196 /* Values of one word or less are zero/sign-extended and
8197 returned in r0. */
7a5ea0d4 8198 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8199 LONGEST val = unpack_long (type, valbuf);
8200
e17a4113 8201 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8202 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8203 }
8204 else
8205 {
8206 /* Integral values greater than one word are stored in consecutive
8207 registers starting with r0. This will always be a multiple of
 8208	     the register size.  */
8209 int len = TYPE_LENGTH (type);
8210 int regno = ARM_A1_REGNUM;
8211
8212 while (len > 0)
8213 {
8214 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8215 len -= INT_REGISTER_SIZE;
8216 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8217 }
8218 }
8219 }
34e8f22d 8220 else
b508a996
RE
8221 {
8222 /* For a structure or union the behaviour is as if the value had
8223 been stored to word-aligned memory and then loaded into
8224 registers with 32-bit load instruction(s). */
8225 int len = TYPE_LENGTH (type);
8226 int regno = ARM_A1_REGNUM;
7a5ea0d4 8227 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8228
8229 while (len > 0)
8230 {
8231 memcpy (tmpbuf, valbuf,
7a5ea0d4 8232 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8233 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8234 len -= INT_REGISTER_SIZE;
8235 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8236 }
8237 }
34e8f22d
RE
8238}
8239
2af48f68
PB
8240
8241/* Handle function return values. */
8242
8243static enum return_value_convention
6a3a010b 8244arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8245 struct type *valtype, struct regcache *regcache,
8246 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8247{
7c00367c 8248 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8249 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8250 enum arm_vfp_cprc_base_type vfp_base_type;
8251 int vfp_base_count;
8252
8253 if (arm_vfp_abi_for_function (gdbarch, func_type)
8254 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8255 {
8256 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8257 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8258 int i;
8259 for (i = 0; i < vfp_base_count; i++)
8260 {
58d6951d
DJ
8261 if (reg_char == 'q')
8262 {
8263 if (writebuf)
8264 arm_neon_quad_write (gdbarch, regcache, i,
8265 writebuf + i * unit_length);
8266
8267 if (readbuf)
8268 arm_neon_quad_read (gdbarch, regcache, i,
8269 readbuf + i * unit_length);
8270 }
8271 else
8272 {
8273 char name_buf[4];
8274 int regnum;
8275
8c042590 8276 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8277 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8278 strlen (name_buf));
8279 if (writebuf)
8280 regcache_cooked_write (regcache, regnum,
8281 writebuf + i * unit_length);
8282 if (readbuf)
8283 regcache_cooked_read (regcache, regnum,
8284 readbuf + i * unit_length);
8285 }
90445bd3
DJ
8286 }
8287 return RETURN_VALUE_REGISTER_CONVENTION;
8288 }
7c00367c 8289
2af48f68
PB
8290 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8291 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8292 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8293 {
7c00367c
MK
8294 if (tdep->struct_return == pcc_struct_return
8295 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8296 return RETURN_VALUE_STRUCT_CONVENTION;
8297 }
b13c8ab2
YQ
8298 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8299 {
8300 if (arm_return_in_memory (gdbarch, valtype))
8301 return RETURN_VALUE_STRUCT_CONVENTION;
8302 }
7052e42c 8303
2af48f68
PB
8304 if (writebuf)
8305 arm_store_return_value (valtype, regcache, writebuf);
8306
8307 if (readbuf)
8308 arm_extract_return_value (valtype, regcache, readbuf);
8309
8310 return RETURN_VALUE_REGISTER_CONVENTION;
8311}
8312
8313
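/* Recover the longjmp target PC from the jmp_buf pointed to by the first
   argument register (r0).  */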
9df628e0 8314static int
60ade65d 8315arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8316{
e17a4113
UW
8317 struct gdbarch *gdbarch = get_frame_arch (frame);
8318 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8319 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8320 CORE_ADDR jb_addr;
e362b510 8321 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8322
60ade65d 8323 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8324
8325 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8326 INT_REGISTER_SIZE))
9df628e0
RE
8327 return 0;
8328
e17a4113 8329 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8330 return 1;
8331}
8332
faa95490
DJ
8333/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8334 return the target PC. Otherwise return 0. */
c906108c
SS
8335
8336CORE_ADDR
52f729a7 8337arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8338{
2c02bd72 8339 const char *name;
faa95490 8340 int namelen;
c906108c
SS
8341 CORE_ADDR start_addr;
8342
8343 /* Find the starting address and name of the function containing the PC. */
8344 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8345 {
8346 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8347 check here. */
8348 start_addr = arm_skip_bx_reg (frame, pc);
8349 if (start_addr != 0)
8350 return start_addr;
8351
8352 return 0;
8353 }
c906108c 8354
faa95490
DJ
8355 /* If PC is in a Thumb call or return stub, return the address of the
8356 target PC, which is in a register. The thunk functions are called
8357 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
8358 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8359 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8360 if (startswith (name, "_call_via_")
8361 || startswith (name, "__ARM_call_via_"))
c906108c 8362 {
ed9a39eb
JM
8363 /* Use the name suffix to determine which register contains the
8364 target PC. */
c5aa993b
JM
8365 static char *table[15] =
8366 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8367 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8368 };
c906108c 8369 int regno;
faa95490 8370 int offset = strlen (name) - 2;
c906108c
SS
8371
8372 for (regno = 0; regno <= 14; regno++)
faa95490 8373 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8374 return get_frame_register_unsigned (frame, regno);
c906108c 8375 }
ed9a39eb 8376
faa95490
DJ
8377 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8378 non-interworking calls to foo. We could decode the stubs
8379 to find the target but it's easier to use the symbol table. */
8380 namelen = strlen (name);
8381 if (name[0] == '_' && name[1] == '_'
8382 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8383 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8384 || (namelen > 2 + strlen ("_from_arm")
61012eef 8385 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8386 {
8387 char *target_name;
8388 int target_len = namelen - 2;
3b7344d5 8389 struct bound_minimal_symbol minsym;
faa95490
DJ
8390 struct objfile *objfile;
8391 struct obj_section *sec;
8392
8393 if (name[namelen - 1] == 'b')
8394 target_len -= strlen ("_from_thumb");
8395 else
8396 target_len -= strlen ("_from_arm");
8397
224c3ddb 8398 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8399 memcpy (target_name, name + 2, target_len);
8400 target_name[target_len] = '\0';
8401
8402 sec = find_pc_section (pc);
8403 objfile = (sec == NULL) ? NULL : sec->objfile;
8404 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8405 if (minsym.minsym != NULL)
77e371c0 8406 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8407 else
8408 return 0;
8409 }
8410
c5aa993b 8411 return 0; /* not a stub */
c906108c
SS
8412}
8413
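/* Worked example for the suffix stripping above: for the GNU ld stub
   "__foo_from_thumb", namelen is 16, so target_len ends up as
   16 - 2 - strlen ("_from_thumb") = 3; copying three bytes starting at
   name + 2 yields "foo", and the minimal symbol "foo" supplies the real
   target address.  */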
afd7eef0
RE
8414static void
8415set_arm_command (char *args, int from_tty)
8416{
edefbb7c
AC
8417 printf_unfiltered (_("\
8418\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8419 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8420}
8421
8422static void
8423show_arm_command (char *args, int from_tty)
8424{
26304000 8425 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8426}
8427
28e97307
DJ
8428static void
8429arm_update_current_architecture (void)
fd50bc42 8430{
28e97307 8431 struct gdbarch_info info;
fd50bc42 8432
28e97307 8433 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8434 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8435 return;
fd50bc42 8436
28e97307
DJ
8437 /* Update the architecture. */
8438 gdbarch_info_init (&info);
fd50bc42 8439
28e97307 8440 if (!gdbarch_update_p (info))
9b20d036 8441 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8442}
8443
8444static void
8445set_fp_model_sfunc (char *args, int from_tty,
8446 struct cmd_list_element *c)
8447{
570dc176 8448 int fp_model;
fd50bc42
RE
8449
8450 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8451 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8452 {
aead7601 8453 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8454 break;
8455 }
8456
8457 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8458 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8459 current_fp_model);
8460
28e97307 8461 arm_update_current_architecture ();
fd50bc42
RE
8462}
8463
8464static void
08546159
AC
8465show_fp_model (struct ui_file *file, int from_tty,
8466 struct cmd_list_element *c, const char *value)
fd50bc42 8467{
f5656ead 8468 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8469
28e97307 8470 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8471 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8472 fprintf_filtered (file, _("\
8473The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8474 fp_model_strings[tdep->fp_model]);
8475 else
8476 fprintf_filtered (file, _("\
8477The current ARM floating point model is \"%s\".\n"),
8478 fp_model_strings[arm_fp_model]);
8479}
8480
8481static void
8482arm_set_abi (char *args, int from_tty,
8483 struct cmd_list_element *c)
8484{
570dc176 8485 int arm_abi;
28e97307
DJ
8486
8487 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8488 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8489 {
aead7601 8490 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8491 break;
8492 }
8493
8494 if (arm_abi == ARM_ABI_LAST)
8495 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8496 arm_abi_string);
8497
8498 arm_update_current_architecture ();
8499}
8500
8501static void
8502arm_show_abi (struct ui_file *file, int from_tty,
8503 struct cmd_list_element *c, const char *value)
8504{
f5656ead 8505 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8506
8507 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8508 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8509 fprintf_filtered (file, _("\
8510The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8511 arm_abi_strings[tdep->arm_abi]);
8512 else
8513 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8514 arm_abi_string);
fd50bc42
RE
8515}
8516
0428b8f5
DJ
8517static void
8518arm_show_fallback_mode (struct ui_file *file, int from_tty,
8519 struct cmd_list_element *c, const char *value)
8520{
0963b4bd
MS
8521 fprintf_filtered (file,
8522 _("The current execution mode assumed "
8523 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8524 arm_fallback_mode_string);
8525}
8526
8527static void
8528arm_show_force_mode (struct ui_file *file, int from_tty,
8529 struct cmd_list_element *c, const char *value)
8530{
0963b4bd
MS
8531 fprintf_filtered (file,
8532 _("The current execution mode assumed "
8533 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8534 arm_force_mode_string);
8535}
8536
afd7eef0
RE
8537/* If the user changes the register disassembly style used for info
8538 register and other commands, we have to also switch the style used
8539 in opcodes for disassembly output. This function is run in the "set
8540 arm disassembly" command, and does that. */
bc90b915
FN
8541
8542static void
afd7eef0 8543set_disassembly_style_sfunc (char *args, int from_tty,
65b48a81 8544 struct cmd_list_element *c)
bc90b915 8545{
65b48a81
PB
8546 /* Convert the short style name into the long style name (eg, reg-names-*)
8547 before calling the generic set_disassembler_options() function. */
8548 std::string long_name = std::string ("reg-names-") + disassembly_style;
8549 set_disassembler_options (&long_name[0]);
8550}
8551
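/* Usage note: assuming the opcodes library exposes the usual "reg-names-*"
   options, a command such as

     (gdb) set arm disassembler gcc

   leaves "gcc" in disassembly_style, and the function above forwards
   "reg-names-gcc" to set_disassembler_options ().  */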
8552static void
8553show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8554 struct cmd_list_element *c, const char *value)
8555{
8556 struct gdbarch *gdbarch = get_current_arch ();
8557 char *options = get_disassembler_options (gdbarch);
8558 const char *style = "";
8559 int len = 0;
8560 char *opt;
8561
8562 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8563 if (CONST_STRNEQ (opt, "reg-names-"))
8564 {
8565 style = &opt[strlen ("reg-names-")];
8566 len = strcspn (style, ",");
8567 }
8568
8569 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
bc90b915
FN
8570}
8571\f
966fbf70 8572/* Return the ARM register name corresponding to register I. */
a208b0cb 8573static const char *
d93859e2 8574arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8575{
58d6951d
DJ
8576 const int num_regs = gdbarch_num_regs (gdbarch);
8577
8578 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8579 && i >= num_regs && i < num_regs + 32)
8580 {
8581 static const char *const vfp_pseudo_names[] = {
8582 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8583 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8584 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8585 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8586 };
8587
8588 return vfp_pseudo_names[i - num_regs];
8589 }
8590
8591 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8592 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8593 {
8594 static const char *const neon_pseudo_names[] = {
8595 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8596 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8597 };
8598
8599 return neon_pseudo_names[i - num_regs - 32];
8600 }
8601
ff6f572f
DJ
8602 if (i >= ARRAY_SIZE (arm_register_names))
8603 /* These registers are only supported on targets which supply
8604 an XML description. */
8605 return "";
8606
966fbf70
RE
8607 return arm_register_names[i];
8608}
8609
082fc60d
RE
8610/* Test whether the coff symbol specific value corresponds to a Thumb
8611 function. */
8612
8613static int
8614coff_sym_is_thumb (int val)
8615{
f8bf5763
PM
8616 return (val == C_THUMBEXT
8617 || val == C_THUMBSTAT
8618 || val == C_THUMBEXTFUNC
8619 || val == C_THUMBSTATFUNC
8620 || val == C_THUMBLABEL);
082fc60d
RE
8621}
8622
8623/* arm_coff_make_msymbol_special()
8624 arm_elf_make_msymbol_special()
8625
8626 These functions test whether the COFF or ELF symbol corresponds to
8627 an address in thumb code, and set a "special" bit in a minimal
8628 symbol to indicate that it does. */
8629
34e8f22d 8630static void
082fc60d
RE
8631arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8632{
39d911fc
TP
8633 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8634
8635 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8636 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8637 MSYMBOL_SET_SPECIAL (msym);
8638}
8639
34e8f22d 8640static void
082fc60d
RE
8641arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8642{
8643 if (coff_sym_is_thumb (val))
8644 MSYMBOL_SET_SPECIAL (msym);
8645}
8646
60c5725c 8647static void
c1bd65d0 8648arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8649{
9a3c8263 8650 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8651 unsigned int i;
8652
8653 for (i = 0; i < objfile->obfd->section_count; i++)
8654 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8655}
8656
8657static void
8658arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8659 asymbol *sym)
8660{
8661 const char *name = bfd_asymbol_name (sym);
8662 struct arm_per_objfile *data;
8663 VEC(arm_mapping_symbol_s) **map_p;
8664 struct arm_mapping_symbol new_map_sym;
8665
8666 gdb_assert (name[0] == '$');
8667 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8668 return;
8669
9a3c8263
SM
8670 data = (struct arm_per_objfile *) objfile_data (objfile,
8671 arm_objfile_data_key);
60c5725c
DJ
8672 if (data == NULL)
8673 {
8674 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8675 struct arm_per_objfile);
8676 set_objfile_data (objfile, arm_objfile_data_key, data);
8677 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8678 objfile->obfd->section_count,
8679 VEC(arm_mapping_symbol_s) *);
8680 }
8681 map_p = &data->section_maps[bfd_get_section (sym)->index];
8682
8683 new_map_sym.value = sym->value;
8684 new_map_sym.type = name[1];
8685
8686 /* Assume that most mapping symbols appear in order of increasing
8687 value. If they were randomly distributed, it would be faster to
8688 always push here and then sort at first use. */
8689 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8690 {
8691 struct arm_mapping_symbol *prev_map_sym;
8692
8693 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8694 if (prev_map_sym->value >= sym->value)
8695 {
8696 unsigned int idx;
8697 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8698 arm_compare_mapping_symbols);
8699 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8700 return;
8701 }
8702 }
8703
8704 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8705}
8706
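/* For illustration: the symbols recorded above follow the ARM ELF
   mapping-symbol convention, e.g. a section laid out as

     $a at offset 0x00   (ARM code)
     $d at offset 0x20   (literal pool / data)
     $a at offset 0x28   (ARM code resumes)
     $t at offset 0x40   (Thumb code)

   ends up as a per-section vector sorted by value, which lets
   arm_pc_is_thumb-style lookups binary-search for the region containing a
   given address.  The offsets shown are hypothetical.  */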
756fe439 8707static void
61a1198a 8708arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8709{
9779414d 8710 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8711 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8712
8713 /* If necessary, set the T bit. */
8714 if (arm_apcs_32)
8715 {
9779414d 8716 ULONGEST val, t_bit;
61a1198a 8717 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8718 t_bit = arm_psr_thumb_bit (gdbarch);
8719 if (arm_pc_is_thumb (gdbarch, pc))
8720 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8721 val | t_bit);
756fe439 8722 else
61a1198a 8723 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8724 val & ~t_bit);
756fe439
DJ
8725 }
8726}
123dc839 8727
58d6951d
DJ
8728/* Read the contents of a NEON quad register, by reading from two
8729 double registers. This is used to implement the quad pseudo
8730 registers, and for argument passing in case the quad registers are
8731 missing; vectors are passed in quad registers when using the VFP
8732 ABI, even if a NEON unit is not present. REGNUM is the index of
8733 the quad register, in [0, 15]. */
8734
05d1431c 8735static enum register_status
58d6951d
DJ
8736arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8737 int regnum, gdb_byte *buf)
8738{
8739 char name_buf[4];
8740 gdb_byte reg_buf[8];
8741 int offset, double_regnum;
05d1431c 8742 enum register_status status;
58d6951d 8743
8c042590 8744 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8745 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8746 strlen (name_buf));
8747
8748 /* d0 is always the least significant half of q0. */
8749 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8750 offset = 8;
8751 else
8752 offset = 0;
8753
05d1431c
PA
8754 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8755 if (status != REG_VALID)
8756 return status;
58d6951d
DJ
8757 memcpy (buf + offset, reg_buf, 8);
8758
8759 offset = 8 - offset;
05d1431c
PA
8760 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8761 if (status != REG_VALID)
8762 return status;
58d6951d 8763 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8764
8765 return REG_VALID;
58d6951d
DJ
8766}
8767
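/* Worked example: q1 maps onto the double registers d2 and d3.  On a
   little-endian target the function above copies d2 into bytes 0-7 of BUF
   and d3 into bytes 8-15; on a big-endian target the two halves swap
   places, since d2 remains the least significant half of q1 either way.  */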
05d1431c 8768static enum register_status
58d6951d
DJ
8769arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8770 int regnum, gdb_byte *buf)
8771{
8772 const int num_regs = gdbarch_num_regs (gdbarch);
8773 char name_buf[4];
8774 gdb_byte reg_buf[8];
8775 int offset, double_regnum;
8776
8777 gdb_assert (regnum >= num_regs);
8778 regnum -= num_regs;
8779
8780 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8781 /* Quad-precision register. */
05d1431c 8782 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8783 else
8784 {
05d1431c
PA
8785 enum register_status status;
8786
58d6951d
DJ
8787 /* Single-precision register. */
8788 gdb_assert (regnum < 32);
8789
8790 /* s0 is always the least significant half of d0. */
8791 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8792 offset = (regnum & 1) ? 0 : 4;
8793 else
8794 offset = (regnum & 1) ? 4 : 0;
8795
8c042590 8796 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8797 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8798 strlen (name_buf));
8799
05d1431c
PA
8800 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8801 if (status == REG_VALID)
8802 memcpy (buf, reg_buf + offset, 4);
8803 return status;
58d6951d
DJ
8804 }
8805}
8806
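/* Worked example: pseudo register s5 is the odd half of d2, so the code
   above forms the name "d2", maps it to a raw register number, and copies
   four bytes from offset 4 of the d2 contents on a little-endian target
   (offset 0 on a big-endian target).  */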
8807/* Store the contents of BUF to a NEON quad register, by writing to
8808 two double registers. This is used to implement the quad pseudo
8809 registers, and for argument passing in case the quad registers are
8810 missing; vectors are passed in quad registers when using the VFP
8811 ABI, even if a NEON unit is not present. REGNUM is the index
8812 of the quad register, in [0, 15]. */
8813
8814static void
8815arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8816 int regnum, const gdb_byte *buf)
8817{
8818 char name_buf[4];
58d6951d
DJ
8819 int offset, double_regnum;
8820
8c042590 8821 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8822 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8823 strlen (name_buf));
8824
8825 /* d0 is always the least significant half of q0. */
8826 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8827 offset = 8;
8828 else
8829 offset = 0;
8830
8831 regcache_raw_write (regcache, double_regnum, buf + offset);
8832 offset = 8 - offset;
8833 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8834}
8835
8836static void
8837arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8838 int regnum, const gdb_byte *buf)
8839{
8840 const int num_regs = gdbarch_num_regs (gdbarch);
8841 char name_buf[4];
8842 gdb_byte reg_buf[8];
8843 int offset, double_regnum;
8844
8845 gdb_assert (regnum >= num_regs);
8846 regnum -= num_regs;
8847
8848 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8849 /* Quad-precision register. */
8850 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8851 else
8852 {
8853 /* Single-precision register. */
8854 gdb_assert (regnum < 32);
8855
8856 /* s0 is always the least significant half of d0. */
8857 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8858 offset = (regnum & 1) ? 0 : 4;
8859 else
8860 offset = (regnum & 1) ? 4 : 0;
8861
8c042590 8862 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8863 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8864 strlen (name_buf));
8865
8866 regcache_raw_read (regcache, double_regnum, reg_buf);
8867 memcpy (reg_buf + offset, buf, 4);
8868 regcache_raw_write (regcache, double_regnum, reg_buf);
8869 }
8870}
8871
123dc839
DJ
8872static struct value *
8873value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8874{
9a3c8263 8875 const int *reg_p = (const int *) baton;
123dc839
DJ
8876 return value_of_register (*reg_p, frame);
8877}
97e03143 8878\f
70f80edf
JT
8879static enum gdb_osabi
8880arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8881{
2af48f68 8882 unsigned int elfosabi;
70f80edf 8883 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8884
70f80edf 8885 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8886
28e97307
DJ
8887 if (elfosabi == ELFOSABI_ARM)
8888 /* GNU tools use this value. Check note sections in this case,
8889 as well. */
8890 bfd_map_over_sections (abfd,
8891 generic_elf_osabi_sniff_abi_tag_sections,
8892 &osabi);
97e03143 8893
28e97307 8894 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8895 return osabi;
97e03143
RE
8896}
8897
54483882
YQ
8898static int
8899arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8900 struct reggroup *group)
8901{
2c291032
YQ
8902 /* FPS register's type is INT, but it belongs to float_reggroup. Besides
8903 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8904 all_reggroup, of course. */
54483882 8905 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8906 return (group == float_reggroup
8907 || group == save_reggroup
8908 || group == restore_reggroup
8909 || group == all_reggroup);
54483882
YQ
8910 else
8911 return default_register_reggroup_p (gdbarch, regnum, group);
8912}
8913
25f8c692
JL
8914\f
8915/* For backward-compatibility we allow two 'g' packet lengths with
8916 the remote protocol depending on whether FPA registers are
8917 supplied. M-profile targets do not have FPA registers, but some
8918 stubs already exist in the wild which use a 'g' packet that
8919 supplies them, albeit with dummy values. The packet format which
8920 includes FPA registers should be considered deprecated for
8921 M-profile targets. */
8922
8923static void
8924arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8925{
8926 if (gdbarch_tdep (gdbarch)->is_m)
8927 {
8928 /* If we know from the executable this is an M-profile target,
8929 cater for remote targets whose register set layout is the
8930 same as the FPA layout. */
8931 register_remote_g_packet_guess (gdbarch,
03145bf4 8932 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8933 (16 * INT_REGISTER_SIZE)
8934 + (8 * FP_REGISTER_SIZE)
8935 + (2 * INT_REGISTER_SIZE),
8936 tdesc_arm_with_m_fpa_layout);
8937
8938 /* The regular M-profile layout. */
8939 register_remote_g_packet_guess (gdbarch,
8940 /* r0-r12,sp,lr,pc; xpsr */
8941 (16 * INT_REGISTER_SIZE)
8942 + INT_REGISTER_SIZE,
8943 tdesc_arm_with_m);
3184d3f9
JL
8944
8945 /* M-profile plus M4F VFP. */
8946 register_remote_g_packet_guess (gdbarch,
8947 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8948 (16 * INT_REGISTER_SIZE)
8949 + (16 * VFP_REGISTER_SIZE)
8950 + (2 * INT_REGISTER_SIZE),
8951 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8952 }
8953
8954 /* Otherwise we don't have a useful guess. */
8955}
8956
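/* Worked sizes, assuming the usual 4-byte integer registers, 12-byte FPA
   registers and 8-byte VFP double registers: the three guesses above come to

     FPA layout    : 16*4 + 8*12 + 2*4 = 168 bytes
     plain M       : 16*4 + 4         =  68 bytes
     M-profile VFP : 16*4 + 16*8 + 2*4 = 200 bytes

   so the remote layer can tell the stub variants apart purely from the
   length of the 'g' reply.  */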
7eb89530
YQ
8957/* Implement the code_of_frame_writable gdbarch method. */
8958
8959static int
8960arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8961{
8962 if (gdbarch_tdep (gdbarch)->is_m
8963 && get_frame_type (frame) == SIGTRAMP_FRAME)
8964 {
8965 /* M-profile exception frames return to some magic PCs, which
8966 aren't writable at all. */
8967 return 0;
8968 }
8969 else
8970 return 1;
8971}
8972
70f80edf 8973\f
da3c6d4a
MS
8974/* Initialize the current architecture based on INFO. If possible,
8975 re-use an architecture from ARCHES, which is a list of
8976 architectures already created during this debugging session.
97e03143 8977
da3c6d4a
MS
8978 Called e.g. at program startup, when reading a core file, and when
8979 reading a binary file. */
97e03143 8980
39bbf761
RE
8981static struct gdbarch *
8982arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8983{
97e03143 8984 struct gdbarch_tdep *tdep;
39bbf761 8985 struct gdbarch *gdbarch;
28e97307
DJ
8986 struct gdbarch_list *best_arch;
8987 enum arm_abi_kind arm_abi = arm_abi_global;
8988 enum arm_float_model fp_model = arm_fp_model;
123dc839 8989 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8990 int i, is_m = 0;
330c6ca9 8991 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8992 int have_wmmx_registers = 0;
58d6951d 8993 int have_neon = 0;
ff6f572f 8994 int have_fpa_registers = 1;
9779414d
DJ
8995 const struct target_desc *tdesc = info.target_desc;
8996
8997 /* If we have an object to base this architecture on, try to determine
8998 its ABI. */
8999
9000 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9001 {
9002 int ei_osabi, e_flags;
9003
9004 switch (bfd_get_flavour (info.abfd))
9005 {
9779414d
DJ
9006 case bfd_target_coff_flavour:
9007 /* Assume it's an old APCS-style ABI. */
9008 /* XXX WinCE? */
9009 arm_abi = ARM_ABI_APCS;
9010 break;
9011
9012 case bfd_target_elf_flavour:
9013 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9014 e_flags = elf_elfheader (info.abfd)->e_flags;
9015
9016 if (ei_osabi == ELFOSABI_ARM)
9017 {
9018 /* GNU tools used to use this value, but do not for EABI
9019 objects. There's nowhere to tag an EABI version
9020 anyway, so assume APCS. */
9021 arm_abi = ARM_ABI_APCS;
9022 }
d403db27 9023 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
9024 {
9025 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9026 int attr_arch, attr_profile;
9027
9028 switch (eabi_ver)
9029 {
9030 case EF_ARM_EABI_UNKNOWN:
9031 /* Assume GNU tools. */
9032 arm_abi = ARM_ABI_APCS;
9033 break;
9034
9035 case EF_ARM_EABI_VER4:
9036 case EF_ARM_EABI_VER5:
9037 arm_abi = ARM_ABI_AAPCS;
9038 /* EABI binaries default to VFP float ordering.
9039 They may also contain build attributes that can
9040 be used to identify if the VFP argument-passing
9041 ABI is in use. */
9042 if (fp_model == ARM_FLOAT_AUTO)
9043 {
9044#ifdef HAVE_ELF
9045 switch (bfd_elf_get_obj_attr_int (info.abfd,
9046 OBJ_ATTR_PROC,
9047 Tag_ABI_VFP_args))
9048 {
b35b0298 9049 case AEABI_VFP_args_base:
9779414d
DJ
9050 /* "The user intended FP parameter/result
9051 passing to conform to AAPCS, base
9052 variant". */
9053 fp_model = ARM_FLOAT_SOFT_VFP;
9054 break;
b35b0298 9055 case AEABI_VFP_args_vfp:
9779414d
DJ
9056 /* "The user intended FP parameter/result
9057 passing to conform to AAPCS, VFP
9058 variant". */
9059 fp_model = ARM_FLOAT_VFP;
9060 break;
b35b0298 9061 case AEABI_VFP_args_toolchain:
9779414d
DJ
9062 /* "The user intended FP parameter/result
9063 passing to conform to tool chain-specific
9064 conventions" - we don't know any such
9065 conventions, so leave it as "auto". */
9066 break;
b35b0298 9067 case AEABI_VFP_args_compatible:
5c294fee
TG
9068 /* "Code is compatible with both the base
9069 and VFP variants; the user did not permit
9070 non-variadic functions to pass FP
9071 parameters/results" - leave it as
9072 "auto". */
9073 break;
9779414d
DJ
9074 default:
9075 /* Attribute value not mentioned in the
5c294fee 9076 November 2012 ABI, so leave it as
9779414d
DJ
9077 "auto". */
9078 break;
9079 }
9080#else
9081 fp_model = ARM_FLOAT_SOFT_VFP;
9082#endif
9083 }
9084 break;
9085
9086 default:
9087 /* Leave it as "auto". */
9088 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9089 break;
9090 }
9091
9092#ifdef HAVE_ELF
9093 /* Detect M-profile programs. This only works if the
9094 executable file includes build attributes; GCC does
9095 copy them to the executable, but e.g. RealView does
9096 not. */
9097 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9098 Tag_CPU_arch);
0963b4bd
MS
9099 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9100 OBJ_ATTR_PROC,
9779414d
DJ
9101 Tag_CPU_arch_profile);
9102 /* GCC specifies the profile for v6-M; RealView only
9103 specifies the profile for architectures starting with
9104 V7 (as opposed to architectures with a tag
9105 numerically greater than TAG_CPU_ARCH_V7). */
9106 if (!tdesc_has_registers (tdesc)
9107 && (attr_arch == TAG_CPU_ARCH_V6_M
9108 || attr_arch == TAG_CPU_ARCH_V6S_M
9109 || attr_profile == 'M'))
25f8c692 9110 is_m = 1;
9779414d
DJ
9111#endif
9112 }
9113
9114 if (fp_model == ARM_FLOAT_AUTO)
9115 {
9116 int e_flags = elf_elfheader (info.abfd)->e_flags;
9117
9118 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9119 {
9120 case 0:
9121 /* Leave it as "auto". Strictly speaking this case
9122 means FPA, but almost nobody uses that now, and
9123 many toolchains fail to set the appropriate bits
9124 for the floating-point model they use. */
9125 break;
9126 case EF_ARM_SOFT_FLOAT:
9127 fp_model = ARM_FLOAT_SOFT_FPA;
9128 break;
9129 case EF_ARM_VFP_FLOAT:
9130 fp_model = ARM_FLOAT_VFP;
9131 break;
9132 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9133 fp_model = ARM_FLOAT_SOFT_VFP;
9134 break;
9135 }
9136 }
9137
9138 if (e_flags & EF_ARM_BE8)
9139 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9140
9141 break;
9142
9143 default:
9144 /* Leave it as "auto". */
9145 break;
9146 }
9147 }
123dc839
DJ
9148
9149 /* Check any target description for validity. */
9779414d 9150 if (tdesc_has_registers (tdesc))
123dc839
DJ
9151 {
9152 /* For most registers we require GDB's default names; but also allow
9153 the numeric names for sp / lr / pc, as a convenience. */
9154 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9155 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9156 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9157
9158 const struct tdesc_feature *feature;
58d6951d 9159 int valid_p;
123dc839 9160
9779414d 9161 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9162 "org.gnu.gdb.arm.core");
9163 if (feature == NULL)
9779414d
DJ
9164 {
9165 feature = tdesc_find_feature (tdesc,
9166 "org.gnu.gdb.arm.m-profile");
9167 if (feature == NULL)
9168 return NULL;
9169 else
9170 is_m = 1;
9171 }
123dc839
DJ
9172
9173 tdesc_data = tdesc_data_alloc ();
9174
9175 valid_p = 1;
9176 for (i = 0; i < ARM_SP_REGNUM; i++)
9177 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9178 arm_register_names[i]);
9179 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9180 ARM_SP_REGNUM,
9181 arm_sp_names);
9182 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9183 ARM_LR_REGNUM,
9184 arm_lr_names);
9185 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9186 ARM_PC_REGNUM,
9187 arm_pc_names);
9779414d
DJ
9188 if (is_m)
9189 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9190 ARM_PS_REGNUM, "xpsr");
9191 else
9192 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9193 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9194
9195 if (!valid_p)
9196 {
9197 tdesc_data_cleanup (tdesc_data);
9198 return NULL;
9199 }
9200
9779414d 9201 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9202 "org.gnu.gdb.arm.fpa");
9203 if (feature != NULL)
9204 {
9205 valid_p = 1;
9206 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9207 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9208 arm_register_names[i]);
9209 if (!valid_p)
9210 {
9211 tdesc_data_cleanup (tdesc_data);
9212 return NULL;
9213 }
9214 }
ff6f572f
DJ
9215 else
9216 have_fpa_registers = 0;
9217
9779414d 9218 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9219 "org.gnu.gdb.xscale.iwmmxt");
9220 if (feature != NULL)
9221 {
9222 static const char *const iwmmxt_names[] = {
9223 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9224 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9225 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9226 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9227 };
9228
9229 valid_p = 1;
9230 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9231 valid_p
9232 &= tdesc_numbered_register (feature, tdesc_data, i,
9233 iwmmxt_names[i - ARM_WR0_REGNUM]);
9234
9235 /* Check for the control registers, but do not fail if they
9236 are missing. */
9237 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9238 tdesc_numbered_register (feature, tdesc_data, i,
9239 iwmmxt_names[i - ARM_WR0_REGNUM]);
9240
9241 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9242 valid_p
9243 &= tdesc_numbered_register (feature, tdesc_data, i,
9244 iwmmxt_names[i - ARM_WR0_REGNUM]);
9245
9246 if (!valid_p)
9247 {
9248 tdesc_data_cleanup (tdesc_data);
9249 return NULL;
9250 }
a56cc1ce
YQ
9251
9252 have_wmmx_registers = 1;
ff6f572f 9253 }
58d6951d
DJ
9254
9255 /* If we have a VFP unit, check whether the single precision registers
9256 are present. If not, then we will synthesize them as pseudo
9257 registers. */
9779414d 9258 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9259 "org.gnu.gdb.arm.vfp");
9260 if (feature != NULL)
9261 {
9262 static const char *const vfp_double_names[] = {
9263 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9264 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9265 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9266 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9267 };
9268
9269 /* Require the double precision registers. There must be either
9270 16 or 32. */
9271 valid_p = 1;
9272 for (i = 0; i < 32; i++)
9273 {
9274 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9275 ARM_D0_REGNUM + i,
9276 vfp_double_names[i]);
9277 if (!valid_p)
9278 break;
9279 }
2b9e5ea6
UW
9280 if (!valid_p && i == 16)
9281 valid_p = 1;
58d6951d 9282
2b9e5ea6
UW
9283 /* Also require FPSCR. */
9284 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9285 ARM_FPSCR_REGNUM, "fpscr");
9286 if (!valid_p)
58d6951d
DJ
9287 {
9288 tdesc_data_cleanup (tdesc_data);
9289 return NULL;
9290 }
9291
9292 if (tdesc_unnumbered_register (feature, "s0") == 0)
9293 have_vfp_pseudos = 1;
9294
330c6ca9 9295 vfp_register_count = i;
58d6951d
DJ
9296
9297 /* If we have VFP, also check for NEON. The architecture allows
9298 NEON without VFP (integer vector operations only), but GDB
9299 does not support that. */
9779414d 9300 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9301 "org.gnu.gdb.arm.neon");
9302 if (feature != NULL)
9303 {
9304 /* NEON requires 32 double-precision registers. */
9305 if (i != 32)
9306 {
9307 tdesc_data_cleanup (tdesc_data);
9308 return NULL;
9309 }
9310
9311 /* If there are quad registers defined by the stub, use
9312 their type; otherwise (normally) provide them with
9313 the default type. */
9314 if (tdesc_unnumbered_register (feature, "q0") == 0)
9315 have_neon_pseudos = 1;
9316
9317 have_neon = 1;
9318 }
9319 }
123dc839 9320 }
39bbf761 9321
28e97307
DJ
9322 /* If there is already a candidate, use it. */
9323 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9324 best_arch != NULL;
9325 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9326 {
b8926edc
DJ
9327 if (arm_abi != ARM_ABI_AUTO
9328 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9329 continue;
9330
b8926edc
DJ
9331 if (fp_model != ARM_FLOAT_AUTO
9332 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9333 continue;
9334
58d6951d
DJ
9335 /* There are various other properties in tdep that we do not
9336 need to check here: those derived from a target description,
9337 since gdbarches with a different target description are
9338 automatically disqualified. */
9339
9779414d
DJ
9340 /* Do check is_m, though, since it might come from the binary. */
9341 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9342 continue;
9343
28e97307
DJ
9344 /* Found a match. */
9345 break;
9346 }
97e03143 9347
28e97307 9348 if (best_arch != NULL)
123dc839
DJ
9349 {
9350 if (tdesc_data != NULL)
9351 tdesc_data_cleanup (tdesc_data);
9352 return best_arch->gdbarch;
9353 }
28e97307 9354
8d749320 9355 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9356 gdbarch = gdbarch_alloc (&info, tdep);
9357
28e97307
DJ
9358 /* Record additional information about the architecture we are defining.
9359 These are gdbarch discriminators, like the OSABI. */
9360 tdep->arm_abi = arm_abi;
9361 tdep->fp_model = fp_model;
9779414d 9362 tdep->is_m = is_m;
ff6f572f 9363 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9364 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9365 gdb_assert (vfp_register_count == 0
9366 || vfp_register_count == 16
9367 || vfp_register_count == 32);
9368 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9369 tdep->have_vfp_pseudos = have_vfp_pseudos;
9370 tdep->have_neon_pseudos = have_neon_pseudos;
9371 tdep->have_neon = have_neon;
08216dd7 9372
25f8c692
JL
9373 arm_register_g_packet_guesses (gdbarch);
9374
08216dd7 9375 /* Breakpoints. */
9d4fde75 9376 switch (info.byte_order_for_code)
67255d04
RE
9377 {
9378 case BFD_ENDIAN_BIG:
66e810cd
RE
9379 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9380 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9381 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9382 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9383
67255d04
RE
9384 break;
9385
9386 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9387 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9388 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9389 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9390 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9391
67255d04
RE
9392 break;
9393
9394 default:
9395 internal_error (__FILE__, __LINE__,
edefbb7c 9396 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9397 }
9398
d7b486e7
RE
9399 /* On ARM targets char defaults to unsigned. */
9400 set_gdbarch_char_signed (gdbarch, 0);
9401
cca44b1b
JB
9402 /* Note: for displaced stepping, this includes the breakpoint, and one word
9403 of additional scratch space. This setting isn't used for anything beside
9404 displaced stepping at present. */
9405 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9406
9df628e0 9407 /* This should be low enough for everything. */
97e03143 9408 tdep->lowest_pc = 0x20;
94c30b78 9409 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9410
7c00367c
MK
9411 /* The default, for both APCS and AAPCS, is to return small
9412 structures in registers. */
9413 tdep->struct_return = reg_struct_return;
9414
2dd604e7 9415 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9416 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9417
7eb89530
YQ
9418 if (is_m)
9419 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9420
756fe439
DJ
9421 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9422
148754e5 9423 /* Frame handling. */
a262aec2 9424 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9425 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9426 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9427
eb5492fa 9428 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9429
34e8f22d 9430 /* Address manipulation. */
34e8f22d
RE
9431 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9432
34e8f22d
RE
9433 /* Advance PC across function entry code. */
9434 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9435
c9cf6e20
MG
9436 /* Detect whether PC is at a point where the stack has been destroyed. */
9437 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9438
190dce09
UW
9439 /* Skip trampolines. */
9440 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9441
34e8f22d
RE
9442 /* The stack grows downward. */
9443 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9444
9445 /* Breakpoint manipulation. */
04180708
YQ
9446 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9447 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
833b7ab5
YQ
9448 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9449 arm_breakpoint_kind_from_current_state);
34e8f22d
RE
9450
9451 /* Information about registers, etc. */
34e8f22d
RE
9452 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9453 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9454 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9455 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9456 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9457
ff6f572f
DJ
9458 /* This "info float" is FPA-specific. Use the generic version if we
9459 do not have FPA. */
9460 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9461 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9462
26216b98 9463 /* Internal <-> external register number maps. */
ff6f572f 9464 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9465 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9466
34e8f22d
RE
9467 set_gdbarch_register_name (gdbarch, arm_register_name);
9468
9469 /* Returning results. */
2af48f68 9470 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9471
03d48a7d
RE
9472 /* Disassembly. */
9473 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9474
34e8f22d
RE
9475 /* Minsymbol frobbing. */
9476 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9477 set_gdbarch_coff_make_msymbol_special (gdbarch,
9478 arm_coff_make_msymbol_special);
60c5725c 9479 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9480
f9d67f43
DJ
9481 /* Thumb-2 IT block support. */
9482 set_gdbarch_adjust_breakpoint_address (gdbarch,
9483 arm_adjust_breakpoint_address);
9484
0d5de010
DJ
9485 /* Virtual tables. */
9486 set_gdbarch_vbit_in_delta (gdbarch, 1);
9487
97e03143 9488 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9489 gdbarch_init_osabi (info, gdbarch);
97e03143 9490
b39cc962
DJ
9491 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9492
eb5492fa 9493 /* Add some default predicates. */
2ae28aa9
YQ
9494 if (is_m)
9495 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9496 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9497 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9498 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9499 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9500 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9501
97e03143
RE
9502 /* Now we have tuned the configuration, set a few final things,
9503 based on what the OS ABI has told us. */
9504
b8926edc
DJ
9505 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9506 binaries are always marked. */
9507 if (tdep->arm_abi == ARM_ABI_AUTO)
9508 tdep->arm_abi = ARM_ABI_APCS;
9509
e3039479
UW
9510 /* Watchpoints are not steppable. */
9511 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9512
b8926edc
DJ
9513 /* We used to default to FPA for generic ARM, but almost nobody
9514 uses that now, and we now provide a way for the user to force
9515 the model. So default to the most useful variant. */
9516 if (tdep->fp_model == ARM_FLOAT_AUTO)
9517 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9518
9df628e0
RE
9519 if (tdep->jb_pc >= 0)
9520 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9521
08216dd7 9522 /* Floating point sizes and format. */
8da61cc4 9523 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9524 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9525 {
8da61cc4
DJ
9526 set_gdbarch_double_format
9527 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9528 set_gdbarch_long_double_format
9529 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9530 }
9531 else
9532 {
9533 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9534 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9535 }
9536
58d6951d
DJ
9537 if (have_vfp_pseudos)
9538 {
9539 /* NOTE: These are the only pseudo registers used by
9540 the ARM target at the moment. If more are added, a
9541 little more care in numbering will be needed. */
9542
9543 int num_pseudos = 32;
9544 if (have_neon_pseudos)
9545 num_pseudos += 16;
9546 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9547 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9548 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9549 }
9550
123dc839 9551 if (tdesc_data)
58d6951d
DJ
9552 {
9553 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9554
9779414d 9555 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9556
9557 /* Override tdesc_register_type to adjust the types of VFP
9558 registers for NEON. */
9559 set_gdbarch_register_type (gdbarch, arm_register_type);
9560 }
123dc839
DJ
9561
9562 /* Add standard register aliases. We add aliases even for those
9563 names which are used by the current architecture - it's simpler,
9564 and does no harm, since nothing ever lists user registers. */
9565 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9566 user_reg_add (gdbarch, arm_register_aliases[i].name,
9567 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9568
65b48a81
PB
9569 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9570 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9571
39bbf761
RE
9572 return gdbarch;
9573}
9574
97e03143 9575static void
2af46ca0 9576arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9577{
2af46ca0 9578 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9579
9580 if (tdep == NULL)
9581 return;
9582
edefbb7c 9583 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9584 (unsigned long) tdep->lowest_pc);
9585}
9586
a78f21af
AC
9587extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9588
c906108c 9589void
ed9a39eb 9590_initialize_arm_tdep (void)
c906108c 9591{
bc90b915 9592 long length;
53904c9e
AC
9593 const char *setname;
9594 const char *setdesc;
65b48a81 9595 int i, j;
edefbb7c
AC
9596 char regdesc[1024], *rdptr = regdesc;
9597 size_t rest = sizeof (regdesc);
085dd6e6 9598
42cf1509 9599 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9600
60c5725c 9601 arm_objfile_data_key
c1bd65d0 9602 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9603
0e9e9abd
UW
9604 /* Add ourselves to objfile event chain. */
9605 observer_attach_new_objfile (arm_exidx_new_objfile);
9606 arm_exidx_data_key
9607 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9608
70f80edf
JT
9609 /* Register an ELF OS ABI sniffer for ARM binaries. */
9610 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9611 bfd_target_elf_flavour,
9612 arm_elf_osabi_sniffer);
9613
9779414d
DJ
9614 /* Initialize the standard target descriptions. */
9615 initialize_tdesc_arm_with_m ();
25f8c692 9616 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9617 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9618 initialize_tdesc_arm_with_iwmmxt ();
9619 initialize_tdesc_arm_with_vfpv2 ();
9620 initialize_tdesc_arm_with_vfpv3 ();
9621 initialize_tdesc_arm_with_neon ();
9779414d 9622
afd7eef0
RE
9623 /* Add root prefix command for all "set arm"/"show arm" commands. */
9624 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9625 _("Various ARM-specific commands."),
afd7eef0
RE
9626 &setarmcmdlist, "set arm ", 0, &setlist);
9627
9628 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9629 _("Various ARM-specific commands."),
afd7eef0 9630 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9631
c5aa993b 9632
65b48a81
PB
9633 arm_disassembler_options = xstrdup ("reg-names-std");
9634 const disasm_options_t *disasm_options = disassembler_options_arm ();
9635 int num_disassembly_styles = 0;
9636 for (i = 0; disasm_options->name[i] != NULL; i++)
9637 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9638 num_disassembly_styles++;
9639
9640 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
8d749320 9641 valid_disassembly_styles = XNEWVEC (const char *,
65b48a81
PB
9642 num_disassembly_styles + 1);
9643 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9644 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9645 {
9646 size_t offset = strlen ("reg-names-");
9647 const char *style = disasm_options->name[i];
9648 valid_disassembly_styles[j++] = &style[offset];
9649 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9650 disasm_options->description[i]);
9651 rdptr += length;
9652 rest -= length;
9653 }
94c30b78 9654 /* Mark the end of valid options. */
65b48a81 9655 valid_disassembly_styles[num_disassembly_styles] = NULL;
c906108c 9656
edefbb7c 9657 /* Create the help text. */
d7e74731
PA
9658 std::string helptext = string_printf ("%s%s%s",
9659 _("The valid values are:\n"),
9660 regdesc,
9661 _("The default is \"std\"."));
ed9a39eb 9662
edefbb7c
AC
9663 add_setshow_enum_cmd("disassembler", no_class,
9664 valid_disassembly_styles, &disassembly_style,
9665 _("Set the disassembly style."),
9666 _("Show the disassembly style."),
09b0e4b0 9667 helptext.c_str (),
2c5b56ce 9668 set_disassembly_style_sfunc,
65b48a81 9669 show_disassembly_style_sfunc,
7376b4c2 9670 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9671
9672 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9673 _("Set usage of ARM 32-bit mode."),
9674 _("Show usage of ARM 32-bit mode."),
9675 _("When off, a 26-bit PC will be used."),
2c5b56ce 9676 NULL,
0963b4bd
MS
9677 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9678 mode is %s. */
26304000 9679 &setarmcmdlist, &showarmcmdlist);
c906108c 9680
fd50bc42 9681 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9682 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9683 _("Set the floating point type."),
9684 _("Show the floating point type."),
9685 _("auto - Determine the FP typefrom the OS-ABI.\n\
9686softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9687fpa - FPA co-processor (GCC compiled).\n\
9688softvfp - Software FP with pure-endian doubles.\n\
9689vfp - VFP co-processor."),
edefbb7c 9690 set_fp_model_sfunc, show_fp_model,
7376b4c2 9691 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9692
28e97307
DJ
9693 /* Add a command to allow the user to force the ABI. */
9694 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9695 _("Set the ABI."),
9696 _("Show the ABI."),
9697 NULL, arm_set_abi, arm_show_abi,
9698 &setarmcmdlist, &showarmcmdlist);
9699
0428b8f5
DJ
9700 /* Add two commands to allow the user to force the assumed
9701 execution mode. */
9702 add_setshow_enum_cmd ("fallback-mode", class_support,
9703 arm_mode_strings, &arm_fallback_mode_string,
9704 _("Set the mode assumed when symbols are unavailable."),
9705 _("Show the mode assumed when symbols are unavailable."),
9706 NULL, NULL, arm_show_fallback_mode,
9707 &setarmcmdlist, &showarmcmdlist);
9708 add_setshow_enum_cmd ("force-mode", class_support,
9709 arm_mode_strings, &arm_force_mode_string,
9710 _("Set the mode assumed even when symbols are available."),
9711 _("Show the mode assumed even when symbols are available."),
9712 NULL, NULL, arm_show_force_mode,
9713 &setarmcmdlist, &showarmcmdlist);
9714
6529d2dd 9715 /* Debugging flag. */
edefbb7c
AC
9716 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9717 _("Set ARM debugging."),
9718 _("Show ARM debugging."),
9719 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9720 NULL,
7915a72c 9721 NULL, /* FIXME: i18n: "ARM debugging is %s." */
26304000 9722 &setdebuglist, &showdebuglist);
c906108c 9723}
72508ac0
PO
9724
9725/* ARM-reversible process record data structures. */
9726
9727#define ARM_INSN_SIZE_BYTES 4
9728#define THUMB_INSN_SIZE_BYTES 2
9729#define THUMB2_INSN_SIZE_BYTES 4
9730
9731
71e396f9
LM
9732/* Position of the bit within a 32-bit ARM instruction
9733 that defines whether the instruction is a load or store. */
72508ac0
PO
9734#define INSN_S_L_BIT_NUM 20
9735
9736#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9737 do \
9738 { \
9739 unsigned int reg_len = LENGTH; \
9740 if (reg_len) \
9741 { \
9742 REGS = XNEWVEC (uint32_t, reg_len); \
9743 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9744 } \
9745 } \
9746 while (0)
9747
9748#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9749 do \
9750 { \
9751 unsigned int mem_len = LENGTH; \
9752 if (mem_len) \
9753 { \
9754 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9755 memcpy(&MEMS->len, &RECORD_BUF[0], \
9756 sizeof(struct arm_mem_r) * LENGTH); \
9757 } \
9758 } \
9759 while (0)
9760
9761/* Checks whether the insn has already been recorded (boolean expression). */
9762#define INSN_RECORDED(ARM_RECORD) \
9763 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9764
9765/* ARM memory record structure. */
9766struct arm_mem_r
9767{
9768 uint32_t len; /* Record length. */
bfbbec00 9769 uint32_t addr; /* Memory address. */
72508ac0
PO
9770};
9771
9772/* ARM instruction record contains opcode of current insn
9773 and execution state (before entry to decode_insn()),
9774 contains list of to-be-modified registers and
9775 memory blocks (on return from decode_insn()). */
9776
9777typedef struct insn_decode_record_t
9778{
9779 struct gdbarch *gdbarch;
9780 struct regcache *regcache;
9781 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9782 uint32_t arm_insn; /* Should accommodate thumb. */
9783 uint32_t cond; /* Condition code. */
9784 uint32_t opcode; /* Insn opcode. */
9785 uint32_t decode; /* Insn decode bits. */
9786 uint32_t mem_rec_count; /* No of mem records. */
9787 uint32_t reg_rec_count; /* No of reg records. */
9788 uint32_t *arm_regs; /* Registers to be saved for this record. */
9789 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9790} insn_decode_record;
9791
9792
9793/* Checks ARM SBZ and SBO mandatory fields. */
9794
9795static int
9796sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9797{
9798 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9799
9800 if (!len)
9801 return 1;
9802
9803 if (!sbo)
9804 ones = ~ones;
9805
9806 while (ones)
9807 {
9808 if (!(ones & sbo))
9809 {
9810 return 0;
9811 }
9812 ones = ones >> 1;
9813 }
9814 return 1;
9815}
9816
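/* Usage note: the BIT_NUM argument is 1-based, so a call such as

     sbo_sbz (insn, 13, 4, 1)

   inspects bits 12-15 of INSN and is intended to verify a should-be-one
   (SBO) field; passing 0 as the final argument requests the should-be-zero
   (SBZ) check instead.  */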
c6ec2b30
OJ
9817enum arm_record_result
9818{
9819 ARM_RECORD_SUCCESS = 0,
9820 ARM_RECORD_FAILURE = 1
9821};
9822
72508ac0
PO
9823typedef enum
9824{
9825 ARM_RECORD_STRH=1,
9826 ARM_RECORD_STRD
9827} arm_record_strx_t;
9828
9829typedef enum
9830{
9831 ARM_RECORD=1,
9832 THUMB_RECORD,
9833 THUMB2_RECORD
9834} record_type_t;
9835
9836
9837static int
9838arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9839 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9840{
9841
9842 struct regcache *reg_cache = arm_insn_r->regcache;
9843 ULONGEST u_regval[2]= {0};
9844
9845 uint32_t reg_src1 = 0, reg_src2 = 0;
9846 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9847
9848 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9849 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9850
9851 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9852 {
9853 /* 1) Handle misc store, immediate offset. */
9854 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9855 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9856 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9857 regcache_raw_read_unsigned (reg_cache, reg_src1,
9858 &u_regval[0]);
9859 if (ARM_PC_REGNUM == reg_src1)
9860 {
9861 /* If R15 was used as Rn, it reads as the current PC+8. */
9862 u_regval[0] = u_regval[0] + 8;
9863 }
9864 offset_8 = (immed_high << 4) | immed_low;
9865 /* Calculate target store address. */
9866 if (14 == arm_insn_r->opcode)
9867 {
9868 tgt_mem_addr = u_regval[0] + offset_8;
9869 }
9870 else
9871 {
9872 tgt_mem_addr = u_regval[0] - offset_8;
9873 }
9874 if (ARM_RECORD_STRH == str_type)
9875 {
9876 record_buf_mem[0] = 2;
9877 record_buf_mem[1] = tgt_mem_addr;
9878 arm_insn_r->mem_rec_count = 1;
9879 }
9880 else if (ARM_RECORD_STRD == str_type)
9881 {
9882 record_buf_mem[0] = 4;
9883 record_buf_mem[1] = tgt_mem_addr;
9884 record_buf_mem[2] = 4;
9885 record_buf_mem[3] = tgt_mem_addr + 4;
9886 arm_insn_r->mem_rec_count = 2;
9887 }
9888 }
9889 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9890 {
9891 /* 2) Store, register offset. */
9892 /* Get Rm. */
9893 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9894 /* Get Rn. */
9895 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9896 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9897 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9898 if (15 == reg_src2)
9899 {
9900 /* If R15 was used as Rn, it reads as the current PC+8. */
9901 u_regval[0] = u_regval[0] + 8;
9902 }
9903 /* Calculate target store address, Rn +/- Rm, register offset. */
9904 if (12 == arm_insn_r->opcode)
9905 {
9906 tgt_mem_addr = u_regval[0] + u_regval[1];
9907 }
9908 else
9909 {
9910 tgt_mem_addr = u_regval[1] - u_regval[0];
9911 }
9912 if (ARM_RECORD_STRH == str_type)
9913 {
9914 record_buf_mem[0] = 2;
9915 record_buf_mem[1] = tgt_mem_addr;
9916 arm_insn_r->mem_rec_count = 1;
9917 }
9918 else if (ARM_RECORD_STRD == str_type)
9919 {
9920 record_buf_mem[0] = 4;
9921 record_buf_mem[1] = tgt_mem_addr;
9922 record_buf_mem[2] = 4;
9923 record_buf_mem[3] = tgt_mem_addr + 4;
9924 arm_insn_r->mem_rec_count = 2;
9925 }
9926 }
9927 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9928 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9929 {
9930 /* 3) Store, immediate pre-indexed. */
9931 /* 5) Store, immediate post-indexed. */
9932 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9933 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9934 offset_8 = (immed_high << 4) | immed_low;
9935 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9936 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9937 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9938 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9939 {
9940 tgt_mem_addr = u_regval[0] + offset_8;
9941 }
9942 else
9943 {
9944 tgt_mem_addr = u_regval[0] - offset_8;
9945 }
9946 if (ARM_RECORD_STRH == str_type)
9947 {
9948 record_buf_mem[0] = 2;
9949 record_buf_mem[1] = tgt_mem_addr;
9950 arm_insn_r->mem_rec_count = 1;
9951 }
9952 else if (ARM_RECORD_STRD == str_type)
9953 {
9954 record_buf_mem[0] = 4;
9955 record_buf_mem[1] = tgt_mem_addr;
9956 record_buf_mem[2] = 4;
9957 record_buf_mem[3] = tgt_mem_addr + 4;
9958 arm_insn_r->mem_rec_count = 2;
9959 }
9960 /* Record Rn also as it changes. */
9961 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9962 arm_insn_r->reg_rec_count = 1;
9963 }
9964 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9965 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9966 {
9967 /* 4) Store, register pre-indexed. */
9968 /* 6) Store, register post-indexed. */
9969 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9970 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9971 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9972 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9973 /* Calculate target store address, Rn +/- Rm, register offset. */
9974 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9975 {
9976 tgt_mem_addr = u_regval[0] + u_regval[1];
9977 }
9978 else
9979 {
9980 tgt_mem_addr = u_regval[1] - u_regval[0];
9981 }
9982 if (ARM_RECORD_STRH == str_type)
9983 {
9984 record_buf_mem[0] = 2;
9985 record_buf_mem[1] = tgt_mem_addr;
9986 arm_insn_r->mem_rec_count = 1;
9987 }
9988 else if (ARM_RECORD_STRD == str_type)
9989 {
9990 record_buf_mem[0] = 4;
9991 record_buf_mem[1] = tgt_mem_addr;
9992 record_buf_mem[2] = 4;
9993 record_buf_mem[3] = tgt_mem_addr + 4;
9994 arm_insn_r->mem_rec_count = 2;
9995 }
9996 /* Record Rn also as it changes. */
9997 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9998 arm_insn_r->reg_rec_count = 1;
9999 }
10000 return 0;
10001}
10002
10003/* Handling ARM extension space insns. */
10004
10005static int
10006arm_record_extension_space (insn_decode_record *arm_insn_r)
10007{
10008 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
10009 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10010 uint32_t record_buf[8], record_buf_mem[8];
10011 uint32_t reg_src1 = 0;
72508ac0
PO
10012 struct regcache *reg_cache = arm_insn_r->regcache;
10013 ULONGEST u_regval = 0;
10014
10015 gdb_assert (!INSN_RECORDED(arm_insn_r));
10016 /* Handle unconditional insn extension space. */
10017
10018 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10019 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10020 if (arm_insn_r->cond)
10021 {
10022 /* PLD has no effect on architectural state, it just affects
10023 the caches. */
10024 if (5 == ((opcode1 & 0xE0) >> 5))
10025 {
10026 /* BLX(1) */
10027 record_buf[0] = ARM_PS_REGNUM;
10028 record_buf[1] = ARM_LR_REGNUM;
10029 arm_insn_r->reg_rec_count = 2;
10030 }
10031 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10032 }
10033
10034
10035 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10036 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10037 {
10038 ret = -1;
10039 /* Undefined instruction on ARM V5; need to handle if later
10040 versions define it. */
10041 }
10042
10043 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10044 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10045 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10046
10047 /* Handle arithmetic insn extension space. */
10048 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10049 && !INSN_RECORDED(arm_insn_r))
10050 {
10051 /* Handle MLA(S) and MUL(S). */
10052 if (0 <= insn_op1 && 3 >= insn_op1)
10053 {
10054 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10055 record_buf[1] = ARM_PS_REGNUM;
10056 arm_insn_r->reg_rec_count = 2;
10057 }
10058 else if (4 <= insn_op1 && 15 >= insn_op1)
10059 {
10060 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10061 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10062 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10063 record_buf[2] = ARM_PS_REGNUM;
10064 arm_insn_r->reg_rec_count = 3;
10065 }
10066 }
10067
10068 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10069 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10070 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10071
10072 /* Handle control insn extension space. */
10073
10074 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10075 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10076 {
10077 if (!bit (arm_insn_r->arm_insn,25))
10078 {
10079 if (!bits (arm_insn_r->arm_insn, 4, 7))
10080 {
10081 if ((0 == insn_op1) || (2 == insn_op1))
10082 {
10083 /* MRS. */
10084 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10085 arm_insn_r->reg_rec_count = 1;
10086 }
10087 else if (1 == insn_op1)
10088 {
10089 /* CPSR is going to be changed. */
10090 record_buf[0] = ARM_PS_REGNUM;
10091 arm_insn_r->reg_rec_count = 1;
10092 }
10093 else if (3 == insn_op1)
10094 {
10095 /* SPSR is going to be changed. */
10096 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
10097 return -1;
10098 }
10099 }
10100 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10101 {
10102 if (1 == insn_op1)
10103 {
10104 /* BX. */
10105 record_buf[0] = ARM_PS_REGNUM;
10106 arm_insn_r->reg_rec_count = 1;
10107 }
10108 else if (3 == insn_op1)
10109 {
10110 /* CLZ. */
10111 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10112 arm_insn_r->reg_rec_count = 1;
10113 }
10114 }
10115 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10116 {
10117 /* BLX. */
10118 record_buf[0] = ARM_PS_REGNUM;
10119 record_buf[1] = ARM_LR_REGNUM;
10120 arm_insn_r->reg_rec_count = 2;
10121 }
10122 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10123 {
10124 /* QADD, QSUB, QDADD, QDSUB */
10125 record_buf[0] = ARM_PS_REGNUM;
10126 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10127 arm_insn_r->reg_rec_count = 2;
10128 }
10129 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10130 {
10131 /* BKPT. */
10132 record_buf[0] = ARM_PS_REGNUM;
10133 record_buf[1] = ARM_LR_REGNUM;
10134 arm_insn_r->reg_rec_count = 2;
10135
10136 /* Save SPSR also; how? */
72508ac0
PO
10137 return -1;
10138 }
10139 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10140 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10141 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10142 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10143 )
10144 {
10145 if (0 == insn_op1 || 1 == insn_op1)
10146 {
10147 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10148 /* We don't optimize for SMULW<y>, where only Rd
10149 would need to be recorded. */
10150 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10151 record_buf[1] = ARM_PS_REGNUM;
10152 arm_insn_r->reg_rec_count = 2;
10153 }
10154 else if (2 == insn_op1)
10155 {
10156 /* SMLAL<x><y>. */
10157 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10158 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10159 arm_insn_r->reg_rec_count = 2;
10160 }
10161 else if (3 == insn_op1)
10162 {
10163 /* SMUL<x><y>. */
10164 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10165 arm_insn_r->reg_rec_count = 1;
10166 }
10167 }
10168 }
10169 else
10170 {
10171 /* MSR : immediate form. */
10172 if (1 == insn_op1)
10173 {
10174 /* CPSR is going to be changed. */
10175 record_buf[0] = ARM_PS_REGNUM;
10176 arm_insn_r->reg_rec_count = 1;
10177 }
10178 else if (3 == insn_op1)
10179 {
10180 /* SPSR is going to be changed. */
10181 /* We need to get the SPSR value, which is yet to be done. */
72508ac0
PO
10182 return -1;
10183 }
10184 }
10185 }
10186
10187 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10188 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10189 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10190
10191 /* Handle load/store insn extension space. */
10192
10193 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10194 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10195 && !INSN_RECORDED(arm_insn_r))
10196 {
10197 /* SWP/SWPB. */
10198 if (0 == insn_op1)
10199 {
10200 /* These insns change both a register and memory. */
10201 /* SWP or SWPB insn. */
10202 /* Get memory address given by Rn. */
10203 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10204 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10205 /* SWP insn? It swaps a word. */
10206 if (8 == arm_insn_r->opcode)
10207 {
10208 record_buf_mem[0] = 4;
10209 }
10210 else
10211 {
10212 /* SWPB insn, swaps only byte. */
10213 record_buf_mem[0] = 1;
10214 }
10215 record_buf_mem[1] = u_regval;
10216 arm_insn_r->mem_rec_count = 1;
10217 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10218 arm_insn_r->reg_rec_count = 1;
10219 }
10220 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10221 {
10222 /* STRH. */
10223 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10224 ARM_RECORD_STRH);
10225 }
10226 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10227 {
10228 /* LDRD. */
10229 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10230 record_buf[1] = record_buf[0] + 1;
10231 arm_insn_r->reg_rec_count = 2;
10232 }
10233 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10234 {
10235 /* STRD. */
10236 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10237 ARM_RECORD_STRD);
10238 }
10239 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10240 {
10241 /* LDRH, LDRSB, LDRSH. */
10242 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10243 arm_insn_r->reg_rec_count = 1;
10244 }
10245
10246 }
10247
10248 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10249 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10250 && !INSN_RECORDED(arm_insn_r))
10251 {
10252 ret = -1;
10253 /* Handle coprocessor insn extension space. */
10254 }
10255
10256 /* To be done for ARMv5 and later; as of now we return -1. */
10257 if (-1 == ret)
ca92db2d 10258 return ret;
72508ac0
PO
10259
10260 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10261 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10262
10263 return ret;
10264}
10265
10266/* Handling opcode 000 insns. */
10267
10268static int
10269arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10270{
10271 struct regcache *reg_cache = arm_insn_r->regcache;
10272 uint32_t record_buf[8], record_buf_mem[8];
10273 ULONGEST u_regval[2] = {0};
10274
bec2ab5a 10275 uint32_t reg_src1 = 0, reg_dest = 0;
72508ac0
PO
10276 uint32_t opcode1 = 0;
10277
10278 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10279 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10280 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10281
10282 /* Data processing insn /multiply insn. */
10283 if (9 == arm_insn_r->decode
10284 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10285 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10286 {
10287 /* Handle multiply instructions. */
10288 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10289 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10290 {
10291 /* Handle MLA and MUL. */
10292 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10293 record_buf[1] = ARM_PS_REGNUM;
10294 arm_insn_r->reg_rec_count = 2;
10295 }
10296 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10297 {
10298 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10299 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10300 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10301 record_buf[2] = ARM_PS_REGNUM;
10302 arm_insn_r->reg_rec_count = 3;
10303 }
10304 }
10305 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10306 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10307 {
10308 /* Handle misc load insns, since bit 20 (L) is set. */
10309 /* The LDR insn is capable of branching: if an LDR insn
10310 that loads R15 is preceded by MOV LR, PC, the pair
10311 emulates a branch-and-link insn, and hence we need to save
10312 CPSR and PC as well. I am not sure this is the right
10313 place; the opcode = 010 LDR insn makes this happen if R15
10314 is used. */
10315 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10316 if (15 != reg_dest)
10317 {
10318 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10319 arm_insn_r->reg_rec_count = 1;
10320 }
10321 else
10322 {
10323 record_buf[0] = reg_dest;
10324 record_buf[1] = ARM_PS_REGNUM;
10325 arm_insn_r->reg_rec_count = 2;
10326 }
10327 }
10328 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10329 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10330 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10331 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10332 {
10333 /* Handle MSR insn. */
10334 if (9 == arm_insn_r->opcode)
10335 {
10336 /* CPSR is going to be changed. */
10337 record_buf[0] = ARM_PS_REGNUM;
10338 arm_insn_r->reg_rec_count = 1;
10339 }
10340 else
10341 {
10342 /* SPSR is going to be changed. */
10343 /* How to read SPSR value? */
72508ac0
PO
10344 return -1;
10345 }
10346 }
10347 else if (9 == arm_insn_r->decode
10348 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10349 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10350 {
10351 /* Handling SWP, SWPB. */
10352 /* These insns change both a register and memory. */
10353 /* SWP or SWPB insn. */
10354
10355 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10356 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10357 /* SWP insn? It swaps a word. */
10358 if (8 == arm_insn_r->opcode)
10359 {
10360 record_buf_mem[0] = 4;
10361 }
10362 else
10363 {
10364 /* SWPB insn, swaps only byte. */
10365 record_buf_mem[0] = 1;
10366 }
10367 record_buf_mem[1] = u_regval[0];
10368 arm_insn_r->mem_rec_count = 1;
10369 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10370 arm_insn_r->reg_rec_count = 1;
10371 }
10372 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10373 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10374 {
10375 /* Handle BLX, branch and link/exchange. */
10376 if (9 == arm_insn_r->opcode)
10377 {
10378 /* Branch is chosen by setting the T bit of CPSR from bit[0] of Rm,
10379 and R14 stores the return address. */
10380 record_buf[0] = ARM_PS_REGNUM;
10381 record_buf[1] = ARM_LR_REGNUM;
10382 arm_insn_r->reg_rec_count = 2;
10383 }
10384 }
10385 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10386 {
10387 /* Handle enhanced software breakpoint insn, BKPT. */
10388 /* CPSR is changed so that execution continues in ARM state, with normal
10389 interrupts disabled, entering abort mode. */
10390 /* PC is set according to the high vector configuration. */
10391 /* If the user hits the breakpoint and types reverse, we
10392 need to go back with the previous CPSR and
10393 Program Counter. */
10394 record_buf[0] = ARM_PS_REGNUM;
10395 record_buf[1] = ARM_LR_REGNUM;
10396 arm_insn_r->reg_rec_count = 2;
10397
10398 /* Save SPSR also; how? */
72508ac0
PO
10399 return -1;
10400 }
10401 else if (11 == arm_insn_r->decode
10402 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10403 {
10404 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10405
10406 /* Handle str(x) insn */
10407 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10408 ARM_RECORD_STRH);
10409 }
10410 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10411 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10412 {
10413 /* Handle BX, branch and exchange. */
10414 /* Branch is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10415 record_buf[0] = ARM_PS_REGNUM;
10416 arm_insn_r->reg_rec_count = 1;
10417 }
10418 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10419 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10420 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10421 {
10422 /* Count leading zeros: CLZ. */
10423 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10424 arm_insn_r->reg_rec_count = 1;
10425 }
10426 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10427 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10428 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10429 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10430 )
10431 {
10432 /* Handle MRS insn. */
10433 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10434 arm_insn_r->reg_rec_count = 1;
10435 }
10436 else if (arm_insn_r->opcode <= 15)
10437 {
10438 /* Normal data processing insns. */
10439 /* In all of the 11 shifter-operand modes, the insn modifies the
10440 destination register, which is specified by bits 12-15. */
10441 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10442 record_buf[1] = ARM_PS_REGNUM;
10443 arm_insn_r->reg_rec_count = 2;
10444 }
10445 else
10446 {
10447 return -1;
10448 }
10449
10450 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10451 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10452 return 0;
10453}
10454
10455/* Handling opcode 001 insns. */
10456
10457static int
10458arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10459{
10460 uint32_t record_buf[8], record_buf_mem[8];
10461
10462 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10463 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10464
10465 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10466 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10467 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10468 )
10469 {
10470 /* Handle MSR insn. */
10471 if (9 == arm_insn_r->opcode)
10472 {
10473 /* CPSR is going to be changed. */
10474 record_buf[0] = ARM_PS_REGNUM;
10475 arm_insn_r->reg_rec_count = 1;
10476 }
10477 else
10478 {
10479 /* SPSR is going to be changed. */
10480 }
10481 }
10482 else if (arm_insn_r->opcode <= 15)
10483 {
10484 /* Normal data processing insns. */
10485 /* In all of the 11 shifter-operand modes, the insn modifies the
10486 destination register, which is specified by bits 12-15. */
10487 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10488 record_buf[1] = ARM_PS_REGNUM;
10489 arm_insn_r->reg_rec_count = 2;
10490 }
10491 else
10492 {
10493 return -1;
10494 }
10495
10496 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10497 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10498 return 0;
10499}
10500
c55978a6
YQ
10501static int
10502arm_record_media (insn_decode_record *arm_insn_r)
10503{
10504 uint32_t record_buf[8];
10505
10506 switch (bits (arm_insn_r->arm_insn, 22, 24))
10507 {
10508 case 0:
10509 /* Parallel addition and subtraction, signed */
10510 case 1:
10511 /* Parallel addition and subtraction, unsigned */
10512 case 2:
10513 case 3:
10514 /* Packing, unpacking, saturation and reversal */
10515 {
10516 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10517
10518 record_buf[arm_insn_r->reg_rec_count++] = rd;
10519 }
10520 break;
10521
10522 case 4:
10523 case 5:
10524 /* Signed multiplies */
10525 {
10526 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10527 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10528
10529 record_buf[arm_insn_r->reg_rec_count++] = rd;
10530 if (op1 == 0x0)
10531 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10532 else if (op1 == 0x4)
10533 record_buf[arm_insn_r->reg_rec_count++]
10534 = bits (arm_insn_r->arm_insn, 12, 15);
10535 }
10536 break;
10537
10538 case 6:
10539 {
10540 if (bit (arm_insn_r->arm_insn, 21)
10541 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10542 {
10543 /* SBFX */
10544 record_buf[arm_insn_r->reg_rec_count++]
10545 = bits (arm_insn_r->arm_insn, 12, 15);
10546 }
10547 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10548 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10549 {
10550 /* USAD8 and USADA8 */
10551 record_buf[arm_insn_r->reg_rec_count++]
10552 = bits (arm_insn_r->arm_insn, 16, 19);
10553 }
10554 }
10555 break;
10556
10557 case 7:
10558 {
10559 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10560 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10561 {
10562 /* Permanently UNDEFINED */
10563 return -1;
10564 }
10565 else
10566 {
10567 /* BFC, BFI and UBFX */
10568 record_buf[arm_insn_r->reg_rec_count++]
10569 = bits (arm_insn_r->arm_insn, 12, 15);
10570 }
10571 }
10572 break;
10573
10574 default:
10575 return -1;
10576 }
10577
10578 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10579
10580 return 0;
10581}
10582
71e396f9 10583/* Handle ARM mode instructions with opcode 010. */
72508ac0
PO
10584
10585static int
10586arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10587{
10588 struct regcache *reg_cache = arm_insn_r->regcache;
10589
71e396f9
LM
10590 uint32_t reg_base, reg_dest;
10591 uint32_t offset_12, tgt_mem_addr;
72508ac0 10592 uint32_t record_buf[8], record_buf_mem[8];
71e396f9
LM
10593 unsigned char wback;
10594 ULONGEST u_regval;
72508ac0 10595
71e396f9
LM
10596 /* Calculate wback. */
10597 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10598 || (bit (arm_insn_r->arm_insn, 21) == 1);
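 /* Bit 24 is P and bit 21 is W: P=1,W=0 is plain offset addressing
 (no write-back), P=1,W=1 is pre-indexed and P=0 is post-indexed,
 both of which write the updated address back to the base register,
 hence wback. */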
72508ac0 10599
71e396f9
LM
10600 arm_insn_r->reg_rec_count = 0;
10601 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0
PO
10602
10603 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10604 {
71e396f9
LM
10605 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10606 and LDRT. */
10607
72508ac0 10608 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
71e396f9
LM
10609 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10610
10611 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10612 precedes an LDR instruction that loads R15, the pair
10613 emulates a branch and link instruction, and hence we need to save
10614 CPSR and PC as well. */
10615 if (ARM_PC_REGNUM == reg_dest)
10616 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10617
10618 /* If wback is true, also save the base register, which is going to be
10619 written to. */
10620 if (wback)
10621 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10622 }
10623 else
10624 {
71e396f9
LM
10625 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10626
72508ac0 10627 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
71e396f9
LM
10628 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10629
10630 /* Handle bit U. */
72508ac0 10631 if (bit (arm_insn_r->arm_insn, 23))
71e396f9
LM
10632 {
10633 /* U == 1: Add the offset. */
10634 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10635 }
72508ac0 10636 else
71e396f9
LM
10637 {
10638 /* U == 0: subtract the offset. */
10639 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10640 }
10641
10642 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10643 bytes. */
10644 if (bit (arm_insn_r->arm_insn, 22))
10645 {
10646 /* STRB and STRBT: 1 byte. */
10647 record_buf_mem[0] = 1;
10648 }
10649 else
10650 {
10651 /* STR and STRT: 4 bytes. */
10652 record_buf_mem[0] = 4;
10653 }
10654
10655 /* Handle bit P. */
10656 if (bit (arm_insn_r->arm_insn, 24))
10657 record_buf_mem[1] = tgt_mem_addr;
10658 else
10659 record_buf_mem[1] = (uint32_t) u_regval;
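 /* With P=1 (offset or pre-indexed addressing) the store goes to the
 computed address; with P=0 (post-indexed) the data is written at the
 unmodified base address and the base register is updated afterwards,
 which the wback handling below takes care of. */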
72508ac0 10660
72508ac0
PO
10661 arm_insn_r->mem_rec_count = 1;
10662
71e396f9
LM
10663 /* If wback is true, also save the base register, which is going to be
10664 written to. */
10665 if (wback)
10666 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10667 }
10668
10669 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10670 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10671 return 0;
10672}
10673
10674/* Handling opcode 011 insns. */
10675
10676static int
10677arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10678{
10679 struct regcache *reg_cache = arm_insn_r->regcache;
10680
10681 uint32_t shift_imm = 0;
10682 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10683 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10684 uint32_t record_buf[8], record_buf_mem[8];
10685
10686 LONGEST s_word;
10687 ULONGEST u_regval[2];
10688
c55978a6
YQ
10689 if (bit (arm_insn_r->arm_insn, 4))
10690 return arm_record_media (arm_insn_r);
10691
72508ac0
PO
10692 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10693 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10694
10695 /* Handle enhanced store insns and the LDRD DSP insn;
10696 the ordering follows the addressing modes of the store
10697 insns, beginning with STRH. */
10698
10699 /* LDR or STR? */
10700 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10701 {
10702 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10703 /* The LDR insn is capable of branching: if an LDR insn
10704 that loads R15 is preceded by MOV LR, PC, the pair
10705 emulates a branch-and-link insn, and hence we need to save
10706 CPSR and PC as well. */
10707 if (15 != reg_dest)
10708 {
10709 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10710 arm_insn_r->reg_rec_count = 1;
10711 }
10712 else
10713 {
10714 record_buf[0] = reg_dest;
10715 record_buf[1] = ARM_PS_REGNUM;
10716 arm_insn_r->reg_rec_count = 2;
10717 }
10718 }
10719 else
10720 {
10721 if (! bits (arm_insn_r->arm_insn, 4, 11))
10722 {
10723 /* Store insn, register offset and register pre-indexed,
10724 register post-indexed. */
10725 /* Get Rm. */
10726 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10727 /* Get Rn. */
10728 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10729 regcache_raw_read_unsigned (reg_cache, reg_src1,
10730 &u_regval[0]);
10731 regcache_raw_read_unsigned (reg_cache, reg_src2,
10732 &u_regval[1]);
10733 if (15 == reg_src2)
10734 {
10735 /* If R15 was used as Rn, the value read is the current PC+8. */
10736 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10737 u_regval[0] = u_regval[0] + 8;
10738 }
10739 /* Calculate target store address, Rn +/- Rm, register offset. */
10740 /* U == 1. */
10741 if (bit (arm_insn_r->arm_insn, 23))
10742 {
10743 tgt_mem_addr = u_regval[0] + u_regval[1];
10744 }
10745 else
10746 {
10747 tgt_mem_addr = u_regval[1] - u_regval[0];
10748 }
10749
10750 switch (arm_insn_r->opcode)
10751 {
10752 /* STR. */
10753 case 8:
10754 case 12:
10755 /* STR. */
10756 case 9:
10757 case 13:
10758 /* STRT. */
10759 case 1:
10760 case 5:
10761 /* STR. */
10762 case 0:
10763 case 4:
10764 record_buf_mem[0] = 4;
10765 break;
10766
10767 /* STRB. */
10768 case 10:
10769 case 14:
10770 /* STRB. */
10771 case 11:
10772 case 15:
10773 /* STRBT. */
10774 case 3:
10775 case 7:
10776 /* STRB. */
10777 case 2:
10778 case 6:
10779 record_buf_mem[0] = 1;
10780 break;
10781
10782 default:
10783 gdb_assert_not_reached ("no decoding pattern found");
10784 break;
10785 }
10786 record_buf_mem[1] = tgt_mem_addr;
10787 arm_insn_r->mem_rec_count = 1;
10788
10789 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10790 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10791 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10792 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10793 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10794 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10795 )
10796 {
10797 /* Rn is going to be changed in pre-indexed mode and
10798 post-indexed mode as well. */
10799 record_buf[0] = reg_src2;
10800 arm_insn_r->reg_rec_count = 1;
10801 }
10802 }
10803 else
10804 {
10805 /* Store insn, scaled register offset; scaled pre-indexed. */
10806 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10807 /* Get Rm. */
10808 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10809 /* Get Rn. */
10810 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10811 /* Get shift_imm. */
10812 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10813 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10814 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10815 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10816 /* Offset_12 here holds the shift type (bits 5-6). */
10817 switch (offset_12)
10818 {
10819 case 0:
10820 /* Offset_12 used as index. */
10821 offset_12 = u_regval[0] << shift_imm;
10822 break;
10823
10824 case 1:
10825 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10826 break;
10827
10828 case 2:
10829 if (!shift_imm)
10830 {
10831 if (bit (u_regval[0], 31))
10832 {
10833 offset_12 = 0xFFFFFFFF;
10834 }
10835 else
10836 {
10837 offset_12 = 0;
10838 }
10839 }
10840 else
10841 {
10842 /* This is arithmetic shift. */
10843 offset_12 = s_word >> shift_imm;
10844 }
10845 break;
10846
10847 case 3:
10848 if (!shift_imm)
10849 {
10850 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10851 &u_regval[1]);
10852 /* Get C flag value and shift it by 31. */
10853 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10854 | (u_regval[0]) >> 1);
10855 }
10856 else
10857 {
10858 offset_12 = (u_regval[0] >> shift_imm)
10859 | (u_regval[0] <<
10860 (32 - shift_imm));
10861 }
10862 break;
10863
10864 default:
10865 gdb_assert_not_reached ("no decoding pattern found");
10866 break;
10867 }
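 /* Worked example with illustrative values: for the rotate case
 (offset_12 == 3) with shift_imm = 4 and a register value of
 0x0000abcd, the rotated offset is 0xd0000abc. */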
10868
10869 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10870 /* bit U set. */
10871 if (bit (arm_insn_r->arm_insn, 23))
10872 {
10873 tgt_mem_addr = u_regval[1] + offset_12;
10874 }
10875 else
10876 {
10877 tgt_mem_addr = u_regval[1] - offset_12;
10878 }
10879
10880 switch (arm_insn_r->opcode)
10881 {
10882 /* STR. */
10883 case 8:
10884 case 12:
10885 /* STR. */
10886 case 9:
10887 case 13:
10888 /* STRT. */
10889 case 1:
10890 case 5:
10891 /* STR. */
10892 case 0:
10893 case 4:
10894 record_buf_mem[0] = 4;
10895 break;
10896
10897 /* STRB. */
10898 case 10:
10899 case 14:
10900 /* STRB. */
10901 case 11:
10902 case 15:
10903 /* STRBT. */
10904 case 3:
10905 case 7:
10906 /* STRB. */
10907 case 2:
10908 case 6:
10909 record_buf_mem[0] = 1;
10910 break;
10911
10912 default:
10913 gdb_assert_not_reached ("no decoding pattern found");
10914 break;
10915 }
10916 record_buf_mem[1] = tgt_mem_addr;
10917 arm_insn_r->mem_rec_count = 1;
10918
10919 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10920 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10921 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10922 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10923 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10924 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10925 )
10926 {
10927 /* Rn is going to be changed in register scaled pre-indexed
10928 mode, and in scaled post-indexed mode. */
10929 record_buf[0] = reg_src2;
10930 arm_insn_r->reg_rec_count = 1;
10931 }
10932 }
10933 }
10934
10935 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10936 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10937 return 0;
10938}
10939
71e396f9 10940/* Handle ARM mode instructions with opcode 100. */
72508ac0
PO
10941
10942static int
10943arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10944{
10945 struct regcache *reg_cache = arm_insn_r->regcache;
71e396f9
LM
10946 uint32_t register_count = 0, register_bits;
10947 uint32_t reg_base, addr_mode;
72508ac0 10948 uint32_t record_buf[24], record_buf_mem[48];
71e396f9
LM
10949 uint32_t wback;
10950 ULONGEST u_regval;
72508ac0 10951
71e396f9
LM
10952 /* Fetch the list of registers. */
10953 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10954 arm_insn_r->reg_rec_count = 0;
10955
10956 /* Fetch the base register that contains the address we are loading data
10957 to. */
10958 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10959
71e396f9
LM
10960 /* Calculate wback. */
10961 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10962
10963 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10964 {
71e396f9 10965 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10966
71e396f9 10967 /* Find out which registers are going to be loaded from memory. */
72508ac0 10968 while (register_bits)
71e396f9
LM
10969 {
10970 if (register_bits & 0x00000001)
10971 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10972 register_bits = register_bits >> 1;
10973 register_count++;
10974 }
72508ac0 10975
71e396f9
LM
10976
10977 /* If wback is true, also save the base register, which is going to be
10978 written to. */
10979 if (wback)
10980 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10981
10982 /* Save the CPSR register. */
10983 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
72508ac0
PO
10984 }
10985 else
10986 {
71e396f9 10987 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 10988
71e396f9
LM
10989 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10990
10991 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10992
10993 /* Find out how many registers are going to be stored to memory. */
72508ac0 10994 while (register_bits)
71e396f9
LM
10995 {
10996 if (register_bits & 0x00000001)
10997 register_count++;
10998 register_bits = register_bits >> 1;
10999 }
72508ac0
PO
11000
11001 switch (addr_mode)
71e396f9
LM
11002 {
11003 /* STMDA (STMED): Decrement after. */
11004 case 0:
11005 record_buf_mem[1] = (uint32_t) u_regval
11006 - register_count * INT_REGISTER_SIZE + 4;
11007 break;
11008 /* STM (STMIA, STMEA): Increment after. */
11009 case 1:
11010 record_buf_mem[1] = (uint32_t) u_regval;
11011 break;
11012 /* STMDB (STMFD): Decrement before. */
11013 case 2:
11014 record_buf_mem[1] = (uint32_t) u_regval
11015 - register_count * INT_REGISTER_SIZE;
11016 break;
11017 /* STMIB (STMFA): Increment before. */
11018 case 3:
11019 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11020 break;
11021 default:
11022 gdb_assert_not_reached ("no decoding pattern found");
11023 break;
11024 }
72508ac0 11025
71e396f9
LM
11026 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11027 arm_insn_r->mem_rec_count = 1;
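 /* Worked example with illustrative values: for STMDB sp!, {r0-r3, lr}
 with SP = 0x8000, register_count is 5 and addr_mode is 2, so 20 bytes
 starting at 0x8000 - 20 = 0x7fec are recorded; SP itself is recorded
 below because wback is set. */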
11028
11029 /* If wback is true, also save the base register, which is going to be
11030 written to. */
11031 if (wback)
11032 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
11033 }
11034
11035 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11036 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11037 return 0;
11038}
11039
11040/* Handling opcode 101 insns. */
11041
11042static int
11043arm_record_b_bl (insn_decode_record *arm_insn_r)
11044{
11045 uint32_t record_buf[8];
11046
11047 /* Handle B, BL, BLX(1) insns. */
11048 /* B simply branches so we do nothing here. */
11049 /* Note: BLX(1) doesn't fall here; instead it falls into the
11050 extension space. */
11051 if (bit (arm_insn_r->arm_insn, 24))
11052 {
11053 record_buf[0] = ARM_LR_REGNUM;
11054 arm_insn_r->reg_rec_count = 1;
11055 }
11056
11057 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11058
11059 return 0;
11060}
11061
72508ac0 11062static int
c6ec2b30 11063arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
72508ac0
PO
11064{
11065 printf_unfiltered (_("Process record does not support instruction "
01e57735
YQ
11066 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11067 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
72508ac0
PO
11068
11069 return -1;
11070}
11071
5a578da5
OJ
11072/* Record handler for vector data transfer instructions. */
11073
11074static int
11075arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11076{
11077 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11078 uint32_t record_buf[4];
11079
5a578da5
OJ
11080 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11081 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11082 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11083 bit_l = bit (arm_insn_r->arm_insn, 20);
11084 bit_c = bit (arm_insn_r->arm_insn, 8);
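 /* Bit L selects the transfer direction (L = 1 moves data into the ARM
 core register Rt) and bit C selects scalar accesses; the A field in
 bits 21-23 then distinguishes plain VMOV from the VMRS/VMSR
 system-register transfers handled below. */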
11085
11086 /* Handle VMOV instruction. */
11087 if (bit_l && bit_c)
11088 {
11089 record_buf[0] = reg_t;
11090 arm_insn_r->reg_rec_count = 1;
11091 }
11092 else if (bit_l && !bit_c)
11093 {
11094 /* Handle VMOV instruction. */
11095 if (bits_a == 0x00)
11096 {
f1771dce 11097 record_buf[0] = reg_t;
5a578da5
OJ
11098 arm_insn_r->reg_rec_count = 1;
11099 }
11100 /* Handle VMRS instruction. */
11101 else if (bits_a == 0x07)
11102 {
11103 if (reg_t == 15)
11104 reg_t = ARM_PS_REGNUM;
11105
11106 record_buf[0] = reg_t;
11107 arm_insn_r->reg_rec_count = 1;
11108 }
11109 }
11110 else if (!bit_l && !bit_c)
11111 {
11112 /* Handle VMOV instruction. */
11113 if (bits_a == 0x00)
11114 {
f1771dce 11115 record_buf[0] = ARM_D0_REGNUM + reg_v;
5a578da5
OJ
11116
11117 arm_insn_r->reg_rec_count = 1;
11118 }
11119 /* Handle VMSR instruction. */
11120 else if (bits_a == 0x07)
11121 {
11122 record_buf[0] = ARM_FPSCR_REGNUM;
11123 arm_insn_r->reg_rec_count = 1;
11124 }
11125 }
11126 else if (!bit_l && bit_c)
11127 {
11128 /* Handle VMOV instruction. */
11129 if (!(bits_a & 0x04))
11130 {
11131 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11132 + ARM_D0_REGNUM;
11133 arm_insn_r->reg_rec_count = 1;
11134 }
11135 /* Handle VDUP instruction. */
11136 else
11137 {
11138 if (bit (arm_insn_r->arm_insn, 21))
11139 {
11140 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11141 record_buf[0] = reg_v + ARM_D0_REGNUM;
11142 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11143 arm_insn_r->reg_rec_count = 2;
11144 }
11145 else
11146 {
11147 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11148 record_buf[0] = reg_v + ARM_D0_REGNUM;
11149 arm_insn_r->reg_rec_count = 1;
11150 }
11151 }
11152 }
11153
11154 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11155 return 0;
11156}
11157
f20f80dd
OJ
11158/* Record handler for extension register load/store instructions. */
11159
11160static int
11161arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11162{
11163 uint32_t opcode, single_reg;
11164 uint8_t op_vldm_vstm;
11165 uint32_t record_buf[8], record_buf_mem[128];
11166 ULONGEST u_regval = 0;
11167
11168 struct regcache *reg_cache = arm_insn_r->regcache;
f20f80dd
OJ
11169
11170 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11171 single_reg = !bit (arm_insn_r->arm_insn, 8);
f20f80dd
OJ
11172 op_vldm_vstm = opcode & 0x1b;
11173
11174 /* Handle VMOV instructions. */
11175 if ((opcode & 0x1e) == 0x04)
11176 {
9fde51ed 11177 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
01e57735
YQ
11178 {
11179 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11180 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11181 arm_insn_r->reg_rec_count = 2;
11182 }
f20f80dd 11183 else
01e57735 11184 {
9fde51ed
YQ
11185 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11186 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11187
9fde51ed 11188 if (single_reg)
01e57735 11189 {
9fde51ed
YQ
11190 /* The first S register number m is REG_M:M (M is bit 5),
11191 the corresponding D register number is REG_M:M / 2, which
11192 is REG_M. */
11193 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11194 /* The second S register number is REG_M:M + 1, the
11195 corresponding D register number is (REG_M:M + 1) / 2.
11196 IOW, if bit M is 1, the first and second S registers
11197 are mapped to different D registers, otherwise, they are
11198 in the same D register. */
11199 if (bit_m)
11200 {
11201 record_buf[arm_insn_r->reg_rec_count++]
11202 = ARM_D0_REGNUM + reg_m + 1;
11203 }
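 /* Worked example with illustrative values: for VMOV s5, s6, r0, r1,
 REG_M:M encodes S5, i.e. reg_m = 2 and bit_m = 1, so both D2 and D3
 are recorded because s5 and s6 live in different D registers. */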
01e57735
YQ
11204 }
11205 else
11206 {
9fde51ed 11207 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
01e57735
YQ
11208 arm_insn_r->reg_rec_count = 1;
11209 }
11210 }
f20f80dd
OJ
11211 }
11212 /* Handle VSTM and VPUSH instructions. */
11213 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11214 || op_vldm_vstm == 0x12)
f20f80dd
OJ
11215 {
11216 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11217 uint32_t memory_index = 0;
11218
11219 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11220 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11221 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11222 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11223 memory_count = imm_off8;
11224
11225 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11226 start_address = u_regval;
f20f80dd 11227 else
01e57735 11228 start_address = u_regval - imm_off32;
f20f80dd
OJ
11229
11230 if (bit (arm_insn_r->arm_insn, 21))
01e57735
YQ
11231 {
11232 record_buf[0] = reg_rn;
11233 arm_insn_r->reg_rec_count = 1;
11234 }
f20f80dd
OJ
11235
11236 while (memory_count > 0)
01e57735 11237 {
9fde51ed 11238 if (single_reg)
01e57735 11239 {
9fde51ed
YQ
11240 record_buf_mem[memory_index] = 4;
11241 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11242 start_address = start_address + 4;
11243 memory_index = memory_index + 2;
11244 }
11245 else
11246 {
9fde51ed
YQ
11247 record_buf_mem[memory_index] = 4;
11248 record_buf_mem[memory_index + 1] = start_address;
11249 record_buf_mem[memory_index + 2] = 4;
11250 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11251 start_address = start_address + 8;
11252 memory_index = memory_index + 4;
11253 }
11254 memory_count--;
11255 }
f20f80dd
OJ
11256 arm_insn_r->mem_rec_count = (memory_index >> 1);
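 /* Worked example with illustrative values: for VPUSH {s8-s11}
 (imm8 = 4) with SP = 0x9000, imm_off32 is 16 and bit U is 0, so four
 4-byte slots at 0x8ff0, 0x8ff4, 0x8ff8 and 0x8ffc are recorded, plus
 SP itself because write-back is set. */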
11257 }
11258 /* Handle VLDM instructions. */
11259 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11260 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11261 {
11262 uint32_t reg_count, reg_vd;
11263 uint32_t reg_index = 0;
9fde51ed 11264 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
f20f80dd
OJ
11265
11266 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11267 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11268
9fde51ed
YQ
11269 /* REG_VD is the first D register number. If the instruction
11270 loads memory to S registers (SINGLE_REG is TRUE), the register
11271 number is (REG_VD << 1 | bit D), so the corresponding D
11272 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11273 if (!single_reg)
11274 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11275
9fde51ed 11276 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11277 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11278
9fde51ed
YQ
11279 /* If the instruction loads memory to D register, REG_COUNT should
11280 be divided by 2, according to the ARM Architecture Reference
11281 Manual. If the instruction loads memory to S register, divide by
11282 2 as well because two S registers are mapped to D register. */
11283 reg_count = reg_count / 2;
11284 if (single_reg && bit_d)
01e57735 11285 {
9fde51ed
YQ
11286 /* Increase the register count if S register list starts from
11287 an odd number (bit d is one). */
11288 reg_count++;
11289 }
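 /* Worked example with illustrative values: for VLDM r0, {s3-s6},
 imm8 = 4 so reg_count becomes 2, and because the list starts at an
 odd S register (bit_d = 1) it is bumped to 3, recording D1, D2 and
 D3, which together cover s3-s6. */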
f20f80dd 11290
9fde51ed
YQ
11291 while (reg_count > 0)
11292 {
11293 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
01e57735
YQ
11294 reg_count--;
11295 }
f20f80dd
OJ
11296 arm_insn_r->reg_rec_count = reg_index;
11297 }
11298 /* VSTR Vector store register. */
11299 else if ((opcode & 0x13) == 0x10)
11300 {
bec2ab5a 11301 uint32_t start_address, reg_rn, imm_off32, imm_off8;
f20f80dd
OJ
11302 uint32_t memory_index = 0;
11303
11304 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11305 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11306 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11307 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11308
11309 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11310 start_address = u_regval + imm_off32;
f20f80dd 11311 else
01e57735 11312 start_address = u_regval - imm_off32;
f20f80dd
OJ
11313
11314 if (single_reg)
01e57735 11315 {
9fde51ed
YQ
11316 record_buf_mem[memory_index] = 4;
11317 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11318 arm_insn_r->mem_rec_count = 1;
11319 }
f20f80dd 11320 else
01e57735 11321 {
9fde51ed
YQ
11322 record_buf_mem[memory_index] = 4;
11323 record_buf_mem[memory_index + 1] = start_address;
11324 record_buf_mem[memory_index + 2] = 4;
11325 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11326 arm_insn_r->mem_rec_count = 2;
11327 }
f20f80dd
OJ
11328 }
11329 /* VLDR Vector load register. */
11330 else if ((opcode & 0x13) == 0x11)
11331 {
11332 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11333
11334 if (!single_reg)
01e57735
YQ
11335 {
11336 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11337 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11338 }
f20f80dd 11339 else
01e57735
YQ
11340 {
11341 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
9fde51ed
YQ
11342 /* Record register D rather than pseudo register S. */
11343 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11344 }
f20f80dd
OJ
11345 arm_insn_r->reg_rec_count = 1;
11346 }
11347
11348 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11349 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11350 return 0;
11351}
11352
851f26ae
OJ
11353/* Record handler for arm/thumb mode VFP data processing instructions. */
11354
11355static int
11356arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11357{
11358 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11359 uint32_t record_buf[4];
11360 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11361 enum insn_types curr_insn_type = INSN_INV;
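 /* The INSN_T* cases classify the destination recorded at the end of
 this function: INSN_T0 records a D register pair, INSN_T1 a single
 double-precision D register, INSN_T2 the destination of a
 single-precision operation, and INSN_T3 only FPSCR (compares). */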
11362
11363 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11364 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11365 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11366 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11367 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11368 bit_d = bit (arm_insn_r->arm_insn, 22);
11369 opc1 = opc1 & 0x04;
11370
11371 /* Handle VMLA, VMLS. */
11372 if (opc1 == 0x00)
11373 {
11374 if (bit (arm_insn_r->arm_insn, 10))
11375 {
11376 if (bit (arm_insn_r->arm_insn, 6))
11377 curr_insn_type = INSN_T0;
11378 else
11379 curr_insn_type = INSN_T1;
11380 }
11381 else
11382 {
11383 if (dp_op_sz)
11384 curr_insn_type = INSN_T1;
11385 else
11386 curr_insn_type = INSN_T2;
11387 }
11388 }
11389 /* Handle VNMLA, VNMLS, VNMUL. */
11390 else if (opc1 == 0x01)
11391 {
11392 if (dp_op_sz)
11393 curr_insn_type = INSN_T1;
11394 else
11395 curr_insn_type = INSN_T2;
11396 }
11397 /* Handle VMUL. */
11398 else if (opc1 == 0x02 && !(opc3 & 0x01))
11399 {
11400 if (bit (arm_insn_r->arm_insn, 10))
11401 {
11402 if (bit (arm_insn_r->arm_insn, 6))
11403 curr_insn_type = INSN_T0;
11404 else
11405 curr_insn_type = INSN_T1;
11406 }
11407 else
11408 {
11409 if (dp_op_sz)
11410 curr_insn_type = INSN_T1;
11411 else
11412 curr_insn_type = INSN_T2;
11413 }
11414 }
11415 /* Handle VADD, VSUB. */
11416 else if (opc1 == 0x03)
11417 {
11418 if (!bit (arm_insn_r->arm_insn, 9))
11419 {
11420 if (bit (arm_insn_r->arm_insn, 6))
11421 curr_insn_type = INSN_T0;
11422 else
11423 curr_insn_type = INSN_T1;
11424 }
11425 else
11426 {
11427 if (dp_op_sz)
11428 curr_insn_type = INSN_T1;
11429 else
11430 curr_insn_type = INSN_T2;
11431 }
11432 }
11433 /* Handle VDIV. */
11434 else if (opc1 == 0x0b)
11435 {
11436 if (dp_op_sz)
11437 curr_insn_type = INSN_T1;
11438 else
11439 curr_insn_type = INSN_T2;
11440 }
11441 /* Handle all other vfp data processing instructions. */
11442 else if (opc1 == 0x0b)
11443 {
11444 /* Handle VMOV. */
11445 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11446 {
11447 if (bit (arm_insn_r->arm_insn, 4))
11448 {
11449 if (bit (arm_insn_r->arm_insn, 6))
11450 curr_insn_type = INSN_T0;
11451 else
11452 curr_insn_type = INSN_T1;
11453 }
11454 else
11455 {
11456 if (dp_op_sz)
11457 curr_insn_type = INSN_T1;
11458 else
11459 curr_insn_type = INSN_T2;
11460 }
11461 }
11462 /* Handle VNEG and VABS. */
11463 else if ((opc2 == 0x01 && opc3 == 0x01)
11464 || (opc2 == 0x00 && opc3 == 0x03))
11465 {
11466 if (!bit (arm_insn_r->arm_insn, 11))
11467 {
11468 if (bit (arm_insn_r->arm_insn, 6))
11469 curr_insn_type = INSN_T0;
11470 else
11471 curr_insn_type = INSN_T1;
11472 }
11473 else
11474 {
11475 if (dp_op_sz)
11476 curr_insn_type = INSN_T1;
11477 else
11478 curr_insn_type = INSN_T2;
11479 }
11480 }
11481 /* Handle VSQRT. */
11482 else if (opc2 == 0x01 && opc3 == 0x03)
11483 {
11484 if (dp_op_sz)
11485 curr_insn_type = INSN_T1;
11486 else
11487 curr_insn_type = INSN_T2;
11488 }
11489 /* Handle VCVT. */
11490 else if (opc2 == 0x07 && opc3 == 0x03)
11491 {
11492 if (!dp_op_sz)
11493 curr_insn_type = INSN_T1;
11494 else
11495 curr_insn_type = INSN_T2;
11496 }
11497 else if (opc3 & 0x01)
11498 {
11499 /* Handle VCVT. */
11500 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11501 {
11502 if (!bit (arm_insn_r->arm_insn, 18))
11503 curr_insn_type = INSN_T2;
11504 else
11505 {
11506 if (dp_op_sz)
11507 curr_insn_type = INSN_T1;
11508 else
11509 curr_insn_type = INSN_T2;
11510 }
11511 }
11512 /* Handle VCVT. */
11513 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11514 {
11515 if (dp_op_sz)
11516 curr_insn_type = INSN_T1;
11517 else
11518 curr_insn_type = INSN_T2;
11519 }
11520 /* Handle VCVTB, VCVTT. */
11521 else if ((opc2 & 0x0e) == 0x02)
11522 curr_insn_type = INSN_T2;
11523 /* Handle VCMP, VCMPE. */
11524 else if ((opc2 & 0x0e) == 0x04)
11525 curr_insn_type = INSN_T3;
11526 }
11527 }
11528
11529 switch (curr_insn_type)
11530 {
11531 case INSN_T0:
11532 reg_vd = reg_vd | (bit_d << 4);
11533 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11534 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11535 arm_insn_r->reg_rec_count = 2;
11536 break;
11537
11538 case INSN_T1:
11539 reg_vd = reg_vd | (bit_d << 4);
11540 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11541 arm_insn_r->reg_rec_count = 1;
11542 break;
11543
11544 case INSN_T2:
11545 reg_vd = (reg_vd << 1) | bit_d;
11546 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11547 arm_insn_r->reg_rec_count = 1;
11548 break;
11549
11550 case INSN_T3:
11551 record_buf[0] = ARM_FPSCR_REGNUM;
11552 arm_insn_r->reg_rec_count = 1;
11553 break;
11554
11555 default:
11556 gdb_assert_not_reached ("no decoding pattern found");
11557 break;
11558 }
11559
11560 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11561 return 0;
11562}
11563
60cc5e93
OJ
11564/* Handling opcode 110 insns. */
11565
11566static int
11567arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11568{
bec2ab5a 11569 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11570
11571 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11572 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11573 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11574
11575 if ((coproc & 0x0e) == 0x0a)
11576 {
11577 /* Handle extension register ld/st instructions. */
11578 if (!(op1 & 0x20))
f20f80dd 11579 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11580
11581 /* 64-bit transfers between arm core and extension registers. */
11582 if ((op1 & 0x3e) == 0x04)
f20f80dd 11583 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11584 }
11585 else
11586 {
11587 /* Handle coprocessor ld/st instructions. */
11588 if (!(op1 & 0x3a))
11589 {
11590 /* Store. */
11591 if (!op1_ebit)
11592 return arm_record_unsupported_insn (arm_insn_r);
11593 else
11594 /* Load. */
11595 return arm_record_unsupported_insn (arm_insn_r);
11596 }
11597
11598 /* Move to coprocessor from two arm core registers. */
11599 if (op1 == 0x4)
11600 return arm_record_unsupported_insn (arm_insn_r);
11601
11602 /* Move to two arm core registers from coprocessor. */
11603 if (op1 == 0x5)
11604 {
11605 uint32_t reg_t[2];
11606
11607 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11608 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11609 arm_insn_r->reg_rec_count = 2;
11610
11611 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11612 return 0;
11613 }
11614 }
11615 return arm_record_unsupported_insn (arm_insn_r);
11616}
11617
72508ac0
PO
11618/* Handling opcode 111 insns. */
11619
11620static int
11621arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11622{
60cc5e93 11623 uint32_t op, op1_sbit, op1_ebit, coproc;
72508ac0
PO
11624 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11625 struct regcache *reg_cache = arm_insn_r->regcache;
72508ac0
PO
11626
11627 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
60cc5e93
OJ
11628 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11629 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11630 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11631 op = bit (arm_insn_r->arm_insn, 4);
97dfe206
OJ
11632
11633 /* Handle arm SWI/SVC system call instructions. */
60cc5e93 11634 if (op1_sbit)
97dfe206
OJ
11635 {
11636 if (tdep->arm_syscall_record != NULL)
11637 {
11638 ULONGEST svc_operand, svc_number;
11639
11640 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11641
11642 if (svc_operand) /* OABI. */
11643 svc_number = svc_operand - 0x900000;
11644 else /* EABI. */
11645 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
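 /* On ARM GNU/Linux the old OABI encodes the syscall number in the SWI
 immediate as 0x900000 + NR, whereas EABI uses a zero immediate and
 passes the number in r7, which is why r7 is read from the regcache
 in that case. */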
11646
60cc5e93 11647 return tdep->arm_syscall_record (reg_cache, svc_number);
97dfe206
OJ
11648 }
11649 else
11650 {
11651 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11652 return -1;
97dfe206
OJ
11653 }
11654 }
60cc5e93
OJ
11655
11656 if ((coproc & 0x0e) == 0x0a)
11657 {
11658 /* VFP data-processing instructions. */
11659 if (!op1_sbit && !op)
851f26ae 11660 return arm_record_vfp_data_proc_insn (arm_insn_r);
60cc5e93
OJ
11661
11662 /* Advanced SIMD, VFP instructions. */
11663 if (!op1_sbit && op)
5a578da5 11664 return arm_record_vdata_transfer_insn (arm_insn_r);
60cc5e93 11665 }
97dfe206
OJ
11666 else
11667 {
60cc5e93
OJ
11668 /* Coprocessor data operations. */
11669 if (!op1_sbit && !op)
11670 return arm_record_unsupported_insn (arm_insn_r);
11671
11672 /* Move to Coprocessor from ARM core register. */
11673 if (!op1_sbit && !op1_ebit && op)
11674 return arm_record_unsupported_insn (arm_insn_r);
11675
11676 /* Move to arm core register from coprocessor. */
11677 if (!op1_sbit && op1_ebit && op)
11678 {
11679 uint32_t record_buf[1];
11680
11681 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11682 if (record_buf[0] == 15)
11683 record_buf[0] = ARM_PS_REGNUM;
11684
11685 arm_insn_r->reg_rec_count = 1;
11686 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11687 record_buf);
11688 return 0;
11689 }
97dfe206 11690 }
72508ac0 11691
60cc5e93 11692 return arm_record_unsupported_insn (arm_insn_r);
72508ac0
PO
11693}
11694
11695/* Handling opcode 000 insns. */
11696
11697static int
11698thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11699{
11700 uint32_t record_buf[8];
11701 uint32_t reg_src1 = 0;
11702
11703 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11704
11705 record_buf[0] = ARM_PS_REGNUM;
11706 record_buf[1] = reg_src1;
11707 thumb_insn_r->reg_rec_count = 2;
11708
11709 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11710
11711 return 0;
11712}
11713
11714
11715/* Handling opcode 001 insns. */
11716
11717static int
11718thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11719{
11720 uint32_t record_buf[8];
11721 uint32_t reg_src1 = 0;
11722
11723 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11724
11725 record_buf[0] = ARM_PS_REGNUM;
11726 record_buf[1] = reg_src1;
11727 thumb_insn_r->reg_rec_count = 2;
11728
11729 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11730
11731 return 0;
11732}
11733
11734/* Handling opcode 010 insns. */
11735
11736static int
11737thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11738{
11739 struct regcache *reg_cache = thumb_insn_r->regcache;
11740 uint32_t record_buf[8], record_buf_mem[8];
11741
11742 uint32_t reg_src1 = 0, reg_src2 = 0;
11743 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11744
11745 ULONGEST u_regval[2] = {0};
11746
11747 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11748
11749 if (bit (thumb_insn_r->arm_insn, 12))
11750 {
11751 /* Handle load/store register offset. */
11752 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11753 if (opcode2 >= 12 && opcode2 <= 15)
11754 {
11755 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11756 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11757 record_buf[0] = reg_src1;
11758 thumb_insn_r->reg_rec_count = 1;
11759 }
11760 else if (opcode2 >= 8 && opcode2 <= 10)
11761 {
11762 /* STR(2), STRB(2), STRH(2) . */
11763 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11764 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11765 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11766 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11767 if (8 == opcode2)
11768 record_buf_mem[0] = 4; /* STR (2). */
11769 else if (10 == opcode2)
11770 record_buf_mem[0] = 1; /* STRB (2). */
11771 else if (9 == opcode2)
11772 record_buf_mem[0] = 2; /* STRH (2). */
11773 record_buf_mem[1] = u_regval[0] + u_regval[1];
11774 thumb_insn_r->mem_rec_count = 1;
11775 }
11776 }
11777 else if (bit (thumb_insn_r->arm_insn, 11))
11778 {
11779 /* Handle load from literal pool. */
11780 /* LDR(3). */
11781 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11782 record_buf[0] = reg_src1;
11783 thumb_insn_r->reg_rec_count = 1;
11784 }
11785 else if (opcode1)
11786 {
11787 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11788 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11789 if ((3 == opcode2) && (!opcode3))
11790 {
11791 /* Branch with exchange. */
11792 record_buf[0] = ARM_PS_REGNUM;
11793 thumb_insn_r->reg_rec_count = 1;
11794 }
11795 else
11796 {
1f33efec
YQ
11797 /* Format 8; special data processing insns. */
11798 record_buf[0] = ARM_PS_REGNUM;
11799 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11800 | bits (thumb_insn_r->arm_insn, 0, 2));
72508ac0
PO
11801 thumb_insn_r->reg_rec_count = 2;
11802 }
11803 }
11804 else
11805 {
11806 /* Format 5; data processing insns. */
11807 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11808 if (bit (thumb_insn_r->arm_insn, 7))
11809 {
11810 reg_src1 = reg_src1 + 8;
11811 }
11812 record_buf[0] = ARM_PS_REGNUM;
11813 record_buf[1] = reg_src1;
11814 thumb_insn_r->reg_rec_count = 2;
11815 }
11816
11817 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11818 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11819 record_buf_mem);
11820
11821 return 0;
11822}
11823
11824/* Handling opcode 001 insns. */
11825
11826static int
11827thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11828{
11829 struct regcache *reg_cache = thumb_insn_r->regcache;
11830 uint32_t record_buf[8], record_buf_mem[8];
11831
11832 uint32_t reg_src1 = 0;
11833 uint32_t opcode = 0, immed_5 = 0;
11834
11835 ULONGEST u_regval = 0;
11836
11837 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11838
11839 if (opcode)
11840 {
11841 /* LDR(1). */
11842 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11843 record_buf[0] = reg_src1;
11844 thumb_insn_r->reg_rec_count = 1;
11845 }
11846 else
11847 {
11848 /* STR(1). */
11849 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11850 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11851 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11852 record_buf_mem[0] = 4;
11853 record_buf_mem[1] = u_regval + (immed_5 * 4);
11854 thumb_insn_r->mem_rec_count = 1;
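 /* Worked example with illustrative values: for STR r0, [r1, #8],
 immed_5 = 2, so 4 bytes at r1 + 8 are recorded. */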
11855 }
11856
11857 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11858 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11859 record_buf_mem);
11860
11861 return 0;
11862}
11863
11864/* Handling opcode 100 insns. */
11865
11866static int
11867thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11868{
11869 struct regcache *reg_cache = thumb_insn_r->regcache;
11870 uint32_t record_buf[8], record_buf_mem[8];
11871
11872 uint32_t reg_src1 = 0;
11873 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11874
11875 ULONGEST u_regval = 0;
11876
11877 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11878
11879 if (3 == opcode)
11880 {
11881 /* LDR(4). */
11882 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11883 record_buf[0] = reg_src1;
11884 thumb_insn_r->reg_rec_count = 1;
11885 }
11886 else if (1 == opcode)
11887 {
11888 /* LDRH(1). */
11889 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11890 record_buf[0] = reg_src1;
11891 thumb_insn_r->reg_rec_count = 1;
11892 }
11893 else if (2 == opcode)
11894 {
11895 /* STR(3). */
11896 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11897 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11898 record_buf_mem[0] = 4;
11899 record_buf_mem[1] = u_regval + (immed_8 * 4);
11900 thumb_insn_r->mem_rec_count = 1;
11901 }
11902 else if (0 == opcode)
11903 {
11904 /* STRH(1). */
11905 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11906 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11907 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11908 record_buf_mem[0] = 2;
11909 record_buf_mem[1] = u_regval + (immed_5 * 2);
11910 thumb_insn_r->mem_rec_count = 1;
11911 }
11912
11913 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11914 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11915 record_buf_mem);
11916
11917 return 0;
11918}
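
/* For example, the SP-relative store "str r2, [sp, #16]" (0x9204) hits the
   STR(3) case above: immed_8 is 4, so one 4-byte record at SP + 16 is
   registered.  */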
11919
11920/* Handling opcode 101 insns. */
11921
11922static int
11923thumb_record_misc (insn_decode_record *thumb_insn_r)
11924{
11925 struct regcache *reg_cache = thumb_insn_r->regcache;
11926
11927 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11928 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11929 uint32_t index = 0, start_address = 0;
11930 uint32_t record_buf[24], record_buf_mem[48];
11931 uint32_t reg_src1;
11932
11933 ULONGEST u_regval = 0;
11934
11935 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11936 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11937 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11938
11939 if (14 == opcode2)
11940 {
11941 /* POP. */
11942 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11943 while (register_bits)
11944 {
11945 if (register_bits & 0x00000001)
11946 record_buf[index++] = register_count;
11947 register_bits = register_bits >> 1;
11948 register_count++;
11949 }
11950 record_buf[index++] = ARM_PS_REGNUM;
11951 record_buf[index++] = ARM_SP_REGNUM;
11952 thumb_insn_r->reg_rec_count = index;
11953 }
11954 else if (10 == opcode2)
11955 {
11956 /* PUSH. */
11957 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
9904a494 11958 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11959 while (register_bits)
11960 {
11961 if (register_bits & 0x00000001)
11962 register_count++;
11963 register_bits = register_bits >> 1;
11964 }
11965 start_address = u_regval - \
11966 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11967 thumb_insn_r->mem_rec_count = register_count;
11968 while (register_count)
11969 {
11970 record_buf_mem[(register_count * 2) - 1] = start_address;
11971 record_buf_mem[(register_count * 2) - 2] = 4;
11972 start_address = start_address + 4;
11973 register_count--;
11974 }
11975 record_buf[0] = ARM_SP_REGNUM;
11976 thumb_insn_r->reg_rec_count = 1;
11977 }
11978 else if (0x1E == opcode1)
11979 {
11980 /* BKPT insn. */
11981 /* Handle the enhanced software breakpoint insn, BKPT. */
11982 /* The CPSR is changed so that execution continues in ARM state with
11983 normal interrupts disabled, entering abort mode. */
11984 /* The PC is set according to the high vector configuration. */
11985 /* When the user hits the breakpoint and then replays in reverse, we need
11986 to restore the previous CPSR and program counter. */
11987 record_buf[0] = ARM_PS_REGNUM;
11988 record_buf[1] = ARM_LR_REGNUM;
11989 thumb_insn_r->reg_rec_count = 2;
11990 /* We need to save SPSR value, which is not yet done. */
11991 printf_unfiltered (_("Process record does not support instruction "
11992 "0x%0x at address %s.\n"),
11993 thumb_insn_r->arm_insn,
11994 paddress (thumb_insn_r->gdbarch,
11995 thumb_insn_r->this_addr));
11996 return -1;
11997 }
11998 else if ((0 == opcode) || (1 == opcode))
11999 {
12000 /* ADD(5), ADD(6). */
12001 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12002 record_buf[0] = reg_src1;
12003 thumb_insn_r->reg_rec_count = 1;
12004 }
12005 else if (2 == opcode)
12006 {
12007 /* ADD(7), SUB(4). */
12008 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12009 record_buf[0] = ARM_SP_REGNUM;
12010 thumb_insn_r->reg_rec_count = 1;
12011 }
12012
12013 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12014 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12015 record_buf_mem);
12016
12017 return 0;
12018}
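
/* For example, "push {r0, r1}" (0xb403) is dispatched here: opcode2 is 10,
   register_count becomes 2 and bit 8 (LR) is clear, so two 4-byte records
   are registered at SP - 8 and SP - 4, and SP itself is recorded because
   the push updates it.  */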
12019
12020/* Handling opcode 110 insns. */
12021
12022static int
12023thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12024{
12025 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12026 struct regcache *reg_cache = thumb_insn_r->regcache;
12027
12028 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12029 uint32_t reg_src1 = 0;
12030 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 12031 uint32_t index = 0, start_address = 0;
12032 uint32_t record_buf[24], record_buf_mem[48];
12033
12034 ULONGEST u_regval = 0;
12035
12036 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12037 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12038
12039 if (1 == opcode2)
12040 {
12041
12042 /* LDMIA. */
12043 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12044 /* Get Rn. */
12045 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12046 while (register_bits)
12047 {
12048 if (register_bits & 0x00000001)
f969241e 12049 record_buf[index++] = register_count;
72508ac0 12050 register_bits = register_bits >> 1;
f969241e 12051 register_count++;
72508ac0 12052 }
12053 record_buf[index++] = reg_src1;
12054 thumb_insn_r->reg_rec_count = index;
12055 }
12056 else if (0 == opcode2)
12057 {
12058 /* Handle STMIA. */
12059 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12060 /* Get Rn. */
12061 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12062 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12063 while (register_bits)
12064 {
12065 if (register_bits & 0x00000001)
12066 register_count++;
12067 register_bits = register_bits >> 1;
12068 }
12069 start_address = u_regval;
12070 thumb_insn_r->mem_rec_count = register_count;
12071 while (register_count)
12072 {
12073 record_buf_mem[(register_count * 2) - 1] = start_address;
12074 record_buf_mem[(register_count * 2) - 2] = 4;
12075 start_address = start_address + 4;
12076 register_count--;
12077 }
12078 }
12079 else if (0x1F == opcode1)
12080 {
12081 /* Handle arm syscall insn. */
97dfe206 12082 if (tdep->arm_syscall_record != NULL)
72508ac0 12083 {
12084 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12085 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12086 }
12087 else
12088 {
12089 printf_unfiltered (_("no syscall record support\n"));
12090 return -1;
12091 }
12092 }
12093
12094 /* B (1), the conditional branch, is automatically taken care of in
12095 process_record, as the PC is saved there. */
12096
12097 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12098 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12099 record_buf_mem);
12100
12101 return ret;
12102}
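
/* For example, "stmia r3!, {r0, r1}" (0xc303) takes the STMIA path above:
   two 4-byte records are registered, one at the address in r3 and one at
   r3 + 4.  For SWI (opcode1 == 0x1F) the syscall number is read from r7
   and handed to the per-OS arm_syscall_record hook, which records the side
   effects of the particular system call.  */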
12103
12104/* Handling opcode 111 insns. */
12105
12106static int
12107thumb_record_branch (insn_decode_record *thumb_insn_r)
12108{
12109 uint32_t record_buf[8];
12110 uint32_t bits_h = 0;
12111
12112 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12113
12114 if (2 == bits_h || 3 == bits_h)
12115 {
12116 /* BL */
12117 record_buf[0] = ARM_LR_REGNUM;
12118 thumb_insn_r->reg_rec_count = 1;
12119 }
12120 else if (1 == bits_h)
12121 {
12122 /* BLX(1). */
12123 record_buf[0] = ARM_PS_REGNUM;
12124 record_buf[1] = ARM_LR_REGNUM;
12125 thumb_insn_r->reg_rec_count = 2;
12126 }
12127
12128 /* B(2) is automatically taken care of in process_record, as the PC
12129 is saved there. */
12130
12131 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12132
12133 return 0;
12134}
12135
12136/* Handler for thumb2 load/store multiple instructions. */
12137
12138static int
12139thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12140{
12141 struct regcache *reg_cache = thumb2_insn_r->regcache;
12142
12143 uint32_t reg_rn, op;
12144 uint32_t register_bits = 0, register_count = 0;
12145 uint32_t index = 0, start_address = 0;
12146 uint32_t record_buf[24], record_buf_mem[48];
12147
12148 ULONGEST u_regval = 0;
12149
12150 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12151 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12152
12153 if (0 == op || 3 == op)
12154 {
12155 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12156 {
12157 /* Handle RFE instruction. */
12158 record_buf[0] = ARM_PS_REGNUM;
12159 thumb2_insn_r->reg_rec_count = 1;
12160 }
12161 else
12162 {
12163 /* Handle SRS instruction after reading banked SP. */
12164 return arm_record_unsupported_insn (thumb2_insn_r);
12165 }
12166 }
12167 else if (1 == op || 2 == op)
12168 {
12169 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12170 {
12171 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12172 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12173 while (register_bits)
12174 {
12175 if (register_bits & 0x00000001)
12176 record_buf[index++] = register_count;
12177
12178 register_count++;
12179 register_bits = register_bits >> 1;
12180 }
12181 record_buf[index++] = reg_rn;
12182 record_buf[index++] = ARM_PS_REGNUM;
12183 thumb2_insn_r->reg_rec_count = index;
12184 }
12185 else
12186 {
12187 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12188 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12189 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12190 while (register_bits)
12191 {
12192 if (register_bits & 0x00000001)
12193 register_count++;
12194
12195 register_bits = register_bits >> 1;
12196 }
12197
12198 if (1 == op)
12199 {
12200 /* Start address calculation for STMIA/STMEA. */
12201 start_address = u_regval;
12202 }
12203 else if (2 == op)
12204 {
12205 /* Start address calculation for STMDB/STMFD. */
12206 start_address = u_regval - register_count * 4;
12207 }
12208
12209 thumb2_insn_r->mem_rec_count = register_count;
12210 while (register_count)
12211 {
12212 record_buf_mem[register_count * 2 - 1] = start_address;
12213 record_buf_mem[register_count * 2 - 2] = 4;
12214 start_address = start_address + 4;
12215 register_count--;
12216 }
12217 record_buf[0] = reg_rn;
12218 record_buf[1] = ARM_PS_REGNUM;
12219 thumb2_insn_r->reg_rec_count = 2;
12220 }
12221 }
12222
12223 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12224 record_buf_mem);
12225 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12226 record_buf);
12227 return ARM_RECORD_SUCCESS;
12228}
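
/* For example, "ldmia.w r0!, {r4, r5}" (0xe8b0 0x0030) falls into the load
   branch above: r4, r5, the base register r0 and the status register are
   all added to the register record list.  */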
12229
12230/* Handler for thumb2 load/store (dual/exclusive) and table branch
12231 instructions. */
12232
12233static int
12234thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12235{
12236 struct regcache *reg_cache = thumb2_insn_r->regcache;
12237
12238 uint32_t reg_rd, reg_rn, offset_imm;
12239 uint32_t reg_dest1, reg_dest2;
12240 uint32_t address, offset_addr;
12241 uint32_t record_buf[8], record_buf_mem[8];
12242 uint32_t op1, op2, op3;
12243
12244 ULONGEST u_regval[2];
12245
12246 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12247 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12248 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12249
12250 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12251 {
12252 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12253 {
12254 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12255 record_buf[0] = reg_dest1;
12256 record_buf[1] = ARM_PS_REGNUM;
12257 thumb2_insn_r->reg_rec_count = 2;
12258 }
12259
12260 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12261 {
12262 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12263 record_buf[2] = reg_dest2;
12264 thumb2_insn_r->reg_rec_count = 3;
12265 }
12266 }
12267 else
12268 {
12269 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12270 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12271
12272 if (0 == op1 && 0 == op2)
12273 {
12274 /* Handle STREX. */
12275 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12276 address = u_regval[0] + (offset_imm * 4);
12277 record_buf_mem[0] = 4;
12278 record_buf_mem[1] = address;
12279 thumb2_insn_r->mem_rec_count = 1;
12280 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12281 record_buf[0] = reg_rd;
12282 thumb2_insn_r->reg_rec_count = 1;
12283 }
12284 else if (1 == op1 && 0 == op2)
12285 {
12286 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12287 record_buf[0] = reg_rd;
12288 thumb2_insn_r->reg_rec_count = 1;
12289 address = u_regval[0];
12290 record_buf_mem[1] = address;
12291
12292 if (4 == op3)
12293 {
12294 /* Handle STREXB. */
12295 record_buf_mem[0] = 1;
12296 thumb2_insn_r->mem_rec_count = 1;
12297 }
12298 else if (5 == op3)
12299 {
12300 /* Handle STREXH. */
12301 record_buf_mem[0] = 2;
12302 thumb2_insn_r->mem_rec_count = 1;
12303 }
12304 else if (7 == op3)
12305 {
12306 /* Handle STREXD. */
12307 address = u_regval[0];
12308 record_buf_mem[0] = 4;
12309 record_buf_mem[2] = 4;
12310 record_buf_mem[3] = address + 4;
12311 thumb2_insn_r->mem_rec_count = 2;
12312 }
12313 }
12314 else
12315 {
12316 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12317
12318 if (bit (thumb2_insn_r->arm_insn, 24))
12319 {
12320 if (bit (thumb2_insn_r->arm_insn, 23))
12321 offset_addr = u_regval[0] + (offset_imm * 4);
12322 else
12323 offset_addr = u_regval[0] - (offset_imm * 4);
12324
12325 address = offset_addr;
12326 }
12327 else
12328 address = u_regval[0];
12329
12330 record_buf_mem[0] = 4;
12331 record_buf_mem[1] = address;
12332 record_buf_mem[2] = 4;
12333 record_buf_mem[3] = address + 4;
12334 thumb2_insn_r->mem_rec_count = 2;
12335 record_buf[0] = reg_rn;
12336 thumb2_insn_r->reg_rec_count = 1;
12337 }
12338 }
12339
12340 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12341 record_buf);
12342 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12343 record_buf_mem);
12344 return ARM_RECORD_SUCCESS;
12345}
12346
12347/* Handler for thumb2 data processing (shift register and modified immediate)
12348 instructions. */
12349
12350static int
12351thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12352{
12353 uint32_t reg_rd, op;
12354 uint32_t record_buf[8];
12355
12356 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12357 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12358
12359 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12360 {
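      /* With Rd == 15 these opcodes are TST, TEQ, CMN and CMP, which only
         update the flags, so recording the status register is enough.  */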
12361 record_buf[0] = ARM_PS_REGNUM;
12362 thumb2_insn_r->reg_rec_count = 1;
12363 }
12364 else
12365 {
12366 record_buf[0] = reg_rd;
12367 record_buf[1] = ARM_PS_REGNUM;
12368 thumb2_insn_r->reg_rec_count = 2;
12369 }
12370
12371 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12372 record_buf);
12373 return ARM_RECORD_SUCCESS;
12374}
12375
12376/* Generic handler for thumb2 instructions which affect destination and PS
12377 registers. */
12378
12379static int
12380thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12381{
12382 uint32_t reg_rd;
12383 uint32_t record_buf[8];
12384
12385 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12386
12387 record_buf[0] = reg_rd;
12388 record_buf[1] = ARM_PS_REGNUM;
12389 thumb2_insn_r->reg_rec_count = 2;
12390
12391 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12392 record_buf);
12393 return ARM_RECORD_SUCCESS;
12394}
12395
12396/* Handler for thumb2 branch and miscellaneous control instructions. */
12397
12398static int
12399thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12400{
12401 uint32_t op, op1, op2;
12402 uint32_t record_buf[8];
12403
12404 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12405 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12406 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12407
12408 /* Handle MSR insn. */
12409 if (!(op1 & 0x2) && 0x38 == op)
12410 {
12411 if (!(op2 & 0x3))
12412 {
12413 /* CPSR is going to be changed. */
12414 record_buf[0] = ARM_PS_REGNUM;
12415 thumb2_insn_r->reg_rec_count = 1;
12416 }
12417 else
12418 {
12419 arm_record_unsupported_insn (thumb2_insn_r);
12420 return -1;
12421 }
12422 }
12423 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12424 {
12425 /* BLX. */
12426 record_buf[0] = ARM_PS_REGNUM;
12427 record_buf[1] = ARM_LR_REGNUM;
12428 thumb2_insn_r->reg_rec_count = 2;
12429 }
12430
12431 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12432 record_buf);
12433 return ARM_RECORD_SUCCESS;
12434}
12435
12436/* Handler for thumb2 store single data item instructions. */
12437
12438static int
12439thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12440{
12441 struct regcache *reg_cache = thumb2_insn_r->regcache;
12442
12443 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12444 uint32_t address, offset_addr;
12445 uint32_t record_buf[8], record_buf_mem[8];
12446 uint32_t op1, op2;
12447
12448 ULONGEST u_regval[2];
12449
12450 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12451 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12452 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12453 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12454
12455 if (bit (thumb2_insn_r->arm_insn, 23))
12456 {
12457 /* T2 encoding. */
12458 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12459 offset_addr = u_regval[0] + offset_imm;
12460 address = offset_addr;
12461 }
12462 else
12463 {
12464 /* T3 encoding. */
12465 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12466 {
12467 /* Handle STRB (register). */
12468 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12469 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12470 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12471 offset_addr = u_regval[1] << shift_imm;
12472 address = u_regval[0] + offset_addr;
12473 }
12474 else
12475 {
12476 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12477 if (bit (thumb2_insn_r->arm_insn, 10))
12478 {
12479 if (bit (thumb2_insn_r->arm_insn, 9))
12480 offset_addr = u_regval[0] + offset_imm;
12481 else
12482 offset_addr = u_regval[0] - offset_imm;
12483
12484 address = offset_addr;
12485 }
12486 else
12487 address = u_regval[0];
12488 }
12489 }
12490
12491 switch (op1)
12492 {
12493 /* Store byte instructions. */
12494 case 4:
12495 case 0:
12496 record_buf_mem[0] = 1;
12497 break;
12498 /* Store half word instructions. */
12499 case 1:
12500 case 5:
12501 record_buf_mem[0] = 2;
12502 break;
12503 /* Store word instructions. */
12504 case 2:
12505 case 6:
12506 record_buf_mem[0] = 4;
12507 break;
12508
12509 default:
12510 gdb_assert_not_reached ("no decoding pattern found");
12511 break;
12512 }
12513
12514 record_buf_mem[1] = address;
12515 thumb2_insn_r->mem_rec_count = 1;
12516 record_buf[0] = reg_rn;
12517 thumb2_insn_r->reg_rec_count = 1;
12518
12519 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12520 record_buf);
12521 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12522 record_buf_mem);
12523 return ARM_RECORD_SUCCESS;
12524}
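
/* For example, "strb r1, [r0, #5]" (0xf880 0x1005) uses the 12-bit
   immediate form handled as "T2 encoding" above: the address is r0 + 5 and
   op1 is 4, so a single 1-byte record is registered there along with the
   base register r0.  */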
12525
12526/* Handler for thumb2 load memory hints instructions. */
12527
12528static int
12529thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12530{
12531 uint32_t record_buf[8];
12532 uint32_t reg_rt, reg_rn;
12533
12534 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12535 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12536
12537 if (ARM_PC_REGNUM != reg_rt)
12538 {
12539 record_buf[0] = reg_rt;
12540 record_buf[1] = reg_rn;
12541 record_buf[2] = ARM_PS_REGNUM;
12542 thumb2_insn_r->reg_rec_count = 3;
12543
12544 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12545 record_buf);
12546 return ARM_RECORD_SUCCESS;
12547 }
12548
12549 return ARM_RECORD_FAILURE;
12550}
12551
12552/* Handler for thumb2 load word instructions. */
12553
12554static int
12555thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12556{
12557 uint32_t record_buf[8];
12558
12559 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12560 record_buf[1] = ARM_PS_REGNUM;
12561 thumb2_insn_r->reg_rec_count = 2;
12562
12563 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12564 record_buf);
12565 return ARM_RECORD_SUCCESS;
12566}
12567
12568/* Handler for thumb2 long multiply, long multiply accumulate, and
12569 divide instructions. */
12570
12571static int
12572thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12573{
12574 uint32_t opcode1 = 0, opcode2 = 0;
12575 uint32_t record_buf[8];
12576
12577 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12578 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12579
12580 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12581 {
12582 /* Handle SMULL, UMULL, SMLAL. */
12583 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12584 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12585 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12586 record_buf[2] = ARM_PS_REGNUM;
12587 thumb2_insn_r->reg_rec_count = 3;
12588 }
12589 else if (1 == opcode1 || 3 == opcode1)
12590 {
12591 /* Handle SDIV and UDIV. */
12592 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12593 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12594 record_buf[2] = ARM_PS_REGNUM;
12595 thumb2_insn_r->reg_rec_count = 3;
12596 }
12597 else
12598 return ARM_RECORD_FAILURE;
12599
12600 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12601 record_buf);
12602 return ARM_RECORD_SUCCESS;
12603}
12604
12605/* Record handler for thumb32 coprocessor instructions. */
12606
12607static int
12608thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12609{
12610 if (bit (thumb2_insn_r->arm_insn, 25))
12611 return arm_record_coproc_data_proc (thumb2_insn_r);
12612 else
12613 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12614}
12615
12616/* Record handler for advanced SIMD structure load/store instructions. */
12617
12618static int
12619thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12620{
12621 struct regcache *reg_cache = thumb2_insn_r->regcache;
12622 uint32_t l_bit, a_bit, b_bits;
12623 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12624 uint32_t reg_rn, reg_vd, address, f_elem;
12625 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12626 uint8_t f_ebytes;
12627
12628 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12629 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12630 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12631 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12632 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12633 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12634 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12635 f_elem = 8 / f_ebytes;
12636
12637 if (!l_bit)
12638 {
12639 ULONGEST u_regval = 0;
12640 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12641 address = u_regval;
12642
12643 if (!a_bit)
12644 {
12645 /* Handle VST1. */
12646 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12647 {
12648 if (b_bits == 0x07)
12649 bf_regs = 1;
12650 else if (b_bits == 0x0a)
12651 bf_regs = 2;
12652 else if (b_bits == 0x06)
12653 bf_regs = 3;
12654 else if (b_bits == 0x02)
12655 bf_regs = 4;
12656 else
12657 bf_regs = 0;
12658
12659 for (index_r = 0; index_r < bf_regs; index_r++)
12660 {
12661 for (index_e = 0; index_e < f_elem; index_e++)
12662 {
12663 record_buf_mem[index_m++] = f_ebytes;
12664 record_buf_mem[index_m++] = address;
12665 address = address + f_ebytes;
12666 thumb2_insn_r->mem_rec_count += 1;
12667 }
12668 }
12669 }
12670 /* Handle VST2. */
12671 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12672 {
12673 if (b_bits == 0x09 || b_bits == 0x08)
12674 bf_regs = 1;
12675 else if (b_bits == 0x03)
12676 bf_regs = 2;
12677 else
12678 bf_regs = 0;
12679
12680 for (index_r = 0; index_r < bf_regs; index_r++)
12681 for (index_e = 0; index_e < f_elem; index_e++)
12682 {
12683 for (loop_t = 0; loop_t < 2; loop_t++)
12684 {
12685 record_buf_mem[index_m++] = f_ebytes;
12686 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12687 thumb2_insn_r->mem_rec_count += 1;
12688 }
12689 address = address + (2 * f_ebytes);
12690 }
12691 }
12692 /* Handle VST3. */
12693 else if ((b_bits & 0x0e) == 0x04)
12694 {
12695 for (index_e = 0; index_e < f_elem; index_e++)
12696 {
12697 for (loop_t = 0; loop_t < 3; loop_t++)
12698 {
12699 record_buf_mem[index_m++] = f_ebytes;
12700 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12701 thumb2_insn_r->mem_rec_count += 1;
12702 }
12703 address = address + (3 * f_ebytes);
12704 }
12705 }
12706 /* Handle VST4. */
12707 else if (!(b_bits & 0x0e))
12708 {
12709 for (index_e = 0; index_e < f_elem; index_e++)
12710 {
12711 for (loop_t = 0; loop_t < 4; loop_t++)
12712 {
12713 record_buf_mem[index_m++] = f_ebytes;
12714 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12715 thumb2_insn_r->mem_rec_count += 1;
12716 }
12717 address = address + (4 * f_ebytes);
12718 }
12719 }
12720 }
12721 else
12722 {
12723 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12724
12725 if (bft_size == 0x00)
12726 f_ebytes = 1;
12727 else if (bft_size == 0x01)
12728 f_ebytes = 2;
12729 else if (bft_size == 0x02)
12730 f_ebytes = 4;
12731 else
12732 f_ebytes = 0;
12733
12734 /* Handle VST1. */
12735 if (!(b_bits & 0x0b) || b_bits == 0x08)
12736 thumb2_insn_r->mem_rec_count = 1;
12737 /* Handle VST2. */
12738 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12739 thumb2_insn_r->mem_rec_count = 2;
12740 /* Handle VST3. */
12741 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12742 thumb2_insn_r->mem_rec_count = 3;
12743 /* Handle VST4. */
12744 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12745 thumb2_insn_r->mem_rec_count = 4;
12746
12747 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12748 {
12749 record_buf_mem[index_m * 2] = f_ebytes;
12750 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12751 }
12752 }
12753 }
12754 else
12755 {
12756 if (!a_bit)
12757 {
12758 /* Handle VLD1. */
12759 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12760 thumb2_insn_r->reg_rec_count = 1;
12761 /* Handle VLD2. */
12762 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12763 thumb2_insn_r->reg_rec_count = 2;
12764 /* Handle VLD3. */
12765 else if ((b_bits & 0x0e) == 0x04)
12766 thumb2_insn_r->reg_rec_count = 3;
12767 /* Handle VLD4. */
12768 else if (!(b_bits & 0x0e))
12769 thumb2_insn_r->reg_rec_count = 4;
12770 }
12771 else
12772 {
12773 /* Handle VLD1. */
12774 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12775 thumb2_insn_r->reg_rec_count = 1;
12776 /* Handle VLD2. */
12777 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12778 thumb2_insn_r->reg_rec_count = 2;
12779 /* Handle VLD3. */
12780 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12781 thumb2_insn_r->reg_rec_count = 3;
12782 /* Handle VLD4. */
12783 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12784 thumb2_insn_r->reg_rec_count = 4;
12785
12786 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12787 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12788 }
12789 }
12790
12791 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12792 {
12793 record_buf[index_r] = reg_rn;
12794 thumb2_insn_r->reg_rec_count += 1;
12795 }
12796
12797 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12798 record_buf);
12799 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12800 record_buf_mem);
12801 return 0;
12802}
12803
12804/* Decodes thumb2 instruction type and invokes its record handler. */
12805
12806static unsigned int
12807thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12808{
12809 uint32_t op, op1, op2;
12810
12811 op = bit (thumb2_insn_r->arm_insn, 15);
12812 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12813 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12814
12815 if (op1 == 0x01)
12816 {
12817 if (!(op2 & 0x64))
12818 {
12819 /* Load/store multiple instruction. */
12820 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12821 }
12822 else if (!((op2 & 0x64) ^ 0x04))
12823 {
12824 /* Load/store (dual/exclusive) and table branch instruction. */
12825 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12826 }
12827 else if (!((op2 & 0x20) ^ 0x20))
12828 {
12829 /* Data-processing (shifted register). */
12830 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12831 }
12832 else if (op2 & 0x40)
12833 {
12834 /* Co-processor instructions. */
60cc5e93 12835 return thumb2_record_coproc_insn (thumb2_insn_r);
12836 }
12837 }
12838 else if (op1 == 0x02)
12839 {
12840 if (op)
12841 {
12842 /* Branches and miscellaneous control instructions. */
12843 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12844 }
12845 else if (op2 & 0x20)
12846 {
12847 /* Data-processing (plain binary immediate) instruction. */
12848 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12849 }
12850 else
12851 {
12852 /* Data-processing (modified immediate). */
12853 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12854 }
12855 }
12856 else if (op1 == 0x03)
12857 {
12859 if (!(op2 & 0x71))
12859 {
12860 /* Store single data item. */
12861 return thumb2_record_str_single_data (thumb2_insn_r);
12862 }
12863 else if (!((op2 & 0x71) ^ 0x10))
12864 {
12865 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12866 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12867 }
12868 else if (!((op2 & 0x67) ^ 0x01))
12869 {
12870 /* Load byte, memory hints instruction. */
12871 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12872 }
12873 else if (!((op2 & 0x67) ^ 0x03))
12874 {
12875 /* Load halfword, memory hints instruction. */
12876 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12877 }
12878 else if (!((op2 & 0x67) ^ 0x05))
12879 {
12880 /* Load word instruction. */
12881 return thumb2_record_ld_word (thumb2_insn_r);
12882 }
12883 else if (!((op2 & 0x70) ^ 0x20))
12884 {
12885 /* Data-processing (register) instruction. */
12886 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12887 }
12888 else if (!((op2 & 0x78) ^ 0x30))
12889 {
12890 /* Multiply, multiply accumulate, abs diff instruction. */
12891 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12892 }
12893 else if (!((op2 & 0x78) ^ 0x38))
12894 {
12895 /* Long multiply, long multiply accumulate, and divide. */
12896 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12897 }
12898 else if (op2 & 0x40)
12899 {
12900 /* Co-processor instructions. */
60cc5e93 12901 return thumb2_record_coproc_insn (thumb2_insn_r);
12902 }
12903 }
12904
12905 return -1;
12906}
12907
12908/* Extract the arm/thumb/thumb2 insn depending on the size, and return 0 on
12909 success and a positive value on failure. */
12910
12911static int
12912extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12913{
12914 gdb_byte buf[insn_size];
12915
12916 memset (&buf[0], 0, insn_size);
12917
12918 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12919 return 1;
12920 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12921 insn_size,
2959fed9 12922 gdbarch_byte_order_for_code (insn_record->gdbarch));
12923 return 0;
12924}
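
/* arm_process_record below first fetches the 16-bit halfword at the
   instruction address with this routine; depending on the T bit in the CPSR
   and, for Thumb state, on bits 11-15 of that halfword, decode_insn then
   re-reads the instruction as a 2-byte Thumb, 4-byte Thumb2 or 4-byte ARM
   instruction and dispatches it to the matching record handler.  */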
12925
12926typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12927
12928/* Decode an arm/thumb insn depending on condition codes and opcodes, and
12929 dispatch it. */
12930
12931static int
12932decode_insn (insn_decode_record *arm_record, record_type_t record_type,
01e57735 12933 uint32_t insn_size)
12934{
12935
12936 /* (Counting from 0), bits 25, 26 and 27 decode the type of arm
12937 instruction. */
0fa9c223 12938 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12939 {
12940 arm_record_data_proc_misc_ld_str, /* 000. */
12941 arm_record_data_proc_imm, /* 001. */
12942 arm_record_ld_st_imm_offset, /* 010. */
12943 arm_record_ld_st_reg_offset, /* 011. */
12944 arm_record_ld_st_multiple, /* 100. */
12945 arm_record_b_bl, /* 101. */
60cc5e93 12946 arm_record_asimd_vfp_coproc, /* 110. */
12947 arm_record_coproc_data_proc /* 111. */
12948 };
12949
12950 /* (Counting from 0), bits 13, 14 and 15 decode the type of thumb
12951 instruction. */
0fa9c223 12952 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12953 {
12954 thumb_record_shift_add_sub, /* 000. */
12955 thumb_record_add_sub_cmp_mov, /* 001. */
12956 thumb_record_ld_st_reg_offset, /* 010. */
12957 thumb_record_ld_st_imm_offset, /* 011. */
12958 thumb_record_ld_st_stack, /* 100. */
12959 thumb_record_misc, /* 101. */
12960 thumb_record_ldm_stm_swi, /* 110. */
12961 thumb_record_branch /* 111. */
12962 };
12963
12964 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12965 uint32_t insn_id = 0;
12966
12967 if (extract_arm_insn (arm_record, insn_size))
12968 {
12969 if (record_debug)
12970 {
12971 printf_unfiltered (_("Process record: error reading memory at "
12972 "addr %s len = %d.\n"),
12973 paddress (arm_record->gdbarch,
12974 arm_record->this_addr), insn_size);
12975 }
12976 return -1;
12977 }
12978 else if (ARM_RECORD == record_type)
12979 {
12980 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12981 insn_id = bits (arm_record->arm_insn, 25, 27);
12982
12983 if (arm_record->cond == 0xf)
12984 ret = arm_record_extension_space (arm_record);
12985 else
01e57735 12986 {
12987 /* If this insn has fallen into extension space
12988 then we need not decode it anymore. */
12989 ret = arm_handle_insn[insn_id] (arm_record);
12990 }
12991 if (ret != ARM_RECORD_SUCCESS)
12992 {
12993 arm_record_unsupported_insn (arm_record);
12994 ret = -1;
12995 }
12996 }
12997 else if (THUMB_RECORD == record_type)
12998 {
12999 /* As thumb does not have condition codes, set cond to a negative value. */
13000 arm_record->cond = -1;
13001 insn_id = bits (arm_record->arm_insn, 13, 15);
13002 ret = thumb_handle_insn[insn_id] (arm_record);
13003 if (ret != ARM_RECORD_SUCCESS)
13004 {
13005 arm_record_unsupported_insn (arm_record);
13006 ret = -1;
13007 }
13008 }
13009 else if (THUMB2_RECORD == record_type)
13010 {
13011 /* As thumb does not have condition codes, set cond to a negative value. */
13012 arm_record->cond = -1;
13013
13014 /* Swap first half of 32bit thumb instruction with second half. */
13015 arm_record->arm_insn
01e57735 13016 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 13017
ca92db2d 13018 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 13019
ca92db2d 13020 if (ret != ARM_RECORD_SUCCESS)
13021 {
13022 arm_record_unsupported_insn (arm_record);
13023 ret = -1;
13024 }
13025 }
13026 else
13027 {
13028 /* Throw assertion. */
13029 gdb_assert_not_reached ("not a valid instruction, could not decode");
13030 }
13031
13032 return ret;
13033}
13034
13035
13036/* Cleans up local record registers and memory allocations. */
13037
13038static void
13039deallocate_reg_mem (insn_decode_record *record)
13040{
13041 xfree (record->arm_regs);
13042 xfree (record->arm_mems);
13043}
13044
13045
01e57735 13046/* Parse the current instruction and record the values of the registers and
13047 memory that will be changed by the current instruction to "record_arch_list".
13048 Return -1 if something is wrong. */
13049
13050int
13051arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13052 CORE_ADDR insn_addr)
13053{
13054
13055 uint32_t no_of_rec = 0;
13056 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13057 ULONGEST t_bit = 0, insn_id = 0;
13058
13059 ULONGEST u_regval = 0;
13060
13061 insn_decode_record arm_record;
13062
13063 memset (&arm_record, 0, sizeof (insn_decode_record));
13064 arm_record.regcache = regcache;
13065 arm_record.this_addr = insn_addr;
13066 arm_record.gdbarch = gdbarch;
13067
13068
13069 if (record_debug > 1)
13070 {
13071 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 13072 "addr = %s\n",
13073 paddress (gdbarch, arm_record.this_addr));
13074 }
13075
13076 if (extract_arm_insn (&arm_record, 2))
13077 {
13078 if (record_debug)
13079 {
13080 printf_unfiltered (_("Process record: error reading memory at "
13081 "addr %s len = %d.\n"),
13082 paddress (arm_record.gdbarch,
13083 arm_record.this_addr), 2);
13084 }
13085 return -1;
13086 }
13087
13088 /* Check whether the insn is a Thumb or an ARM one. */
13089
13090 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13091 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13092
13093
13094 if (!(u_regval & t_bit))
13095 {
13096 /* We are decoding arm insn. */
13097 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13098 }
13099 else
13100 {
13101 insn_id = bits (arm_record.arm_insn, 11, 15);
13102 /* Is it a Thumb2 insn? */
13103 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13104 {
13105 ret = decode_insn (&arm_record, THUMB2_RECORD,
13106 THUMB2_INSN_SIZE_BYTES);
13107 }
72508ac0 13108 else
13109 {
13110 /* We are decoding thumb insn. */
13111 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13112 }
13113 }
13114
13115 if (0 == ret)
13116 {
13117 /* Record registers. */
25ea693b 13118 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13119 if (arm_record.arm_regs)
13120 {
13121 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13122 {
13123 if (record_full_arch_list_add_reg
25ea693b 13124 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
13125 ret = -1;
13126 }
13127 }
13128 /* Record memories. */
13129 if (arm_record.arm_mems)
13130 {
13131 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13132 {
13133 if (record_full_arch_list_add_mem
13134 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13135 arm_record.arm_mems[no_of_rec].len))
13136 ret = -1;
13137 }
13138 }
72508ac0 13139
25ea693b 13140 if (record_full_arch_list_add_end ())
01e57735 13141 ret = -1;
13142 }
13143
13144
13145 deallocate_reg_mem (&arm_record);
13146
13147 return ret;
13148}