Fix PR breakpoints/20739: Badly formatted address string in error message
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2016 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"

#include <ctype.h>		/* XXX for isupper ().  */

#include "frame.h"
#include "inferior.h"
#include "infrun.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "dis-asm.h"		/* For register styles.  */
#include "regcache.h"
#include "reggroups.h"
#include "doublest.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2-frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "remote.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observer.h"

#include "arch/arm.h"
#include "arch/arm-get-next-pcs.h"
#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "vec.h"

#include "record.h"
#include "record-full.h"
#include <algorithm>

#include "features/arm/arm-with-m.c"
#include "features/arm/arm-with-m-fpa-layout.c"
#include "features/arm/arm-with-m-vfp-d16.c"
#include "features/arm/arm-with-iwmmxt.c"
#include "features/arm/arm-with-vfpv2.c"
#include "features/arm/arm-with-vfpv3.c"
#include "features/arm/arm-with-neon.c"

static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)	\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)	\
	MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
  {
    "auto",
    "arm",
    "thumb",
    NULL
  };

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc(char *, int,
					struct cmd_list_element *);
static void set_disassembly_style (void);

static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
  arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This effects the behaviour of
   certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

int
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if the processor is currently executing in Thumb mode.  */

int
arm_is_thumb (struct regcache *regcache)
{
  ULONGEST cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Callback for VEC_lower_bound.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  return 0;
}

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}

/* Determine if the address specified equals any of these magic return
   values, called EXC_RETURN, defined by the ARM v6-M and v7-M
   architectures.

   From ARMv6-M Reference Manual B1.5.8
   Table B1-5 Exception return behavior

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   From ARMv7-M Reference Manual B1.5.8
   Table B1-8 EXC_RETURN definition of exception return behavior, no FP

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   Table B1-9 EXC_RETURN definition of exception return behavior, with
   FP

   EXC_RETURN    Return To        Return Stack    Frame Type
   0xFFFFFFE1    Handler mode     Main            Extended
   0xFFFFFFE9    Thread mode      Main            Extended
   0xFFFFFFED    Thread mode      Process         Extended
   0xFFFFFFF1    Handler mode     Main            Basic
   0xFFFFFFF9    Thread mode      Main            Basic
   0xFFFFFFFD    Thread mode      Process         Basic

   For more details see "B1.5.8 Exception return behavior"
   in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */

static int
arm_m_addr_is_magic (CORE_ADDR addr)
{
  switch (addr)
    {
      /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
	 the exception return behavior.  */
      case 0xffffffe1:
      case 0xffffffe9:
      case 0xffffffed:
      case 0xfffffff1:
      case 0xfffffff9:
      case 0xfffffffd:
	/* Address is magic.  */
	return 1;

      default:
	/* Address is not magic.  */
	return 0;
    }
}

/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}
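
/* Illustrative sketch (not part of the upstream file): how the helpers
   above combine, assuming the default arm_apcs_32 == 1.  With bit 0
   set, an address is treated as Thumb and the bit is stripped, while
   on an M-profile target the magic EXC_RETURN value 0xfffffffd is left
   untouched so the unwinder can still recognize it.  The function name
   below is hypothetical and the block is compiled out.  */
#if 0	/* Example only; assumes a valid GDBARCH is in scope.  */
static void
arm_addr_bits_remove_example (struct gdbarch *gdbarch)
{
  CORE_ADDR thumb_fn = 0x8001;	/* Bit 0 set => Thumb address.  */

  gdb_assert (arm_pc_is_thumb (gdbarch, thumb_fn));
  gdb_assert (arm_addr_bits_remove (gdbarch, thumb_fn) == 0x8000);

  if (gdbarch_tdep (gdbarch)->is_m)
    /* EXC_RETURN values survive unchanged.  */
    gdb_assert (arm_addr_bits_remove (gdbarch, 0xfffffffd) == 0xfffffffd);
}
#endif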

/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16 bits of the instruction, and INSN2 is the second 16 bits of
   the instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
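
/* Worked example (illustration only, not part of the upstream code):
   "movw r1, #0x1234" assembles to 0xe3011234 in the A encoding and to
   the halfword pair 0xf241 0x2134 in the T encoding; both macros
   recover the same 16-bit immediate.  The function name below is
   hypothetical and the block is compiled out.  */
#if 0	/* Example only.  */
static void
extract_movw_movt_imm_example (void)
{
  gdb_assert (EXTRACT_MOVW_MOVT_IMM_A (0xe3011234) == 0x1234);
  gdb_assert (EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2134) == 0x1234);
}
#endif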

/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}
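
/* Worked examples (illustration only): the modified immediate 0x155 has
   top bits '00' and selector '01', so it expands to the repeated byte
   pattern 0x00550055; 0x4ff has a rotation count of 9, so the byte 0xff
   (with the implicit high bit) rotates into 0x7f800000.  The function
   name below is hypothetical and the block is compiled out.  */
#if 0	/* Example only.  */
static void
thumb_expand_immediate_example (void)
{
  gdb_assert (thumb_expand_immediate (0x155) == 0x00550055);
  gdb_assert (thumb_expand_immediate (0x4ff) == 0x7f800000);
}
#endif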

/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		/* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	/* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}
29d73ae4
DJ
633/* Analyze a Thumb prologue, looking for a recognizable stack frame
634 and frame pointer. Scan until we encounter a store that could
0d39a070
DJ
635 clobber the stack frame unexpectedly, or an unknown instruction.
636 Return the last address which is definitely safe to skip for an
637 initial breakpoint. */
c906108c
SS
638
639static CORE_ADDR
29d73ae4
DJ
640thumb_analyze_prologue (struct gdbarch *gdbarch,
641 CORE_ADDR start, CORE_ADDR limit,
642 struct arm_prologue_cache *cache)
c906108c 643{
0d39a070 644 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 645 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
29d73ae4
DJ
646 int i;
647 pv_t regs[16];
648 struct pv_area *stack;
649 struct cleanup *back_to;
650 CORE_ADDR offset;
ec3d575a 651 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 652
29d73ae4
DJ
653 for (i = 0; i < 16; i++)
654 regs[i] = pv_register (i, 0);
55f960e1 655 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
29d73ae4
DJ
656 back_to = make_cleanup_free_pv_area (stack);
657
29d73ae4 658 while (start < limit)
c906108c 659 {
29d73ae4
DJ
660 unsigned short insn;
661
e17a4113 662 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 663
94c30b78 664 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 665 {
29d73ae4
DJ
666 int regno;
667 int mask;
4be43953
DJ
668
669 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
670 break;
29d73ae4
DJ
671
672 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
673 whether to save LR (R14). */
674 mask = (insn & 0xff) | ((insn & 0x100) << 6);
675
676 /* Calculate offsets of saved R0-R7 and LR. */
677 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
678 if (mask & (1 << regno))
679 {
29d73ae4
DJ
680 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
681 -4);
682 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
683 }
da59e081 684 }
1db01f22 685 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
da59e081 686 {
29d73ae4 687 offset = (insn & 0x7f) << 2; /* get scaled offset */
1db01f22
YQ
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
689 -offset);
da59e081 690 }
808f7ab1
YQ
691 else if (thumb_instruction_restores_sp (insn))
692 {
693 /* Don't scan past the epilogue. */
694 break;
695 }
0d39a070
DJ
696 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
697 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
698 (insn & 0xff) << 2);
699 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
700 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
701 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
702 bits (insn, 6, 8));
703 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
704 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
706 bits (insn, 0, 7));
707 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
708 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
709 && pv_is_constant (regs[bits (insn, 3, 5)]))
710 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
711 regs[bits (insn, 6, 8)]);
712 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
713 && pv_is_constant (regs[bits (insn, 3, 6)]))
714 {
715 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
716 int rm = bits (insn, 3, 6);
717 regs[rd] = pv_add (regs[rd], regs[rm]);
718 }
29d73ae4 719 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 720 {
29d73ae4
DJ
721 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
722 int src_reg = (insn & 0x78) >> 3;
723 regs[dst_reg] = regs[src_reg];
da59e081 724 }
29d73ae4 725 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 726 {
29d73ae4
DJ
727 /* Handle stores to the stack. Normally pushes are used,
728 but with GCC -mtpcs-frame, there may be other stores
729 in the prologue to create the frame. */
730 int regno = (insn >> 8) & 0x7;
731 pv_t addr;
732
733 offset = (insn & 0xff) << 2;
734 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
735
736 if (pv_area_store_would_trash (stack, addr))
737 break;
738
739 pv_area_store (stack, addr, 4, regs[regno]);
da59e081 740 }
0d39a070
DJ
741 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
742 {
743 int rd = bits (insn, 0, 2);
744 int rn = bits (insn, 3, 5);
745 pv_t addr;
746
747 offset = bits (insn, 6, 10) << 2;
748 addr = pv_add_constant (regs[rn], offset);
749
750 if (pv_area_store_would_trash (stack, addr))
751 break;
752
753 pv_area_store (stack, addr, 4, regs[rd]);
754 }
755 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
756 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
757 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
758 /* Ignore stores of argument registers to the stack. */
759 ;
760 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
761 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
762 /* Ignore block loads from the stack, potentially copying
763 parameters from memory. */
764 ;
765 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
766 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
768 /* Similarly ignore single loads from the stack. */
769 ;
770 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
771 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
772 /* Skip register copies, i.e. saves to another register
773 instead of the stack. */
774 ;
775 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
776 /* Recognize constant loads; even with small stacks these are necessary
777 on Thumb. */
778 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
779 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
780 {
781 /* Constant pool loads, for the same reason. */
782 unsigned int constant;
783 CORE_ADDR loc;
784
785 loc = start + 4 + bits (insn, 0, 7) * 4;
786 constant = read_memory_unsigned_integer (loc, 4, byte_order);
787 regs[bits (insn, 8, 10)] = pv_constant (constant);
788 }
db24da6d 789 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 790 {
0d39a070
DJ
791 unsigned short inst2;
792
793 inst2 = read_memory_unsigned_integer (start + 2, 2,
794 byte_order_for_code);
795
796 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
797 {
798 /* BL, BLX. Allow some special function calls when
799 skipping the prologue; GCC generates these before
800 storing arguments to the stack. */
801 CORE_ADDR nextpc;
802 int j1, j2, imm1, imm2;
803
804 imm1 = sbits (insn, 0, 10);
805 imm2 = bits (inst2, 0, 10);
806 j1 = bit (inst2, 13);
807 j2 = bit (inst2, 11);
808
809 offset = ((imm1 << 12) + (imm2 << 1));
810 offset ^= ((!j2) << 22) | ((!j1) << 23);
811
812 nextpc = start + 4 + offset;
813 /* For BLX make sure to clear the low bits. */
814 if (bit (inst2, 12) == 0)
815 nextpc = nextpc & 0xfffffffc;
816
e0634ccf
UW
817 if (!skip_prologue_function (gdbarch, nextpc,
818 bit (inst2, 12) != 0))
0d39a070
DJ
819 break;
820 }
ec3d575a 821
0963b4bd
MS
822 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
823 { registers } */
ec3d575a
UW
824 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
825 {
826 pv_t addr = regs[bits (insn, 0, 3)];
827 int regno;
828
829 if (pv_area_store_would_trash (stack, addr))
830 break;
831
832 /* Calculate offsets of saved registers. */
833 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
834 if (inst2 & (1 << regno))
835 {
836 addr = pv_add_constant (addr, -4);
837 pv_area_store (stack, addr, 4, regs[regno]);
838 }
839
840 if (insn & 0x0020)
841 regs[bits (insn, 0, 3)] = addr;
842 }
843
0963b4bd
MS
844 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
845 [Rn, #+/-imm]{!} */
ec3d575a
UW
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
847 {
848 int regno1 = bits (inst2, 12, 15);
849 int regno2 = bits (inst2, 8, 11);
850 pv_t addr = regs[bits (insn, 0, 3)];
851
852 offset = inst2 & 0xff;
853 if (insn & 0x0080)
854 addr = pv_add_constant (addr, offset);
855 else
856 addr = pv_add_constant (addr, -offset);
857
858 if (pv_area_store_would_trash (stack, addr))
859 break;
860
861 pv_area_store (stack, addr, 4, regs[regno1]);
862 pv_area_store (stack, pv_add_constant (addr, 4),
863 4, regs[regno2]);
864
865 if (insn & 0x0020)
866 regs[bits (insn, 0, 3)] = addr;
867 }
868
869 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
870 && (inst2 & 0x0c00) == 0x0c00
871 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
872 {
873 int regno = bits (inst2, 12, 15);
874 pv_t addr = regs[bits (insn, 0, 3)];
875
876 offset = inst2 & 0xff;
877 if (inst2 & 0x0200)
878 addr = pv_add_constant (addr, offset);
879 else
880 addr = pv_add_constant (addr, -offset);
881
882 if (pv_area_store_would_trash (stack, addr))
883 break;
884
885 pv_area_store (stack, addr, 4, regs[regno]);
886
887 if (inst2 & 0x0100)
888 regs[bits (insn, 0, 3)] = addr;
889 }
890
891 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
893 {
894 int regno = bits (inst2, 12, 15);
895 pv_t addr;
896
897 offset = inst2 & 0xfff;
898 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
899
900 if (pv_area_store_would_trash (stack, addr))
901 break;
902
903 pv_area_store (stack, addr, 4, regs[regno]);
904 }
905
906 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 907 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 908 /* Ignore stores of argument registers to the stack. */
0d39a070 909 ;
ec3d575a
UW
910
911 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
912 && (inst2 & 0x0d00) == 0x0c00
0d39a070 913 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 914 /* Ignore stores of argument registers to the stack. */
0d39a070 915 ;
ec3d575a 916
0963b4bd
MS
917 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
918 { registers } */
ec3d575a
UW
919 && (inst2 & 0x8000) == 0x0000
920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
921 /* Ignore block loads from the stack, potentially copying
922 parameters from memory. */
0d39a070 923 ;
ec3d575a 924
0963b4bd
MS
925 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
926 [Rn, #+/-imm] */
0d39a070 927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 928 /* Similarly ignore dual loads from the stack. */
0d39a070 929 ;
ec3d575a
UW
930
931 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
932 && (inst2 & 0x0d00) == 0x0c00
0d39a070 933 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 934 /* Similarly ignore single loads from the stack. */
0d39a070 935 ;
ec3d575a
UW
936
937 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 939 /* Similarly ignore single loads from the stack. */
0d39a070 940 ;
ec3d575a
UW
941
942 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
943 && (inst2 & 0x8000) == 0x0000)
944 {
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
948
949 regs[bits (inst2, 8, 11)]
950 = pv_add_constant (regs[bits (insn, 0, 3)],
951 thumb_expand_immediate (imm));
952 }
953
954 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
955 && (inst2 & 0x8000) == 0x0000)
0d39a070 956 {
ec3d575a
UW
957 unsigned int imm = ((bits (insn, 10, 10) << 11)
958 | (bits (inst2, 12, 14) << 8)
959 | bits (inst2, 0, 7));
960
961 regs[bits (inst2, 8, 11)]
962 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
963 }
964
965 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
967 {
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 - (CORE_ADDR) thumb_expand_immediate (imm));
975 }
976
977 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
979 {
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
983
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
986 }
987
988 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
989 {
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
993
994 regs[bits (inst2, 8, 11)]
995 = pv_constant (thumb_expand_immediate (imm));
996 }
997
998 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
999 {
621c6d5b
YQ
1000 unsigned int imm
1001 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
ec3d575a
UW
1002
1003 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1004 }
1005
1006 else if (insn == 0xea5f /* mov.w Rd,Rm */
1007 && (inst2 & 0xf0f0) == 0)
1008 {
1009 int dst_reg = (inst2 & 0x0f00) >> 8;
1010 int src_reg = inst2 & 0xf;
1011 regs[dst_reg] = regs[src_reg];
1012 }
1013
1014 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1015 {
1016 /* Constant pool loads. */
1017 unsigned int constant;
1018 CORE_ADDR loc;
1019
cac395ea 1020 offset = bits (inst2, 0, 11);
ec3d575a
UW
1021 if (insn & 0x0080)
1022 loc = start + 4 + offset;
1023 else
1024 loc = start + 4 - offset;
1025
1026 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1027 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1028 }
1029
1030 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1031 {
1032 /* Constant pool loads. */
1033 unsigned int constant;
1034 CORE_ADDR loc;
1035
cac395ea 1036 offset = bits (inst2, 0, 7) << 2;
ec3d575a
UW
1037 if (insn & 0x0080)
1038 loc = start + 4 + offset;
1039 else
1040 loc = start + 4 - offset;
1041
1042 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1043 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1044
1045 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1046 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1047 }
1048
1049 else if (thumb2_instruction_changes_pc (insn, inst2))
1050 {
1051 /* Don't scan past anything that might change control flow. */
0d39a070
DJ
1052 break;
1053 }
ec3d575a
UW
1054 else
1055 {
1056 /* The optimizer might shove anything into the prologue,
1057 so we just skip what we don't recognize. */
1058 unrecognized_pc = start;
1059 }
0d39a070
DJ
1060
1061 start += 2;
1062 }
ec3d575a 1063 else if (thumb_instruction_changes_pc (insn))
3d74b771 1064 {
ec3d575a 1065 /* Don't scan past anything that might change control flow. */
da3c6d4a 1066 break;
3d74b771 1067 }
ec3d575a
UW
1068 else
1069 {
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1073 }
29d73ae4
DJ
1074
1075 start += 2;
c906108c
SS
1076 }
1077
0d39a070
DJ
1078 if (arm_debug)
1079 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1080 paddress (gdbarch, start));
1081
ec3d575a
UW
1082 if (unrecognized_pc == 0)
1083 unrecognized_pc = start;
1084
29d73ae4
DJ
1085 if (cache == NULL)
1086 {
1087 do_cleanups (back_to);
ec3d575a 1088 return unrecognized_pc;
29d73ae4
DJ
1089 }
1090
29d73ae4
DJ
1091 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1092 {
1093 /* Frame pointer is fp. Frame size is constant. */
1094 cache->framereg = ARM_FP_REGNUM;
1095 cache->framesize = -regs[ARM_FP_REGNUM].k;
1096 }
1097 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1098 {
1099 /* Frame pointer is r7. Frame size is constant. */
1100 cache->framereg = THUMB_FP_REGNUM;
1101 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1102 }
72a2e3dc 1103 else
29d73ae4
DJ
1104 {
1105 /* Try the stack pointer... this is a bit desperate. */
1106 cache->framereg = ARM_SP_REGNUM;
1107 cache->framesize = -regs[ARM_SP_REGNUM].k;
1108 }
29d73ae4
DJ
1109
1110 for (i = 0; i < 16; i++)
1111 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1112 cache->saved_regs[i].addr = offset;
1113
1114 do_cleanups (back_to);
ec3d575a 1115 return unrecognized_pc;
c906108c
SS
1116}
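
/* Worked example (illustration only, for a hypothetical Thumb prologue):
   given the sequence

	push	{r7, lr}	; 0xb580
	sub	sp, #16		; 0xb084
	add	r7, sp, #0	; 0xaf00

   the analyzer above records LR at original-SP - 4 and r7 at
   original-SP - 8, ends with r7 equal to original-SP - 24, and so, when
   a cache is supplied, reports framereg == THUMB_FP_REGNUM with
   framesize == 24.  */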


/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the dest register number to *BASEREG, and set the size
   of instructions for loading symbol in OFFSET.  Return 0 if instructions
   are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}

/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary across different ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		      "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}

/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
   stmfd	sp!, {...,fp,ip,lr,pc}
   [stfe	f7, [sp, #-12]!]
   [stfe	f6, [sp, #-12]!]
   [stfe	f5, [sp, #-12]!]
   [stfe	f4, [sp, #-12]!]
   sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}

/* *INDENT-OFF* */
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->       0  local variables (16 bytes)
     SP ->     -12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */
/* *INDENT-ON* */

static void
thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
{
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  prologue_end = std::min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}

/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
   otherwise.  */

static int
arm_instruction_restores_sp (unsigned int insn)
{
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0df0f000) == 0x0080d000
	  /* ADD SP (register or immediate).  */
	  || (insn & 0x0df0f000) == 0x0040d000
	  /* SUB SP (register or immediate).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000
	  /* MOV SP.  */
	  || (insn & 0x0fff0000) == 0x08bd0000
	  /* POP (LDMIA).  */
	  || (insn & 0x0fff0000) == 0x049d0000)
	  /* POP of a single register.  */
	return 1;
    }

  return 0;
}
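
/* For instance (illustration only), 0xe8bd8010 ("pop {r4, pc}", an LDMIA
   from SP) and 0xe28dd018 ("add sp, sp, #24") both match the masks above,
   whereas 0xe92d4010 ("push {r4, lr}") does not.  The function name below
   is hypothetical and the block is compiled out.  */
#if 0	/* Example only.  */
static void
arm_instruction_restores_sp_example (void)
{
  gdb_assert (arm_instruction_restores_sp (0xe8bd8010));
  gdb_assert (arm_instruction_restores_sp (0xe28dd018));
  gdb_assert (!arm_instruction_restores_sp (0xe92d4010));
}
#endif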
1470
0d39a070
DJ
1471/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1472 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1473 fill it in. Return the first address not recognized as a prologue
1474 instruction.
eb5492fa 1475
0d39a070
DJ
1476 We recognize all the instructions typically found in ARM prologues,
1477 plus harmless instructions which can be skipped (either for analysis
1478 purposes, or a more restrictive set that can be skipped when finding
1479 the end of the prologue). */
1480
1481static CORE_ADDR
1482arm_analyze_prologue (struct gdbarch *gdbarch,
1483 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1484 struct arm_prologue_cache *cache)
1485{
0d39a070
DJ
1486 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1487 int regno;
1488 CORE_ADDR offset, current_pc;
1489 pv_t regs[ARM_FPS_REGNUM];
1490 struct pv_area *stack;
1491 struct cleanup *back_to;
0d39a070
DJ
1492 CORE_ADDR unrecognized_pc = 0;
1493
1494 /* Search the prologue looking for instructions that set up the
96baa820 1495 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1496
96baa820
JM
1497 Be careful, however, and if it doesn't look like a prologue,
1498 don't try to scan it. If, for instance, a frameless function
1499 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1500 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1501 and other operations that rely on a knowledge of the stack
0d39a070 1502 traceback. */
d4473757 1503
4be43953
DJ
1504 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1505 regs[regno] = pv_register (regno, 0);
55f960e1 1506 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1507 back_to = make_cleanup_free_pv_area (stack);
1508
94c30b78
MS
1509 for (current_pc = prologue_start;
1510 current_pc < prologue_end;
f43845b3 1511 current_pc += 4)
96baa820 1512 {
e17a4113
UW
1513 unsigned int insn
1514 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1515
94c30b78 1516 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1517 {
4be43953 1518 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1519 continue;
1520 }
0d39a070
DJ
1521 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1522 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1523 {
1524 unsigned imm = insn & 0xff; /* immediate value */
1525 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1526 int rd = bits (insn, 12, 15);
28cd8767 1527 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1528 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1529 continue;
1530 }
0d39a070
DJ
1531 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1532 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1533 {
1534 unsigned imm = insn & 0xff; /* immediate value */
1535 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1536 int rd = bits (insn, 12, 15);
28cd8767 1537 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1538 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1539 continue;
1540 }
0963b4bd
MS
1541 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1542 [sp, #-4]! */
f43845b3 1543 {
4be43953
DJ
1544 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1545 break;
1546 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1547 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1548 regs[bits (insn, 12, 15)]);
f43845b3
MS
1549 continue;
1550 }
1551 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1552 /* stmfd sp!, {..., fp, ip, lr, pc}
1553 or
1554 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1555 {
d4473757 1556 int mask = insn & 0xffff;
ed9a39eb 1557
4be43953
DJ
1558 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1559 break;
1560
94c30b78 1561 /* Calculate offsets of saved registers. */
34e8f22d 1562 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1563 if (mask & (1 << regno))
1564 {
0963b4bd
MS
1565 regs[ARM_SP_REGNUM]
1566 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1567 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1568 }
1569 }
0d39a070
DJ
1570 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1571 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1572 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1573 {
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1575 continue;
1576 }
0d39a070
DJ
1577 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1578 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1579 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1580 {
1581 /* No need to add this to saved_regs -- it's just an arg reg. */
1582 continue;
1583 }
0963b4bd
MS
1584 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1585 { registers } */
0d39a070
DJ
1586 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1587 {
1588 /* No need to add this to saved_regs -- it's just arg regs. */
1589 continue;
1590 }
d4473757
KB
1591 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1592 {
94c30b78
MS
1593 unsigned imm = insn & 0xff; /* immediate value */
1594 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1595 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1596 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1597 }
1598 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1599 {
94c30b78
MS
1600 unsigned imm = insn & 0xff; /* immediate value */
1601 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1602 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1603 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1604 }
0963b4bd
MS
1605 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1606 [sp, -#c]! */
2af46ca0 1607 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1608 {
4be43953
DJ
1609 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1610 break;
1611
1612 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1613 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1614 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1615 }
0963b4bd
MS
1616 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1617 [sp!] */
2af46ca0 1618 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1619 {
1620 int n_saved_fp_regs;
1621 unsigned int fp_start_reg, fp_bound_reg;
1622
4be43953
DJ
1623 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1624 break;
1625
94c30b78 1626 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1627 {
d4473757
KB
1628 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs = 3;
1630 else
1631 n_saved_fp_regs = 1;
96baa820 1632 }
d4473757 1633 else
96baa820 1634 {
d4473757
KB
1635 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1636 n_saved_fp_regs = 2;
1637 else
1638 n_saved_fp_regs = 4;
96baa820 1639 }
d4473757 1640
34e8f22d 1641 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1642 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1643 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1644 {
4be43953
DJ
1645 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1646 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1647 regs[fp_start_reg++]);
96baa820 1648 }
c906108c 1649 }
0d39a070
DJ
1650 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1651 {
1652 /* Allow some special function calls when skipping the
1653 prologue; GCC generates these before storing arguments to
1654 the stack. */
1655 CORE_ADDR dest = BranchDest (current_pc, insn);
1656
e0634ccf 1657 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1658 continue;
1659 else
1660 break;
1661 }
d4473757 1662 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1663 break; /* Condition not true, exit early. */
0d39a070
DJ
1664 else if (arm_instruction_changes_pc (insn))
1665 /* Don't scan past anything that might change control flow. */
1666 break;
f303bc3e
YQ
1667 else if (arm_instruction_restores_sp (insn))
1668 {
1669 /* Don't scan past the epilogue. */
1670 break;
1671 }
d19f7eee
UW
1672 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1673 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1674 /* Ignore block loads from the stack, potentially copying
1675 parameters from memory. */
1676 continue;
1677 else if ((insn & 0xfc500000) == 0xe4100000
1678 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1679 /* Similarly ignore single loads from the stack. */
1680 continue;
0d39a070
DJ
1681 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1682 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1683 register instead of the stack. */
d4473757 1684 continue;
0d39a070
DJ
1685 else
1686 {
21daaaaf
YQ
 1687	  /* The optimizer might shove anything into the prologue.  If we
 1688	     are building up the cache (cache != NULL) from scanning the
 1689	     prologue, just skip what we don't recognize and scan further
 1690	     to make the cache as complete as possible.  However, if we
 1691	     are skipping the prologue, stop immediately at the first
 1692	     unrecognized instruction. */
0d39a070 1693 unrecognized_pc = current_pc;
21daaaaf
YQ
1694 if (cache != NULL)
1695 continue;
1696 else
1697 break;
0d39a070 1698 }
c906108c
SS
1699 }
1700
0d39a070
DJ
1701 if (unrecognized_pc == 0)
1702 unrecognized_pc = current_pc;
1703
0d39a070
DJ
1704 if (cache)
1705 {
4072f920
YQ
1706 int framereg, framesize;
1707
1708 /* The frame size is just the distance from the frame register
1709 to the original stack pointer. */
1710 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1711 {
1712 /* Frame pointer is fp. */
1713 framereg = ARM_FP_REGNUM;
1714 framesize = -regs[ARM_FP_REGNUM].k;
1715 }
1716 else
1717 {
1718 /* Try the stack pointer... this is a bit desperate. */
1719 framereg = ARM_SP_REGNUM;
1720 framesize = -regs[ARM_SP_REGNUM].k;
1721 }
1722
0d39a070
DJ
1723 cache->framereg = framereg;
1724 cache->framesize = framesize;
1725
1726 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1727 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1728 cache->saved_regs[regno].addr = offset;
1729 }
1730
1731 if (arm_debug)
1732 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1733 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1734
1735 do_cleanups (back_to);
0d39a070
DJ
1736 return unrecognized_pc;
1737}
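/* Illustrative sketch of what the scan above produces for a typical
   APCS prologue (hypothetical code; this assumes the earlier clauses
   of this function have already tracked "mov ip, sp"):

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #8

   leaves regs[ARM_FP_REGNUM] equal to the original SP minus 4, so
   FRAMEREG is the FP and FRAMESIZE is 4, while the saved FP, LR and
   PC are recorded at offsets -16, -8 and -4 from the original SP in
   CACHE->saved_regs.  */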
1738
1739static void
1740arm_scan_prologue (struct frame_info *this_frame,
1741 struct arm_prologue_cache *cache)
1742{
1743 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1744 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1745 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1746 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1747 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1748
1749 /* Assume there is no frame until proven otherwise. */
1750 cache->framereg = ARM_SP_REGNUM;
1751 cache->framesize = 0;
1752
1753 /* Check for Thumb prologue. */
1754 if (arm_frame_is_thumb (this_frame))
1755 {
1756 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1757 return;
1758 }
1759
1760 /* Find the function prologue. If we can't find the function in
1761 the symbol table, peek in the stack frame to find the PC. */
1762 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1763 &prologue_end))
1764 {
1765 /* One way to find the end of the prologue (which works well
1766 for unoptimized code) is to do the following:
1767
1768 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1769
1770 if (sal.line == 0)
1771 prologue_end = prev_pc;
1772 else if (sal.end < prologue_end)
1773 prologue_end = sal.end;
1774
1775 This mechanism is very accurate so long as the optimizer
1776 doesn't move any instructions from the function body into the
1777 prologue. If this happens, sal.end will be the last
1778 instruction in the first hunk of prologue code just before
1779 the first instruction that the scheduler has moved from
1780 the body to the prologue.
1781
1782 In order to make sure that we scan all of the prologue
1783 instructions, we use a slightly less accurate mechanism which
1784 may scan more than necessary. To help compensate for this
1785 lack of accuracy, the prologue scanning loop below contains
1786 several clauses which'll cause the loop to terminate early if
1787 an implausible prologue instruction is encountered.
1788
1789 The expression
1790
1791 prologue_start + 64
1792
1793 is a suitable endpoint since it accounts for the largest
1794 possible prologue plus up to five instructions inserted by
1795 the scheduler. */
1796
1797 if (prologue_end > prologue_start + 64)
1798 {
1799 prologue_end = prologue_start + 64; /* See above. */
1800 }
1801 }
1802 else
1803 {
1804 /* We have no symbol information. Our only option is to assume this
1805 function has a standard stack frame and the normal frame register.
1806 Then, we can find the value of our frame pointer on entrance to
1807 the callee (or at the present moment if this is the innermost frame).
1808 The value stored there should be the address of the stmfd + 8. */
1809 CORE_ADDR frame_loc;
1810 LONGEST return_value;
1811
1812 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1813 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1814 return;
1815 else
1816 {
1817 prologue_start = gdbarch_addr_bits_remove
1818 (gdbarch, return_value) - 8;
1819 prologue_end = prologue_start + 64; /* See above. */
1820 }
1821 }
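  /* For example (hypothetical addresses): if the frame was set up by
     a "stmfd sp!, {fp, ip, lr, pc}" at 0x8100, the slot that the FP
     points at holds 0x8108 (the stmfd address plus the 8-byte ARM
     pipeline offset), so the fallback computation above recovers
     prologue_start = 0x8100.  */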
1822
1823 if (prev_pc < prologue_end)
1824 prologue_end = prev_pc;
1825
1826 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1827}
1828
eb5492fa 1829static struct arm_prologue_cache *
a262aec2 1830arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1831{
eb5492fa
DJ
1832 int reg;
1833 struct arm_prologue_cache *cache;
1834 CORE_ADDR unwound_fp;
c5aa993b 1835
35d5d4ee 1836 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1837 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1838
a262aec2 1839 arm_scan_prologue (this_frame, cache);
848cfffb 1840
a262aec2 1841 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1842 if (unwound_fp == 0)
1843 return cache;
c906108c 1844
4be43953 1845 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1846
eb5492fa
DJ
1847 /* Calculate actual addresses of saved registers using offsets
1848 determined by arm_scan_prologue. */
a262aec2 1849 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1850 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1851 cache->saved_regs[reg].addr += cache->prev_sp;
1852
1853 return cache;
c906108c
SS
1854}
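/* Hypothetical numbers to illustrate the rebasing above: if the
   unwound frame-register value is 0xbefff7fc and the scanned
   FRAMESIZE is 4, PREV_SP becomes 0xbefff800, and a register that the
   prologue scan recorded at offset -8 ends up with the absolute
   address 0xbefff7f8.  */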
1855
c1ee9414
LM
1856/* Implementation of the stop_reason hook for arm_prologue frames. */
1857
1858static enum unwind_stop_reason
1859arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1860 void **this_cache)
1861{
1862 struct arm_prologue_cache *cache;
1863 CORE_ADDR pc;
1864
1865 if (*this_cache == NULL)
1866 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1867 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1868
1869 /* This is meant to halt the backtrace at "_start". */
1870 pc = get_frame_pc (this_frame);
1871 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1872 return UNWIND_OUTERMOST;
1873
1874 /* If we've hit a wall, stop. */
1875 if (cache->prev_sp == 0)
1876 return UNWIND_OUTERMOST;
1877
1878 return UNWIND_NO_REASON;
1879}
1880
eb5492fa
DJ
1881/* Our frame ID for a normal frame is the current function's starting PC
1882 and the caller's SP when we were called. */
c906108c 1883
148754e5 1884static void
a262aec2 1885arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1886 void **this_cache,
1887 struct frame_id *this_id)
c906108c 1888{
eb5492fa
DJ
1889 struct arm_prologue_cache *cache;
1890 struct frame_id id;
2c404490 1891 CORE_ADDR pc, func;
f079148d 1892
eb5492fa 1893 if (*this_cache == NULL)
a262aec2 1894 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1895 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1896
0e9e9abd
UW
1897 /* Use function start address as part of the frame ID. If we cannot
1898 identify the start address (due to missing symbol information),
1899 fall back to just using the current PC. */
c1ee9414 1900 pc = get_frame_pc (this_frame);
2c404490 1901 func = get_frame_func (this_frame);
0e9e9abd
UW
1902 if (!func)
1903 func = pc;
1904
eb5492fa 1905 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1906 *this_id = id;
c906108c
SS
1907}
1908
a262aec2
DJ
1909static struct value *
1910arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1911 void **this_cache,
a262aec2 1912 int prev_regnum)
24de872b 1913{
24568a2c 1914 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1915 struct arm_prologue_cache *cache;
1916
eb5492fa 1917 if (*this_cache == NULL)
a262aec2 1918 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1919 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1920
eb5492fa 1921 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1922 instead. The prologue may save PC, but it will point into this
1923 frame's prologue, not the next frame's resume location. Also
1924 strip the saved T bit. A valid LR may have the low bit set, but
1925 a valid PC never does. */
eb5492fa 1926 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1927 {
1928 CORE_ADDR lr;
1929
1930 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1931 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1932 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1933 }
24de872b 1934
eb5492fa 1935 /* SP is generally not saved to the stack, but this frame is
a262aec2 1936 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1937 The value was already reconstructed into PREV_SP. */
1938 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1939 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1940
b39cc962
DJ
1941 /* The CPSR may have been changed by the call instruction and by the
1942 called function. The only bit we can reconstruct is the T bit,
1943 by checking the low bit of LR as of the call. This is a reliable
1944 indicator of Thumb-ness except for some ARM v4T pre-interworking
1945 Thumb code, which could get away with a clear low bit as long as
1946 the called function did not use bx. Guess that all other
1947 bits are unchanged; the condition flags are presumably lost,
1948 but the processor status is likely valid. */
1949 if (prev_regnum == ARM_PS_REGNUM)
1950 {
1951 CORE_ADDR lr, cpsr;
9779414d 1952 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1953
1954 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1955 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1956 if (IS_THUMB_ADDR (lr))
9779414d 1957 cpsr |= t_bit;
b39cc962 1958 else
9779414d 1959 cpsr &= ~t_bit;
b39cc962
DJ
1960 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1961 }
1962
a262aec2
DJ
1963 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1964 prev_regnum);
eb5492fa
DJ
1965}
1966
1967struct frame_unwind arm_prologue_unwind = {
1968 NORMAL_FRAME,
c1ee9414 1969 arm_prologue_unwind_stop_reason,
eb5492fa 1970 arm_prologue_this_id,
a262aec2
DJ
1971 arm_prologue_prev_register,
1972 NULL,
1973 default_frame_sniffer
eb5492fa
DJ
1974};
1975
0e9e9abd
UW
1976/* Maintain a list of ARM exception table entries per objfile, similar to the
1977 list of mapping symbols. We only cache entries for standard ARM-defined
1978 personality routines; the cache will contain only the frame unwinding
1979 instructions associated with the entry (not the descriptors). */
1980
1981static const struct objfile_data *arm_exidx_data_key;
1982
1983struct arm_exidx_entry
1984{
1985 bfd_vma addr;
1986 gdb_byte *entry;
1987};
1988typedef struct arm_exidx_entry arm_exidx_entry_s;
1989DEF_VEC_O(arm_exidx_entry_s);
1990
1991struct arm_exidx_data
1992{
1993 VEC(arm_exidx_entry_s) **section_maps;
1994};
1995
1996static void
1997arm_exidx_data_free (struct objfile *objfile, void *arg)
1998{
9a3c8263 1999 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
2000 unsigned int i;
2001
2002 for (i = 0; i < objfile->obfd->section_count; i++)
2003 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2004}
2005
2006static inline int
2007arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2008 const struct arm_exidx_entry *rhs)
2009{
2010 return lhs->addr < rhs->addr;
2011}
2012
2013static struct obj_section *
2014arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2015{
2016 struct obj_section *osect;
2017
2018 ALL_OBJFILE_OSECTIONS (objfile, osect)
2019 if (bfd_get_section_flags (objfile->obfd,
2020 osect->the_bfd_section) & SEC_ALLOC)
2021 {
2022 bfd_vma start, size;
2023 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2024 size = bfd_get_section_size (osect->the_bfd_section);
2025
2026 if (start <= vma && vma < start + size)
2027 return osect;
2028 }
2029
2030 return NULL;
2031}
2032
2033/* Parse contents of exception table and exception index sections
2034 of OBJFILE, and fill in the exception table entry cache.
2035
2036 For each entry that refers to a standard ARM-defined personality
2037 routine, extract the frame unwinding instructions (from either
2038 the index or the table section). The unwinding instructions
2039 are normalized by:
2040 - extracting them from the rest of the table data
2041 - converting to host endianness
2042 - appending the implicit 0xb0 ("Finish") code
2043
2044 The extracted and normalized instructions are stored for later
2045 retrieval by the arm_find_exidx_entry routine. */
2046
2047static void
2048arm_exidx_new_objfile (struct objfile *objfile)
2049{
3bb47e8b 2050 struct cleanup *cleanups;
0e9e9abd
UW
2051 struct arm_exidx_data *data;
2052 asection *exidx, *extab;
2053 bfd_vma exidx_vma = 0, extab_vma = 0;
2054 bfd_size_type exidx_size = 0, extab_size = 0;
2055 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2056 LONGEST i;
2057
2058 /* If we've already touched this file, do nothing. */
2059 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2060 return;
3bb47e8b 2061 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2062
2063 /* Read contents of exception table and index. */
a5eda10c 2064 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2065 if (exidx)
2066 {
2067 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2068 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2069 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2070 make_cleanup (xfree, exidx_data);
2071
2072 if (!bfd_get_section_contents (objfile->obfd, exidx,
2073 exidx_data, 0, exidx_size))
2074 {
2075 do_cleanups (cleanups);
2076 return;
2077 }
2078 }
2079
2080 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2081 if (extab)
2082 {
2083 extab_vma = bfd_section_vma (objfile->obfd, extab);
2084 extab_size = bfd_get_section_size (extab);
224c3ddb 2085 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2086 make_cleanup (xfree, extab_data);
2087
2088 if (!bfd_get_section_contents (objfile->obfd, extab,
2089 extab_data, 0, extab_size))
2090 {
2091 do_cleanups (cleanups);
2092 return;
2093 }
2094 }
2095
2096 /* Allocate exception table data structure. */
2097 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2098 set_objfile_data (objfile, arm_exidx_data_key, data);
2099 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2100 objfile->obfd->section_count,
2101 VEC(arm_exidx_entry_s) *);
2102
2103 /* Fill in exception table. */
2104 for (i = 0; i < exidx_size / 8; i++)
2105 {
2106 struct arm_exidx_entry new_exidx_entry;
2107 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2108 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2109 bfd_vma addr = 0, word = 0;
2110 int n_bytes = 0, n_words = 0;
2111 struct obj_section *sec;
2112 gdb_byte *entry = NULL;
2113
2114 /* Extract address of start of function. */
2115 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2116 idx += exidx_vma + i * 8;
2117
2118 /* Find section containing function and compute section offset. */
2119 sec = arm_obj_section_from_vma (objfile, idx);
2120 if (sec == NULL)
2121 continue;
2122 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2123
2124 /* Determine address of exception table entry. */
2125 if (val == 1)
2126 {
2127 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2128 }
2129 else if ((val & 0xff000000) == 0x80000000)
2130 {
2131 /* Exception table entry embedded in .ARM.exidx
2132 -- must be short form. */
2133 word = val;
2134 n_bytes = 3;
2135 }
2136 else if (!(val & 0x80000000))
2137 {
2138 /* Exception table entry in .ARM.extab. */
2139 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2140 addr += exidx_vma + i * 8 + 4;
2141
2142 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2143 {
2144 word = bfd_h_get_32 (objfile->obfd,
2145 extab_data + addr - extab_vma);
2146 addr += 4;
2147
2148 if ((word & 0xff000000) == 0x80000000)
2149 {
2150 /* Short form. */
2151 n_bytes = 3;
2152 }
2153 else if ((word & 0xff000000) == 0x81000000
2154 || (word & 0xff000000) == 0x82000000)
2155 {
2156 /* Long form. */
2157 n_bytes = 2;
2158 n_words = ((word >> 16) & 0xff);
2159 }
2160 else if (!(word & 0x80000000))
2161 {
2162 bfd_vma pers;
2163 struct obj_section *pers_sec;
2164 int gnu_personality = 0;
2165
2166 /* Custom personality routine. */
2167 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2168 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2169
2170 /* Check whether we've got one of the variants of the
2171 GNU personality routines. */
2172 pers_sec = arm_obj_section_from_vma (objfile, pers);
2173 if (pers_sec)
2174 {
2175 static const char *personality[] =
2176 {
2177 "__gcc_personality_v0",
2178 "__gxx_personality_v0",
2179 "__gcj_personality_v0",
2180 "__gnu_objc_personality_v0",
2181 NULL
2182 };
2183
2184 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2185 int k;
2186
2187 for (k = 0; personality[k]; k++)
2188 if (lookup_minimal_symbol_by_pc_name
2189 (pc, personality[k], objfile))
2190 {
2191 gnu_personality = 1;
2192 break;
2193 }
2194 }
2195
2196 /* If so, the next word contains a word count in the high
2197 byte, followed by the same unwind instructions as the
2198 pre-defined forms. */
2199 if (gnu_personality
2200 && addr + 4 <= extab_vma + extab_size)
2201 {
2202 word = bfd_h_get_32 (objfile->obfd,
2203 extab_data + addr - extab_vma);
2204 addr += 4;
2205 n_bytes = 3;
2206 n_words = ((word >> 24) & 0xff);
2207 }
2208 }
2209 }
2210 }
2211
2212 /* Sanity check address. */
2213 if (n_words)
2214 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2215 n_words = n_bytes = 0;
2216
2217 /* The unwind instructions reside in WORD (only the N_BYTES least
2218 significant bytes are valid), followed by N_WORDS words in the
2219 extab section starting at ADDR. */
2220 if (n_bytes || n_words)
2221 {
224c3ddb
SM
2222 gdb_byte *p = entry
2223 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2224 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2225
2226 while (n_bytes--)
2227 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2228
2229 while (n_words--)
2230 {
2231 word = bfd_h_get_32 (objfile->obfd,
2232 extab_data + addr - extab_vma);
2233 addr += 4;
2234
2235 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2236 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2237 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2238 *p++ = (gdb_byte) (word & 0xff);
2239 }
2240
2241 /* Implied "Finish" to terminate the list. */
2242 *p++ = 0xb0;
2243 }
2244
 2245      /* Push the entry onto the vector.  Entries are guaranteed to
 2246	  always appear in order of increasing addresses. */
2247 new_exidx_entry.addr = idx;
2248 new_exidx_entry.entry = entry;
2249 VEC_safe_push (arm_exidx_entry_s,
2250 data->section_maps[sec->the_bfd_section->index],
2251 &new_exidx_entry);
2252 }
2253
2254 do_cleanups (cleanups);
2255}
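/* As a hypothetical example of the normalization above: a short-form
   index word of 0x80a8b0b0 yields the three unwind bytes 0xa8, 0xb0,
   0xb0 (pop {r4, r14}, finish, finish), and the entry cached here is
   { 0xa8, 0xb0, 0xb0, 0xb0 } once the implicit "Finish" byte has been
   appended.  */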
2256
2257/* Search for the exception table entry covering MEMADDR. If one is found,
2258 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2259 set *START to the start of the region covered by this entry. */
2260
2261static gdb_byte *
2262arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2263{
2264 struct obj_section *sec;
2265
2266 sec = find_pc_section (memaddr);
2267 if (sec != NULL)
2268 {
2269 struct arm_exidx_data *data;
2270 VEC(arm_exidx_entry_s) *map;
2271 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2272 unsigned int idx;
2273
9a3c8263
SM
2274 data = ((struct arm_exidx_data *)
2275 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2276 if (data != NULL)
2277 {
2278 map = data->section_maps[sec->the_bfd_section->index];
2279 if (!VEC_empty (arm_exidx_entry_s, map))
2280 {
2281 struct arm_exidx_entry *map_sym;
2282
2283 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2284 arm_compare_exidx_entries);
2285
2286 /* VEC_lower_bound finds the earliest ordered insertion
2287 point. If the following symbol starts at this exact
2288 address, we use that; otherwise, the preceding
2289 exception table entry covers this address. */
2290 if (idx < VEC_length (arm_exidx_entry_s, map))
2291 {
2292 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2293 if (map_sym->addr == map_key.addr)
2294 {
2295 if (start)
2296 *start = map_sym->addr + obj_section_addr (sec);
2297 return map_sym->entry;
2298 }
2299 }
2300
2301 if (idx > 0)
2302 {
2303 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2304 if (start)
2305 *start = map_sym->addr + obj_section_addr (sec);
2306 return map_sym->entry;
2307 }
2308 }
2309 }
2310 }
2311
2312 return NULL;
2313}
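/* For instance (hypothetical offsets): with cached entries starting
   at section offsets 0x0, 0x40 and 0x100, a lookup for offset 0x44
   computes an insertion point just before 0x100, so the preceding
   entry at 0x40 is returned as the one covering the address.  */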
2314
2315/* Given the current frame THIS_FRAME, and its associated frame unwinding
2316 instruction list from the ARM exception table entry ENTRY, allocate and
2317 return a prologue cache structure describing how to unwind this frame.
2318
2319 Return NULL if the unwinding instruction list contains a "spare",
2320 "reserved" or "refuse to unwind" instruction as defined in section
2321 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2322 for the ARM Architecture" document. */
2323
2324static struct arm_prologue_cache *
2325arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2326{
2327 CORE_ADDR vsp = 0;
2328 int vsp_valid = 0;
2329
2330 struct arm_prologue_cache *cache;
2331 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2332 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2333
2334 for (;;)
2335 {
2336 gdb_byte insn;
2337
2338 /* Whenever we reload SP, we actually have to retrieve its
2339 actual value in the current frame. */
2340 if (!vsp_valid)
2341 {
2342 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2343 {
2344 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2345 vsp = get_frame_register_unsigned (this_frame, reg);
2346 }
2347 else
2348 {
2349 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2350 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2351 }
2352
2353 vsp_valid = 1;
2354 }
2355
2356 /* Decode next unwind instruction. */
2357 insn = *entry++;
2358
2359 if ((insn & 0xc0) == 0)
2360 {
2361 int offset = insn & 0x3f;
2362 vsp += (offset << 2) + 4;
2363 }
2364 else if ((insn & 0xc0) == 0x40)
2365 {
2366 int offset = insn & 0x3f;
2367 vsp -= (offset << 2) + 4;
2368 }
2369 else if ((insn & 0xf0) == 0x80)
2370 {
2371 int mask = ((insn & 0xf) << 8) | *entry++;
2372 int i;
2373
2374 /* The special case of an all-zero mask identifies
2375 "Refuse to unwind". We return NULL to fall back
2376 to the prologue analyzer. */
2377 if (mask == 0)
2378 return NULL;
2379
2380 /* Pop registers r4..r15 under mask. */
2381 for (i = 0; i < 12; i++)
2382 if (mask & (1 << i))
2383 {
2384 cache->saved_regs[4 + i].addr = vsp;
2385 vsp += 4;
2386 }
2387
2388 /* Special-case popping SP -- we need to reload vsp. */
2389 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2390 vsp_valid = 0;
2391 }
2392 else if ((insn & 0xf0) == 0x90)
2393 {
2394 int reg = insn & 0xf;
2395
2396 /* Reserved cases. */
2397 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2398 return NULL;
2399
2400 /* Set SP from another register and mark VSP for reload. */
2401 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2402 vsp_valid = 0;
2403 }
2404 else if ((insn & 0xf0) == 0xa0)
2405 {
2406 int count = insn & 0x7;
2407 int pop_lr = (insn & 0x8) != 0;
2408 int i;
2409
2410 /* Pop r4..r[4+count]. */
2411 for (i = 0; i <= count; i++)
2412 {
2413 cache->saved_regs[4 + i].addr = vsp;
2414 vsp += 4;
2415 }
2416
2417 /* If indicated by flag, pop LR as well. */
2418 if (pop_lr)
2419 {
2420 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2421 vsp += 4;
2422 }
2423 }
2424 else if (insn == 0xb0)
2425 {
2426 /* We could only have updated PC by popping into it; if so, it
2427 will show up as address. Otherwise, copy LR into PC. */
2428 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2429 cache->saved_regs[ARM_PC_REGNUM]
2430 = cache->saved_regs[ARM_LR_REGNUM];
2431
2432 /* We're done. */
2433 break;
2434 }
2435 else if (insn == 0xb1)
2436 {
2437 int mask = *entry++;
2438 int i;
2439
2440 /* All-zero mask and mask >= 16 is "spare". */
2441 if (mask == 0 || mask >= 16)
2442 return NULL;
2443
2444 /* Pop r0..r3 under mask. */
2445 for (i = 0; i < 4; i++)
2446 if (mask & (1 << i))
2447 {
2448 cache->saved_regs[i].addr = vsp;
2449 vsp += 4;
2450 }
2451 }
2452 else if (insn == 0xb2)
2453 {
2454 ULONGEST offset = 0;
2455 unsigned shift = 0;
2456
2457 do
2458 {
2459 offset |= (*entry & 0x7f) << shift;
2460 shift += 7;
2461 }
2462 while (*entry++ & 0x80);
2463
2464 vsp += 0x204 + (offset << 2);
2465 }
2466 else if (insn == 0xb3)
2467 {
2468 int start = *entry >> 4;
2469 int count = (*entry++) & 0xf;
2470 int i;
2471
2472 /* Only registers D0..D15 are valid here. */
2473 if (start + count >= 16)
2474 return NULL;
2475
2476 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2477 for (i = 0; i <= count; i++)
2478 {
2479 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2480 vsp += 8;
2481 }
2482
2483 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2484 vsp += 4;
2485 }
2486 else if ((insn & 0xf8) == 0xb8)
2487 {
2488 int count = insn & 0x7;
2489 int i;
2490
2491 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2492 for (i = 0; i <= count; i++)
2493 {
2494 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2495 vsp += 8;
2496 }
2497
2498 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2499 vsp += 4;
2500 }
2501 else if (insn == 0xc6)
2502 {
2503 int start = *entry >> 4;
2504 int count = (*entry++) & 0xf;
2505 int i;
2506
2507 /* Only registers WR0..WR15 are valid. */
2508 if (start + count >= 16)
2509 return NULL;
2510
2511 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2512 for (i = 0; i <= count; i++)
2513 {
2514 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2515 vsp += 8;
2516 }
2517 }
2518 else if (insn == 0xc7)
2519 {
2520 int mask = *entry++;
2521 int i;
2522
2523 /* All-zero mask and mask >= 16 is "spare". */
2524 if (mask == 0 || mask >= 16)
2525 return NULL;
2526
2527 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2528 for (i = 0; i < 4; i++)
2529 if (mask & (1 << i))
2530 {
2531 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2532 vsp += 4;
2533 }
2534 }
2535 else if ((insn & 0xf8) == 0xc0)
2536 {
2537 int count = insn & 0x7;
2538 int i;
2539
2540 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2541 for (i = 0; i <= count; i++)
2542 {
2543 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2544 vsp += 8;
2545 }
2546 }
2547 else if (insn == 0xc8)
2548 {
2549 int start = *entry >> 4;
2550 int count = (*entry++) & 0xf;
2551 int i;
2552
2553 /* Only registers D0..D31 are valid. */
2554 if (start + count >= 16)
2555 return NULL;
2556
2557 /* Pop VFP double-precision registers
2558 D[16+start]..D[16+start+count]. */
2559 for (i = 0; i <= count; i++)
2560 {
2561 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2562 vsp += 8;
2563 }
2564 }
2565 else if (insn == 0xc9)
2566 {
2567 int start = *entry >> 4;
2568 int count = (*entry++) & 0xf;
2569 int i;
2570
2571 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2572 for (i = 0; i <= count; i++)
2573 {
2574 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2575 vsp += 8;
2576 }
2577 }
2578 else if ((insn & 0xf8) == 0xd0)
2579 {
2580 int count = insn & 0x7;
2581 int i;
2582
2583 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2584 for (i = 0; i <= count; i++)
2585 {
2586 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2587 vsp += 8;
2588 }
2589 }
2590 else
2591 {
2592 /* Everything else is "spare". */
2593 return NULL;
2594 }
2595 }
2596
2597 /* If we restore SP from a register, assume this was the frame register.
2598 Otherwise just fall back to SP as frame register. */
2599 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2600 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2601 else
2602 cache->framereg = ARM_SP_REGNUM;
2603
2604 /* Determine offset to previous frame. */
2605 cache->framesize
2606 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2607
2608 /* We already got the previous SP. */
2609 cache->prev_sp = vsp;
2610
2611 return cache;
2612}
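/* A small worked example (hypothetical unwind bytes): the sequence
   { 0x97, 0x84, 0x08, 0xb0 } first reloads the virtual SP from r7,
   then pops r7 and r14 from the two words at the new VSP, and the
   final "Finish" opcode copies the popped LR into the PC slot -- the
   unwinding expected for a frame built with "push {r7, lr}" followed
   by "mov r7, sp".  */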
2613
2614/* Unwinding via ARM exception table entries. Note that the sniffer
2615 already computes a filled-in prologue cache, which is then used
2616 with the same arm_prologue_this_id and arm_prologue_prev_register
2617 routines also used for prologue-parsing based unwinding. */
2618
2619static int
2620arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2621 struct frame_info *this_frame,
2622 void **this_prologue_cache)
2623{
2624 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2625 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2626 CORE_ADDR addr_in_block, exidx_region, func_start;
2627 struct arm_prologue_cache *cache;
2628 gdb_byte *entry;
2629
2630 /* See if we have an ARM exception table entry covering this address. */
2631 addr_in_block = get_frame_address_in_block (this_frame);
2632 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2633 if (!entry)
2634 return 0;
2635
2636 /* The ARM exception table does not describe unwind information
2637 for arbitrary PC values, but is guaranteed to be correct only
2638 at call sites. We have to decide here whether we want to use
2639 ARM exception table information for this frame, or fall back
2640 to using prologue parsing. (Note that if we have DWARF CFI,
2641 this sniffer isn't even called -- CFI is always preferred.)
2642
2643 Before we make this decision, however, we check whether we
2644 actually have *symbol* information for the current frame.
2645 If not, prologue parsing would not work anyway, so we might
2646 as well use the exception table and hope for the best. */
2647 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2648 {
2649 int exc_valid = 0;
2650
2651 /* If the next frame is "normal", we are at a call site in this
2652 frame, so exception information is guaranteed to be valid. */
2653 if (get_next_frame (this_frame)
2654 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2655 exc_valid = 1;
2656
2657 /* We also assume exception information is valid if we're currently
2658 blocked in a system call. The system library is supposed to
d9311bfa
AT
2659 ensure this, so that e.g. pthread cancellation works. */
2660 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2661 {
d9311bfa 2662 LONGEST insn;
416dc9c6 2663
d9311bfa
AT
2664 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2665 byte_order_for_code, &insn)
2666 && (insn & 0xff00) == 0xdf00 /* svc */)
2667 exc_valid = 1;
0e9e9abd 2668 }
d9311bfa
AT
2669 else
2670 {
2671 LONGEST insn;
416dc9c6 2672
d9311bfa
AT
2673 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2674 byte_order_for_code, &insn)
2675 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2676 exc_valid = 1;
2677 }
2678
0e9e9abd
UW
2679 /* Bail out if we don't know that exception information is valid. */
2680 if (!exc_valid)
2681 return 0;
2682
2683 /* The ARM exception index does not mark the *end* of the region
2684 covered by the entry, and some functions will not have any entry.
2685 To correctly recognize the end of the covered region, the linker
2686 should have inserted dummy records with a CANTUNWIND marker.
2687
2688 Unfortunately, current versions of GNU ld do not reliably do
2689 this, and thus we may have found an incorrect entry above.
2690 As a (temporary) sanity check, we only use the entry if it
2691 lies *within* the bounds of the function. Note that this check
2692 might reject perfectly valid entries that just happen to cover
2693 multiple functions; therefore this check ought to be removed
2694 once the linker is fixed. */
2695 if (func_start > exidx_region)
2696 return 0;
2697 }
2698
2699 /* Decode the list of unwinding instructions into a prologue cache.
2700 Note that this may fail due to e.g. a "refuse to unwind" code. */
2701 cache = arm_exidx_fill_cache (this_frame, entry);
2702 if (!cache)
2703 return 0;
2704
2705 *this_prologue_cache = cache;
2706 return 1;
2707}
2708
2709struct frame_unwind arm_exidx_unwind = {
2710 NORMAL_FRAME,
8fbca658 2711 default_frame_unwind_stop_reason,
0e9e9abd
UW
2712 arm_prologue_this_id,
2713 arm_prologue_prev_register,
2714 NULL,
2715 arm_exidx_unwind_sniffer
2716};
2717
779aa56f
YQ
2718static struct arm_prologue_cache *
2719arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2720{
2721 struct arm_prologue_cache *cache;
779aa56f
YQ
2722 int reg;
2723
2724 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2725 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2726
2727 /* Still rely on the offset calculated from prologue. */
2728 arm_scan_prologue (this_frame, cache);
2729
2730 /* Since we are in epilogue, the SP has been restored. */
2731 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2732
2733 /* Calculate actual addresses of saved registers using offsets
2734 determined by arm_scan_prologue. */
2735 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2736 if (trad_frame_addr_p (cache->saved_regs, reg))
2737 cache->saved_regs[reg].addr += cache->prev_sp;
2738
2739 return cache;
2740}
2741
2742/* Implementation of function hook 'this_id' in
 2743   'struct frame_unwind' for epilogue unwinder. */
2744
2745static void
2746arm_epilogue_frame_this_id (struct frame_info *this_frame,
2747 void **this_cache,
2748 struct frame_id *this_id)
2749{
2750 struct arm_prologue_cache *cache;
2751 CORE_ADDR pc, func;
2752
2753 if (*this_cache == NULL)
2754 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2755 cache = (struct arm_prologue_cache *) *this_cache;
2756
2757 /* Use function start address as part of the frame ID. If we cannot
2758 identify the start address (due to missing symbol information),
2759 fall back to just using the current PC. */
2760 pc = get_frame_pc (this_frame);
2761 func = get_frame_func (this_frame);
fb3f3d25 2762 if (func == 0)
779aa56f
YQ
2763 func = pc;
2764
 2765  (*this_id) = frame_id_build (cache->prev_sp, func);
2766}
2767
2768/* Implementation of function hook 'prev_register' in
 2769   'struct frame_unwind' for epilogue unwinder. */
2770
2771static struct value *
2772arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2773 void **this_cache, int regnum)
2774{
779aa56f
YQ
2775 if (*this_cache == NULL)
2776 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2777
2778 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2779}
2780
2781static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2782 CORE_ADDR pc);
2783static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2784 CORE_ADDR pc);
2785
2786/* Implementation of function hook 'sniffer' in
 2787   'struct frame_unwind' for epilogue unwinder. */
2788
2789static int
2790arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2791 struct frame_info *this_frame,
2792 void **this_prologue_cache)
2793{
2794 if (frame_relative_level (this_frame) == 0)
2795 {
2796 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2797 CORE_ADDR pc = get_frame_pc (this_frame);
2798
2799 if (arm_frame_is_thumb (this_frame))
2800 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2801 else
2802 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2803 }
2804 else
2805 return 0;
2806}
2807
2808/* Frame unwinder from epilogue. */
2809
2810static const struct frame_unwind arm_epilogue_frame_unwind =
2811{
2812 NORMAL_FRAME,
2813 default_frame_unwind_stop_reason,
2814 arm_epilogue_frame_this_id,
2815 arm_epilogue_frame_prev_register,
2816 NULL,
2817 arm_epilogue_frame_sniffer,
2818};
2819
80d8d390
YQ
2820/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2821 trampoline, return the target PC. Otherwise return 0.
2822
2823 void call0a (char c, short s, int i, long l) {}
2824
2825 int main (void)
2826 {
2827 (*pointer_to_call0a) (c, s, i, l);
2828 }
2829
2830 Instead of calling a stub library function _call_via_xx (xx is
2831 the register name), GCC may inline the trampoline in the object
2832 file as below (register r2 has the address of call0a).
2833
2834 .global main
2835 .type main, %function
2836 ...
2837 bl .L1
2838 ...
2839 .size main, .-main
2840
2841 .L1:
2842 bx r2
2843
2844 The trampoline 'bx r2' doesn't belong to main. */
2845
2846static CORE_ADDR
2847arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2848{
2849 /* The heuristics of recognizing such trampoline is that FRAME is
2850 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2851 if (arm_frame_is_thumb (frame))
2852 {
2853 gdb_byte buf[2];
2854
2855 if (target_read_memory (pc, buf, 2) == 0)
2856 {
2857 struct gdbarch *gdbarch = get_frame_arch (frame);
2858 enum bfd_endian byte_order_for_code
2859 = gdbarch_byte_order_for_code (gdbarch);
2860 uint16_t insn
2861 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2862
2863 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2864 {
2865 CORE_ADDR dest
2866 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2867
2868 /* Clear the LSB so that gdb core sets step-resume
2869 breakpoint at the right address. */
2870 return UNMAKE_THUMB_ADDR (dest);
2871 }
2872 }
2873 }
2874
2875 return 0;
2876}
2877
909cf6ea 2878static struct arm_prologue_cache *
a262aec2 2879arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2880{
909cf6ea 2881 struct arm_prologue_cache *cache;
909cf6ea 2882
35d5d4ee 2883 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2884 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2885
a262aec2 2886 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2887
2888 return cache;
2889}
2890
2891/* Our frame ID for a stub frame is the current SP and LR. */
2892
2893static void
a262aec2 2894arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2895 void **this_cache,
2896 struct frame_id *this_id)
2897{
2898 struct arm_prologue_cache *cache;
2899
2900 if (*this_cache == NULL)
a262aec2 2901 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2902 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2903
a262aec2 2904 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2905}
2906
a262aec2
DJ
2907static int
2908arm_stub_unwind_sniffer (const struct frame_unwind *self,
2909 struct frame_info *this_frame,
2910 void **this_prologue_cache)
909cf6ea 2911{
93d42b30 2912 CORE_ADDR addr_in_block;
948f8e3d 2913 gdb_byte dummy[4];
18d18ac8
YQ
2914 CORE_ADDR pc, start_addr;
2915 const char *name;
909cf6ea 2916
a262aec2 2917 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2918 pc = get_frame_pc (this_frame);
3e5d3a5a 2919 if (in_plt_section (addr_in_block)
fc36e839
DE
 2920      /* We also use the stub unwinder if the target memory is unreadable
2921 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2922 || target_read_memory (pc, dummy, 4) != 0)
2923 return 1;
2924
2925 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2926 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2927 return 1;
909cf6ea 2928
a262aec2 2929 return 0;
909cf6ea
DJ
2930}
2931
a262aec2
DJ
2932struct frame_unwind arm_stub_unwind = {
2933 NORMAL_FRAME,
8fbca658 2934 default_frame_unwind_stop_reason,
a262aec2
DJ
2935 arm_stub_this_id,
2936 arm_prologue_prev_register,
2937 NULL,
2938 arm_stub_unwind_sniffer
2939};
2940
2ae28aa9
YQ
2941/* Put here the code to store, into CACHE->saved_regs, the addresses
2942 of the saved registers of frame described by THIS_FRAME. CACHE is
2943 returned. */
2944
2945static struct arm_prologue_cache *
2946arm_m_exception_cache (struct frame_info *this_frame)
2947{
2948 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2949 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2950 struct arm_prologue_cache *cache;
2951 CORE_ADDR unwound_sp;
2952 LONGEST xpsr;
2953
2954 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2955 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2956
2957 unwound_sp = get_frame_register_unsigned (this_frame,
2958 ARM_SP_REGNUM);
2959
2960 /* The hardware saves eight 32-bit words, comprising xPSR,
2961 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2962 "B1.5.6 Exception entry behavior" in
2963 "ARMv7-M Architecture Reference Manual". */
2964 cache->saved_regs[0].addr = unwound_sp;
2965 cache->saved_regs[1].addr = unwound_sp + 4;
2966 cache->saved_regs[2].addr = unwound_sp + 8;
2967 cache->saved_regs[3].addr = unwound_sp + 12;
2968 cache->saved_regs[12].addr = unwound_sp + 16;
2969 cache->saved_regs[14].addr = unwound_sp + 20;
2970 cache->saved_regs[15].addr = unwound_sp + 24;
2971 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2972
2973 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2974 aligner between the top of the 32-byte stack frame and the
2975 previous context's stack pointer. */
2976 cache->prev_sp = unwound_sp + 32;
2977 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2978 && (xpsr & (1 << 9)) != 0)
2979 cache->prev_sp += 4;
2980
2981 return cache;
2982}
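/* Hypothetical numbers to illustrate the layout above: with
   UNWOUND_SP = 0x2000ffc0, the stacked R0-R3, R12, LR, return address
   and xPSR occupy 0x2000ffc0 .. 0x2000ffdc, and PREV_SP is
   0x2000ffe0 -- or 0x2000ffe4 if bit 9 of the stacked xPSR indicates
   that the hardware inserted a four-byte aligner.  */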
2983
2984/* Implementation of function hook 'this_id' in
 2985   'struct frame_unwind'. */
2986
2987static void
2988arm_m_exception_this_id (struct frame_info *this_frame,
2989 void **this_cache,
2990 struct frame_id *this_id)
2991{
2992 struct arm_prologue_cache *cache;
2993
2994 if (*this_cache == NULL)
2995 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2996 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2997
 2998  /* Our frame ID for an exception frame is the previous SP and the current PC. */
2999 *this_id = frame_id_build (cache->prev_sp,
3000 get_frame_pc (this_frame));
3001}
3002
3003/* Implementation of function hook 'prev_register' in
 3004   'struct frame_unwind'. */
3005
3006static struct value *
3007arm_m_exception_prev_register (struct frame_info *this_frame,
3008 void **this_cache,
3009 int prev_regnum)
3010{
2ae28aa9
YQ
3011 struct arm_prologue_cache *cache;
3012
3013 if (*this_cache == NULL)
3014 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3015 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3016
3017 /* The value was already reconstructed into PREV_SP. */
3018 if (prev_regnum == ARM_SP_REGNUM)
3019 return frame_unwind_got_constant (this_frame, prev_regnum,
3020 cache->prev_sp);
3021
3022 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3023 prev_regnum);
3024}
3025
3026/* Implementation of function hook 'sniffer' in
 3027   'struct frame_unwind'. */
3028
3029static int
3030arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3031 struct frame_info *this_frame,
3032 void **this_prologue_cache)
3033{
3034 CORE_ADDR this_pc = get_frame_pc (this_frame);
3035
3036 /* No need to check is_m; this sniffer is only registered for
3037 M-profile architectures. */
3038
ca90e760
FH
3039 /* Check if exception frame returns to a magic PC value. */
3040 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3041}
3042
3043/* Frame unwinder for M-profile exceptions. */
3044
3045struct frame_unwind arm_m_exception_unwind =
3046{
3047 SIGTRAMP_FRAME,
3048 default_frame_unwind_stop_reason,
3049 arm_m_exception_this_id,
3050 arm_m_exception_prev_register,
3051 NULL,
3052 arm_m_exception_unwind_sniffer
3053};
3054
24de872b 3055static CORE_ADDR
a262aec2 3056arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3057{
3058 struct arm_prologue_cache *cache;
3059
eb5492fa 3060 if (*this_cache == NULL)
a262aec2 3061 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3062 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3063
4be43953 3064 return cache->prev_sp - cache->framesize;
24de872b
DJ
3065}
3066
eb5492fa
DJ
3067struct frame_base arm_normal_base = {
3068 &arm_prologue_unwind,
3069 arm_normal_frame_base,
3070 arm_normal_frame_base,
3071 arm_normal_frame_base
3072};
3073
a262aec2 3074/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3075 dummy frame. The frame ID's base needs to match the TOS value
3076 saved by save_dummy_frame_tos() and returned from
3077 arm_push_dummy_call, and the PC needs to match the dummy frame's
3078 breakpoint. */
c906108c 3079
eb5492fa 3080static struct frame_id
a262aec2 3081arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3082{
0963b4bd
MS
3083 return frame_id_build (get_frame_register_unsigned (this_frame,
3084 ARM_SP_REGNUM),
a262aec2 3085 get_frame_pc (this_frame));
eb5492fa 3086}
c3b4394c 3087
eb5492fa
DJ
3088/* Given THIS_FRAME, find the previous frame's resume PC (which will
3089 be used to construct the previous frame's ID, after looking up the
3090 containing function). */
c3b4394c 3091
eb5492fa
DJ
3092static CORE_ADDR
3093arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3094{
3095 CORE_ADDR pc;
3096 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3097 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3098}
3099
3100static CORE_ADDR
3101arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3102{
3103 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3104}
3105
b39cc962
DJ
3106static struct value *
3107arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3108 int regnum)
3109{
24568a2c 3110 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3111 CORE_ADDR lr, cpsr;
9779414d 3112 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3113
3114 switch (regnum)
3115 {
3116 case ARM_PC_REGNUM:
3117 /* The PC is normally copied from the return column, which
3118 describes saves of LR. However, that version may have an
3119 extra bit set to indicate Thumb state. The bit is not
3120 part of the PC. */
3121 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3122 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3123 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3124
3125 case ARM_PS_REGNUM:
3126 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3127 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3128 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3129 if (IS_THUMB_ADDR (lr))
9779414d 3130 cpsr |= t_bit;
b39cc962 3131 else
9779414d 3132 cpsr &= ~t_bit;
ca38c58e 3133 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3134
3135 default:
3136 internal_error (__FILE__, __LINE__,
3137 _("Unexpected register %d"), regnum);
3138 }
3139}
3140
3141static void
3142arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3143 struct dwarf2_frame_state_reg *reg,
3144 struct frame_info *this_frame)
3145{
3146 switch (regnum)
3147 {
3148 case ARM_PC_REGNUM:
3149 case ARM_PS_REGNUM:
3150 reg->how = DWARF2_FRAME_REG_FN;
3151 reg->loc.fn = arm_dwarf2_prev_register;
3152 break;
3153 case ARM_SP_REGNUM:
3154 reg->how = DWARF2_FRAME_REG_CFA;
3155 break;
3156 }
3157}
3158
c9cf6e20 3159/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3160
3161static int
c9cf6e20 3162thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3163{
3164 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3165 unsigned int insn, insn2;
3166 int found_return = 0, found_stack_adjust = 0;
3167 CORE_ADDR func_start, func_end;
3168 CORE_ADDR scan_pc;
3169 gdb_byte buf[4];
3170
3171 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3172 return 0;
3173
3174 /* The epilogue is a sequence of instructions along the following lines:
3175
3176 - add stack frame size to SP or FP
3177 - [if frame pointer used] restore SP from FP
3178 - restore registers from SP [may include PC]
3179 - a return-type instruction [if PC wasn't already restored]
3180
3181 In a first pass, we scan forward from the current PC and verify the
3182 instructions we find as compatible with this sequence, ending in a
3183 return instruction.
3184
3185 However, this is not sufficient to distinguish indirect function calls
3186 within a function from indirect tail calls in the epilogue in some cases.
3187 Therefore, if we didn't already find any SP-changing instruction during
3188 forward scan, we add a backward scanning heuristic to ensure we actually
3189 are in the epilogue. */
3190
3191 scan_pc = pc;
3192 while (scan_pc < func_end && !found_return)
3193 {
3194 if (target_read_memory (scan_pc, buf, 2))
3195 break;
3196
3197 scan_pc += 2;
3198 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3199
3200 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3201 found_return = 1;
3202 else if (insn == 0x46f7) /* mov pc, lr */
3203 found_return = 1;
540314bd 3204 else if (thumb_instruction_restores_sp (insn))
4024ca99 3205 {
b7576e5c 3206 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3207 found_return = 1;
3208 }
db24da6d 3209 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3210 {
3211 if (target_read_memory (scan_pc, buf, 2))
3212 break;
3213
3214 scan_pc += 2;
3215 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3216
3217 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3218 {
4024ca99
UW
3219 if (insn2 & 0x8000) /* <registers> include PC. */
3220 found_return = 1;
3221 }
3222 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3223 && (insn2 & 0x0fff) == 0x0b04)
3224 {
4024ca99
UW
3225 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3226 found_return = 1;
3227 }
3228 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3229 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3230 ;
4024ca99
UW
3231 else
3232 break;
3233 }
3234 else
3235 break;
3236 }
3237
3238 if (!found_return)
3239 return 0;
3240
3241 /* Since any instruction in the epilogue sequence, with the possible
3242 exception of return itself, updates the stack pointer, we need to
3243 scan backwards for at most one instruction. Try either a 16-bit or
3244 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3245 too much about false positives. */
4024ca99 3246
6b65d1b6
YQ
3247 if (pc - 4 < func_start)
3248 return 0;
3249 if (target_read_memory (pc - 4, buf, 4))
3250 return 0;
4024ca99 3251
6b65d1b6
YQ
3252 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3253 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3254
3255 if (thumb_instruction_restores_sp (insn2))
3256 found_stack_adjust = 1;
3257 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3258 found_stack_adjust = 1;
3259 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3260 && (insn2 & 0x0fff) == 0x0b04)
3261 found_stack_adjust = 1;
3262 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3263 && (insn2 & 0x0e00) == 0x0a00)
3264 found_stack_adjust = 1;
4024ca99
UW
3265
3266 return found_stack_adjust;
3267}
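/* For example (hypothetical Thumb epilogue):

	add	sp, #8		@ 0xb002
	pop	{r4, r7, pc}	@ 0xbd90

   with the PC sitting on the pop, the forward scan treats the
   PC-writing pop as the return, and the backward scan recognizes the
   16-bit "add sp" through thumb_instruction_restores_sp, so the frame
   is reported as destroyed.  */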
3268
4024ca99 3269static int
c58b006a 3270arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3271{
3272 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3273 unsigned int insn;
f303bc3e 3274 int found_return;
4024ca99
UW
3275 CORE_ADDR func_start, func_end;
3276
4024ca99
UW
3277 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3278 return 0;
3279
3280 /* We are in the epilogue if the previous instruction was a stack
3281 adjustment and the next instruction is a possible return (bx, mov
3282 pc, or pop). We could have to scan backwards to find the stack
3283 adjustment, or forwards to find the return, but this is a decent
3284 approximation. First scan forwards. */
3285
3286 found_return = 0;
3287 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3288 if (bits (insn, 28, 31) != INST_NV)
3289 {
3290 if ((insn & 0x0ffffff0) == 0x012fff10)
3291 /* BX. */
3292 found_return = 1;
3293 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3294 /* MOV PC. */
3295 found_return = 1;
3296 else if ((insn & 0x0fff0000) == 0x08bd0000
3297 && (insn & 0x0000c000) != 0)
3298 /* POP (LDMIA), including PC or LR. */
3299 found_return = 1;
3300 }
3301
3302 if (!found_return)
3303 return 0;
3304
3305 /* Scan backwards. This is just a heuristic, so do not worry about
3306 false positives from mode changes. */
3307
3308 if (pc < func_start + 4)
3309 return 0;
3310
3311 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3312 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3313 return 1;
3314
3315 return 0;
3316}
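/* As a hypothetical ARM-mode illustration: with the PC on an
   "ldmfd sp!, {r4, fp, pc}" (0xe8bd8810), the forward check matches
   the POP-including-PC pattern, and if the preceding word is, say,
   "add sp, sp, #16", the backward check accepts it through
   arm_instruction_restores_sp and the frame is reported as
   destroyed.  */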
3317
c58b006a
YQ
3318/* Implement the stack_frame_destroyed_p gdbarch method. */
3319
3320static int
3321arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3322{
3323 if (arm_pc_is_thumb (gdbarch, pc))
3324 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3325 else
3326 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3327}
4024ca99 3328
2dd604e7
RE
3329/* When arguments must be pushed onto the stack, they go on in reverse
3330 order. The code below implements a FILO (stack) to do this. */
3331
3332struct stack_item
3333{
3334 int len;
3335 struct stack_item *prev;
7c543f7b 3336 gdb_byte *data;
2dd604e7
RE
3337};
3338
3339static struct stack_item *
df3b6708 3340push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3341{
3342 struct stack_item *si;
8d749320 3343 si = XNEW (struct stack_item);
7c543f7b 3344 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3345 si->len = len;
3346 si->prev = prev;
3347 memcpy (si->data, contents, len);
3348 return si;
3349}
3350
3351static struct stack_item *
3352pop_stack_item (struct stack_item *si)
3353{
3354 struct stack_item *dead = si;
3355 si = si->prev;
3356 xfree (dead->data);
3357 xfree (dead);
3358 return si;
3359}
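/* A minimal usage sketch (hypothetical caller code): arguments that
   overflow the registers are accumulated with

	si = push_stack_item (si, contents, len);

   one argument at a time, and later written out and released in
   reverse order with a loop along the lines of

	while (si != NULL)
	  si = pop_stack_item (si);

   which is roughly how arm_push_dummy_call later drains the list.  */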
3360
2af48f68
PB
3361
3362/* Return the alignment (in bytes) of the given type. */
3363
3364static int
3365arm_type_align (struct type *t)
3366{
3367 int n;
3368 int align;
3369 int falign;
3370
3371 t = check_typedef (t);
3372 switch (TYPE_CODE (t))
3373 {
3374 default:
3375 /* Should never happen. */
3376 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3377 return 4;
3378
3379 case TYPE_CODE_PTR:
3380 case TYPE_CODE_ENUM:
3381 case TYPE_CODE_INT:
3382 case TYPE_CODE_FLT:
3383 case TYPE_CODE_SET:
3384 case TYPE_CODE_RANGE:
2af48f68
PB
3385 case TYPE_CODE_REF:
3386 case TYPE_CODE_CHAR:
3387 case TYPE_CODE_BOOL:
3388 return TYPE_LENGTH (t);
3389
3390 case TYPE_CODE_ARRAY:
c4312b19
YQ
3391 if (TYPE_VECTOR (t))
3392 {
3393 /* Use the natural alignment for vector types (the same as for
3394 the scalar element type), but the maximum alignment is 64-bit. */
3395 if (TYPE_LENGTH (t) > 8)
3396 return 8;
3397 else
3398 return TYPE_LENGTH (t);
3399 }
3400 else
3401 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3402 case TYPE_CODE_COMPLEX:
2af48f68
PB
3403 return arm_type_align (TYPE_TARGET_TYPE (t));
3404
3405 case TYPE_CODE_STRUCT:
3406 case TYPE_CODE_UNION:
3407 align = 1;
3408 for (n = 0; n < TYPE_NFIELDS (t); n++)
3409 {
3410 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3411 if (falign > align)
3412 align = falign;
3413 }
3414 return align;
3415 }
3416}
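/* Worked example (illustrative only; the type name is made up): for
   the aggregate below, arm_type_align reports the largest field
   alignment, max (1, 8) == 8.  arm_push_dummy_call then rounds that
   up to whole words and, for AAPCS, caps it at two words (8 bytes).  */

struct arm_align_sketch
{
  char c;    /* field alignment 1 */
  double d;  /* field alignment 8 (TYPE_LENGTH of a double) */
};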
3417
90445bd3
DJ
3418/* Possible base types for a candidate for passing and returning in
3419 VFP registers. */
3420
3421enum arm_vfp_cprc_base_type
3422{
3423 VFP_CPRC_UNKNOWN,
3424 VFP_CPRC_SINGLE,
3425 VFP_CPRC_DOUBLE,
3426 VFP_CPRC_VEC64,
3427 VFP_CPRC_VEC128
3428};
3429
3430/* The length of one element of base type B. */
3431
3432static unsigned
3433arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3434{
3435 switch (b)
3436 {
3437 case VFP_CPRC_SINGLE:
3438 return 4;
3439 case VFP_CPRC_DOUBLE:
3440 return 8;
3441 case VFP_CPRC_VEC64:
3442 return 8;
3443 case VFP_CPRC_VEC128:
3444 return 16;
3445 default:
3446 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3447 (int) b);
3448 }
3449}
3450
3451/* The character ('s', 'd' or 'q') for the type of VFP register used
3452 for passing base type B. */
3453
3454static int
3455arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3456{
3457 switch (b)
3458 {
3459 case VFP_CPRC_SINGLE:
3460 return 's';
3461 case VFP_CPRC_DOUBLE:
3462 return 'd';
3463 case VFP_CPRC_VEC64:
3464 return 'd';
3465 case VFP_CPRC_VEC128:
3466 return 'q';
3467 default:
3468 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3469 (int) b);
3470 }
3471}
3472
3473/* Determine whether T may be part of a candidate for passing and
3474 returning in VFP registers, ignoring the limit on the total number
3475 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3476 classification of the first valid component found; if it is not
3477 VFP_CPRC_UNKNOWN, all components must have the same classification
3478 as *BASE_TYPE. If it is found that T contains a type not permitted
3479 for passing and returning in VFP registers, a type differently
3480 classified from *BASE_TYPE, or two types differently classified
3481 from each other, return -1, otherwise return the total number of
3482 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3483 array). Vector types are not currently supported, matching the
3484 generic AAPCS support. */
90445bd3
DJ
3485
3486static int
3487arm_vfp_cprc_sub_candidate (struct type *t,
3488 enum arm_vfp_cprc_base_type *base_type)
3489{
3490 t = check_typedef (t);
3491 switch (TYPE_CODE (t))
3492 {
3493 case TYPE_CODE_FLT:
3494 switch (TYPE_LENGTH (t))
3495 {
3496 case 4:
3497 if (*base_type == VFP_CPRC_UNKNOWN)
3498 *base_type = VFP_CPRC_SINGLE;
3499 else if (*base_type != VFP_CPRC_SINGLE)
3500 return -1;
3501 return 1;
3502
3503 case 8:
3504 if (*base_type == VFP_CPRC_UNKNOWN)
3505 *base_type = VFP_CPRC_DOUBLE;
3506 else if (*base_type != VFP_CPRC_DOUBLE)
3507 return -1;
3508 return 1;
3509
3510 default:
3511 return -1;
3512 }
3513 break;
3514
817e0957
YQ
3515 case TYPE_CODE_COMPLEX:
3516 /* Arguments of complex T where T is one of the types float or
3517 double get treated as if they are implemented as:
3518
3519 struct complexT
3520 {
3521 T real;
3522 T imag;
5f52445b
YQ
3523 };
3524
3525 */
817e0957
YQ
3526 switch (TYPE_LENGTH (t))
3527 {
3528 case 8:
3529 if (*base_type == VFP_CPRC_UNKNOWN)
3530 *base_type = VFP_CPRC_SINGLE;
3531 else if (*base_type != VFP_CPRC_SINGLE)
3532 return -1;
3533 return 2;
3534
3535 case 16:
3536 if (*base_type == VFP_CPRC_UNKNOWN)
3537 *base_type = VFP_CPRC_DOUBLE;
3538 else if (*base_type != VFP_CPRC_DOUBLE)
3539 return -1;
3540 return 2;
3541
3542 default:
3543 return -1;
3544 }
3545 break;
3546
90445bd3
DJ
3547 case TYPE_CODE_ARRAY:
3548 {
c4312b19 3549 if (TYPE_VECTOR (t))
90445bd3 3550 {
c4312b19
YQ
3551 /* 64-bit and 128-bit containerized vector types are VFP
3552 CPRCs. */
3553 switch (TYPE_LENGTH (t))
3554 {
3555 case 8:
3556 if (*base_type == VFP_CPRC_UNKNOWN)
3557 *base_type = VFP_CPRC_VEC64;
3558 return 1;
3559 case 16:
3560 if (*base_type == VFP_CPRC_UNKNOWN)
3561 *base_type = VFP_CPRC_VEC128;
3562 return 1;
3563 default:
3564 return -1;
3565 }
3566 }
3567 else
3568 {
3569 int count;
3570 unsigned unitlen;
3571
3572 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3573 base_type);
3574 if (count == -1)
3575 return -1;
3576 if (TYPE_LENGTH (t) == 0)
3577 {
3578 gdb_assert (count == 0);
3579 return 0;
3580 }
3581 else if (count == 0)
3582 return -1;
3583 unitlen = arm_vfp_cprc_unit_length (*base_type);
3584 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3585 return TYPE_LENGTH (t) / unitlen;
90445bd3 3586 }
90445bd3
DJ
3587 }
3588 break;
3589
3590 case TYPE_CODE_STRUCT:
3591 {
3592 int count = 0;
3593 unsigned unitlen;
3594 int i;
3595 for (i = 0; i < TYPE_NFIELDS (t); i++)
3596 {
1040b979
YQ
3597 int sub_count = 0;
3598
3599 if (!field_is_static (&TYPE_FIELD (t, i)))
3600 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3601 base_type);
90445bd3
DJ
3602 if (sub_count == -1)
3603 return -1;
3604 count += sub_count;
3605 }
3606 if (TYPE_LENGTH (t) == 0)
3607 {
3608 gdb_assert (count == 0);
3609 return 0;
3610 }
3611 else if (count == 0)
3612 return -1;
3613 unitlen = arm_vfp_cprc_unit_length (*base_type);
3614 if (TYPE_LENGTH (t) != unitlen * count)
3615 return -1;
3616 return count;
3617 }
3618
3619 case TYPE_CODE_UNION:
3620 {
3621 int count = 0;
3622 unsigned unitlen;
3623 int i;
3624 for (i = 0; i < TYPE_NFIELDS (t); i++)
3625 {
3626 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3627 base_type);
3628 if (sub_count == -1)
3629 return -1;
3630 count = (count > sub_count ? count : sub_count);
3631 }
3632 if (TYPE_LENGTH (t) == 0)
3633 {
3634 gdb_assert (count == 0);
3635 return 0;
3636 }
3637 else if (count == 0)
3638 return -1;
3639 unitlen = arm_vfp_cprc_unit_length (*base_type);
3640 if (TYPE_LENGTH (t) != unitlen * count)
3641 return -1;
3642 return count;
3643 }
3644
3645 default:
3646 break;
3647 }
3648
3649 return -1;
3650}
3651
3652/* Determine whether T is a VFP co-processor register candidate (CPRC)
3653 if passed to or returned from a non-variadic function with the VFP
3654 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3655 *BASE_TYPE to the base type for T and *COUNT to the number of
3656 elements of that base type before returning. */
3657
3658static int
3659arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3660 int *count)
3661{
3662 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3663 int c = arm_vfp_cprc_sub_candidate (t, &b);
3664 if (c <= 0 || c > 4)
3665 return 0;
3666 *base_type = b;
3667 *count = c;
3668 return 1;
3669}
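/* Worked examples (illustrative only; the type names are made up).
   Under the VFP ABI, the first struct is a CPRC with base type
   VFP_CPRC_SINGLE and a count of 3, so it is passed in s0-s2.  The
   second mixes float and double members, so arm_vfp_cprc_sub_candidate
   returns -1 and the value falls back to core registers / the stack.  */

struct vfp_cprc_sketch_homogeneous
{
  float x, y, z;   /* three VFP_CPRC_SINGLE elements  */
};

struct vfp_cprc_sketch_mixed
{
  float x;         /* VFP_CPRC_SINGLE ...  */
  double y;        /* ... clashes with VFP_CPRC_DOUBLE  */
};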
3670
3671/* Return 1 if the VFP ABI should be used for passing arguments to and
3672 returning values from a function of type FUNC_TYPE, 0
3673 otherwise. */
3674
3675static int
3676arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3677{
3678 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3679 /* Variadic functions always use the base ABI. Assume that functions
3680 without debug info are not variadic. */
3681 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3682 return 0;
3683 /* The VFP ABI is only supported as a variant of AAPCS. */
3684 if (tdep->arm_abi != ARM_ABI_AAPCS)
3685 return 0;
3686 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3687}
3688
3689/* We currently only support passing parameters in integer registers, which
3690 conforms with GCC's default model, and VFP argument passing following
3691 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3692 we should probably support some of them based on the selected ABI. */
3693
3694static CORE_ADDR
7d9b040b 3695arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3696 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3697 struct value **args, CORE_ADDR sp, int struct_return,
3698 CORE_ADDR struct_addr)
2dd604e7 3699{
e17a4113 3700 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3701 int argnum;
3702 int argreg;
3703 int nstack;
3704 struct stack_item *si = NULL;
90445bd3
DJ
3705 int use_vfp_abi;
3706 struct type *ftype;
3707 unsigned vfp_regs_free = (1 << 16) - 1;
3708
3709 /* Determine the type of this function and whether the VFP ABI
3710 applies. */
3711 ftype = check_typedef (value_type (function));
3712 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3713 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3714 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3715
6a65450a
AC
3716 /* Set the return address. For the ARM, the return breakpoint is
3717 always at BP_ADDR. */
9779414d 3718 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3719 bp_addr |= 1;
6a65450a 3720 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3721
3722 /* Walk through the list of args and determine how large a temporary
3723 stack is required. Need to take care here as structs may be
7a9dd1b2 3724 passed on the stack, and we have to push them. */
2dd604e7
RE
3725 nstack = 0;
3726
3727 argreg = ARM_A1_REGNUM;
3728 nstack = 0;
3729
2dd604e7
RE
3730 /* The struct_return pointer occupies the first parameter
3731 passing register. */
3732 if (struct_return)
3733 {
3734 if (arm_debug)
5af949e3 3735 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3736 gdbarch_register_name (gdbarch, argreg),
5af949e3 3737 paddress (gdbarch, struct_addr));
2dd604e7
RE
3738 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3739 argreg++;
3740 }
3741
3742 for (argnum = 0; argnum < nargs; argnum++)
3743 {
3744 int len;
3745 struct type *arg_type;
3746 struct type *target_type;
3747 enum type_code typecode;
8c6363cf 3748 const bfd_byte *val;
2af48f68 3749 int align;
90445bd3
DJ
3750 enum arm_vfp_cprc_base_type vfp_base_type;
3751 int vfp_base_count;
3752 int may_use_core_reg = 1;
2dd604e7 3753
df407dfe 3754 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3755 len = TYPE_LENGTH (arg_type);
3756 target_type = TYPE_TARGET_TYPE (arg_type);
3757 typecode = TYPE_CODE (arg_type);
8c6363cf 3758 val = value_contents (args[argnum]);
2dd604e7 3759
2af48f68
PB
3760 align = arm_type_align (arg_type);
3761 /* Round alignment up to a whole number of words. */
3762 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3763 /* Different ABIs have different maximum alignments. */
3764 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3765 {
3766 /* The APCS ABI only requires word alignment. */
3767 align = INT_REGISTER_SIZE;
3768 }
3769 else
3770 {
3771 /* The AAPCS requires at most doubleword alignment. */
3772 if (align > INT_REGISTER_SIZE * 2)
3773 align = INT_REGISTER_SIZE * 2;
3774 }
3775
90445bd3
DJ
3776 if (use_vfp_abi
3777 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3778 &vfp_base_count))
3779 {
3780 int regno;
3781 int unit_length;
3782 int shift;
3783 unsigned mask;
3784
3785 /* Because this is a CPRC it cannot go in a core register or
3786 cause a core register to be skipped for alignment.
3787 Either it goes in VFP registers and the rest of this loop
3788 iteration is skipped for this argument, or it goes on the
3789 stack (and the stack alignment code is correct for this
3790 case). */
3791 may_use_core_reg = 0;
3792
3793 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3794 shift = unit_length / 4;
3795 mask = (1 << (shift * vfp_base_count)) - 1;
3796 for (regno = 0; regno < 16; regno += shift)
3797 if (((vfp_regs_free >> regno) & mask) == mask)
3798 break;
3799
3800 if (regno < 16)
3801 {
3802 int reg_char;
3803 int reg_scaled;
3804 int i;
3805
3806 vfp_regs_free &= ~(mask << regno);
3807 reg_scaled = regno / shift;
3808 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3809 for (i = 0; i < vfp_base_count; i++)
3810 {
3811 char name_buf[4];
3812 int regnum;
58d6951d
DJ
3813 if (reg_char == 'q')
3814 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3815 val + i * unit_length);
58d6951d
DJ
3816 else
3817 {
8c042590
PM
3818 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3819 reg_char, reg_scaled + i);
58d6951d
DJ
3820 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3821 strlen (name_buf));
3822 regcache_cooked_write (regcache, regnum,
3823 val + i * unit_length);
3824 }
90445bd3
DJ
3825 }
3826 continue;
3827 }
3828 else
3829 {
3830 /* This CPRC could not go in VFP registers, so all VFP
3831 registers are now marked as used. */
3832 vfp_regs_free = 0;
3833 }
3834 }
3835
2af48f68
PB
3836 /* Push stack padding for doubleword alignment. */
3837 if (nstack & (align - 1))
3838 {
3839 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3840 nstack += INT_REGISTER_SIZE;
3841 }
3842
3843 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3844 if (may_use_core_reg
3845 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3846 && align > INT_REGISTER_SIZE
3847 && argreg & 1)
3848 argreg++;
3849
2dd604e7
RE
3850 /* If the argument is a pointer to a function, and it is a
3851 Thumb function, create a LOCAL copy of the value and set
3852 the THUMB bit in it. */
3853 if (TYPE_CODE_PTR == typecode
3854 && target_type != NULL
f96b8fa0 3855 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3856 {
e17a4113 3857 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3858 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3859 {
224c3ddb 3860 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3861 store_unsigned_integer (copy, len, byte_order,
e17a4113 3862 MAKE_THUMB_ADDR (regval));
8c6363cf 3863 val = copy;
2dd604e7
RE
3864 }
3865 }
3866
3867 /* Copy the argument to general registers or the stack in
3868 register-sized pieces. Large arguments are split between
3869 registers and stack. */
3870 while (len > 0)
3871 {
f0c9063c 3872 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3873 CORE_ADDR regval
3874 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3875
90445bd3 3876 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3877 {
3878 /* The argument is being passed in a general purpose
3879 register. */
e17a4113 3880 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3881 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3882 if (arm_debug)
3883 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3884 argnum,
3885 gdbarch_register_name
2af46ca0 3886 (gdbarch, argreg),
f0c9063c 3887 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3888 regcache_cooked_write_unsigned (regcache, argreg, regval);
3889 argreg++;
3890 }
3891 else
3892 {
ef9bd0b8
YQ
3893 gdb_byte buf[INT_REGISTER_SIZE];
3894
3895 memset (buf, 0, sizeof (buf));
3896 store_unsigned_integer (buf, partial_len, byte_order, regval);
3897
2dd604e7
RE
3898 /* Push the arguments onto the stack. */
3899 if (arm_debug)
3900 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3901 argnum, nstack);
ef9bd0b8 3902 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3903 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3904 }
3905
3906 len -= partial_len;
3907 val += partial_len;
3908 }
3909 }
3910 /* If we have an odd number of words to push, then decrement the stack
3911 by one word now, so first stack argument will be dword aligned. */
3912 if (nstack & 4)
3913 sp -= 4;
3914
3915 while (si)
3916 {
3917 sp -= si->len;
3918 write_memory (sp, si->data, si->len);
3919 si = pop_stack_item (si);
3920 }
3921
3922 /* Finally, update the SP register. */
3923 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3924
3925 return sp;
3926}
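/* Worked example (illustrative only; the prototype is made up).  For a
   call to the function below, the base AAPCS places D in the even
   register pair r0/r1 (doubleword-aligned quantities use even pairs)
   and N in r2; with the VFP variant of AAPCS, D goes in d0 and N in
   r0.  Anything that no longer fits in r0-r3 is routed through the
   stack_item FILO and written below SP at the end of
   arm_push_dummy_call.  */

extern void push_dummy_call_sketch (double d, int n);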
3927
f53f0d0b
PB
3928
3929/* Always align the frame to an 8-byte boundary. This is required on
3930 some platforms and harmless on the rest. */
3931
3932static CORE_ADDR
3933arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3934{
3935 /* Align the stack to eight bytes. */
3936 return sp & ~ (CORE_ADDR) 7;
3937}
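/* Worked example (illustrative only): arm_frame_align (gdbarch,
   0xbefff7f5) returns 0xbefff7f0, since the low three bits are simply
   cleared; an already 8-byte-aligned SP is returned unchanged.  */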
3938
c906108c 3939static void
12b27276 3940print_fpu_flags (struct ui_file *file, int flags)
c906108c 3941{
c5aa993b 3942 if (flags & (1 << 0))
12b27276 3943 fputs_filtered ("IVO ", file);
c5aa993b 3944 if (flags & (1 << 1))
12b27276 3945 fputs_filtered ("DVZ ", file);
c5aa993b 3946 if (flags & (1 << 2))
12b27276 3947 fputs_filtered ("OFL ", file);
c5aa993b 3948 if (flags & (1 << 3))
12b27276 3949 fputs_filtered ("UFL ", file);
c5aa993b 3950 if (flags & (1 << 4))
12b27276
WN
3951 fputs_filtered ("INX ", file);
3952 fputc_filtered ('\n', file);
c906108c
SS
3953}
3954
5e74b15c
RE
3955/* Print interesting information about the floating point processor
3956 (if present) or emulator. */
34e8f22d 3957static void
d855c300 3958arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3959 struct frame_info *frame, const char *args)
c906108c 3960{
9c9acae0 3961 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3962 int type;
3963
3964 type = (status >> 24) & 127;
edefbb7c 3965 if (status & (1 << 31))
12b27276 3966 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3967 else
12b27276 3968 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3969 /* i18n: [floating point unit] mask */
12b27276
WN
3970 fputs_filtered (_("mask: "), file);
3971 print_fpu_flags (file, status >> 16);
edefbb7c 3972 /* i18n: [floating point unit] flags */
12b27276
WN
3973 fputs_filtered (_("flags: "), file);
3974 print_fpu_flags (file, status);
c906108c
SS
3975}
3976
27067745
UW
3977/* Construct the ARM extended floating point type. */
3978static struct type *
3979arm_ext_type (struct gdbarch *gdbarch)
3980{
3981 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3982
3983 if (!tdep->arm_ext_type)
3984 tdep->arm_ext_type
e9bb382b 3985 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3986 floatformats_arm_ext);
3987
3988 return tdep->arm_ext_type;
3989}
3990
58d6951d
DJ
3991static struct type *
3992arm_neon_double_type (struct gdbarch *gdbarch)
3993{
3994 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3995
3996 if (tdep->neon_double_type == NULL)
3997 {
3998 struct type *t, *elem;
3999
4000 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4001 TYPE_CODE_UNION);
4002 elem = builtin_type (gdbarch)->builtin_uint8;
4003 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4004 elem = builtin_type (gdbarch)->builtin_uint16;
4005 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4006 elem = builtin_type (gdbarch)->builtin_uint32;
4007 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4008 elem = builtin_type (gdbarch)->builtin_uint64;
4009 append_composite_type_field (t, "u64", elem);
4010 elem = builtin_type (gdbarch)->builtin_float;
4011 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4012 elem = builtin_type (gdbarch)->builtin_double;
4013 append_composite_type_field (t, "f64", elem);
4014
4015 TYPE_VECTOR (t) = 1;
4016 TYPE_NAME (t) = "neon_d";
4017 tdep->neon_double_type = t;
4018 }
4019
4020 return tdep->neon_double_type;
4021}
4022
4023/* FIXME: The vector types are not correctly ordered on big-endian
4024 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4025 bits of d0 - regardless of what unit size is being held in d0. So
4026 the offset of the first uint8 in d0 is 7, but the offset of the
4027 first float is 4. This code works as-is for little-endian
4028 targets. */
4029
4030static struct type *
4031arm_neon_quad_type (struct gdbarch *gdbarch)
4032{
4033 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4034
4035 if (tdep->neon_quad_type == NULL)
4036 {
4037 struct type *t, *elem;
4038
4039 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4040 TYPE_CODE_UNION);
4041 elem = builtin_type (gdbarch)->builtin_uint8;
4042 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4043 elem = builtin_type (gdbarch)->builtin_uint16;
4044 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4045 elem = builtin_type (gdbarch)->builtin_uint32;
4046 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4047 elem = builtin_type (gdbarch)->builtin_uint64;
4048 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4049 elem = builtin_type (gdbarch)->builtin_float;
4050 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4051 elem = builtin_type (gdbarch)->builtin_double;
4052 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4053
4054 TYPE_VECTOR (t) = 1;
4055 TYPE_NAME (t) = "neon_q";
4056 tdep->neon_quad_type = t;
4057 }
4058
4059 return tdep->neon_quad_type;
4060}
4061
34e8f22d
RE
4062/* Return the GDB type object for the "standard" data type of data in
4063 register N. */
4064
4065static struct type *
7a5ea0d4 4066arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4067{
58d6951d
DJ
4068 int num_regs = gdbarch_num_regs (gdbarch);
4069
4070 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4071 && regnum >= num_regs && regnum < num_regs + 32)
4072 return builtin_type (gdbarch)->builtin_float;
4073
4074 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4075 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4076 return arm_neon_quad_type (gdbarch);
4077
4078 /* If the target description has register information, we are only
4079 in this function so that we can override the types of
4080 double-precision registers for NEON. */
4081 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4082 {
4083 struct type *t = tdesc_register_type (gdbarch, regnum);
4084
4085 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4086 && TYPE_CODE (t) == TYPE_CODE_FLT
4087 && gdbarch_tdep (gdbarch)->have_neon)
4088 return arm_neon_double_type (gdbarch);
4089 else
4090 return t;
4091 }
4092
34e8f22d 4093 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4094 {
4095 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4096 return builtin_type (gdbarch)->builtin_void;
4097
4098 return arm_ext_type (gdbarch);
4099 }
e4c16157 4100 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4101 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4102 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4103 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4104 else if (regnum >= ARRAY_SIZE (arm_register_names))
4105 /* These registers are only supported on targets which supply
4106 an XML description. */
df4df182 4107 return builtin_type (gdbarch)->builtin_int0;
032758dc 4108 else
df4df182 4109 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4110}
4111
ff6f572f
DJ
4112/* Map a DWARF register REGNUM onto the appropriate GDB register
4113 number. */
4114
4115static int
d3f73121 4116arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4117{
4118 /* Core integer regs. */
4119 if (reg >= 0 && reg <= 15)
4120 return reg;
4121
4122 /* Legacy FPA encoding. These were once used in a way which
4123 overlapped with VFP register numbering, so their use is
4124 discouraged, but GDB doesn't support the ARM toolchain
4125 which used them for VFP. */
4126 if (reg >= 16 && reg <= 23)
4127 return ARM_F0_REGNUM + reg - 16;
4128
4129 /* New assignments for the FPA registers. */
4130 if (reg >= 96 && reg <= 103)
4131 return ARM_F0_REGNUM + reg - 96;
4132
4133 /* WMMX register assignments. */
4134 if (reg >= 104 && reg <= 111)
4135 return ARM_WCGR0_REGNUM + reg - 104;
4136
4137 if (reg >= 112 && reg <= 127)
4138 return ARM_WR0_REGNUM + reg - 112;
4139
4140 if (reg >= 192 && reg <= 199)
4141 return ARM_WC0_REGNUM + reg - 192;
4142
58d6951d
DJ
4143 /* VFP v2 registers. A double precision value is actually
4144 in d1 rather than s2, but the ABI only defines numbering
4145 for the single precision registers. This will "just work"
4146 in GDB for little endian targets (we'll read eight bytes,
4147 starting in s0 and then progressing to s1), but will be
4148 reversed on big endian targets with VFP. This won't
4149 be a problem for the new Neon quad registers; you're supposed
4150 to use DW_OP_piece for those. */
4151 if (reg >= 64 && reg <= 95)
4152 {
4153 char name_buf[4];
4154
8c042590 4155 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4156 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4157 strlen (name_buf));
4158 }
4159
4160 /* VFP v3 / Neon registers. This range is also used for VFP v2
4161 registers, except that it now describes d0 instead of s0. */
4162 if (reg >= 256 && reg <= 287)
4163 {
4164 char name_buf[4];
4165
8c042590 4166 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4167 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4168 strlen (name_buf));
4169 }
4170
ff6f572f
DJ
4171 return -1;
4172}
4173
26216b98
AC
4174/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4175static int
e7faf938 4176arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4177{
4178 int reg = regnum;
e7faf938 4179 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4180
ff6f572f
DJ
4181 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4182 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4183
4184 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4185 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4186
4187 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4188 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4189
26216b98
AC
4190 if (reg < NUM_GREGS)
4191 return SIM_ARM_R0_REGNUM + reg;
4192 reg -= NUM_GREGS;
4193
4194 if (reg < NUM_FREGS)
4195 return SIM_ARM_FP0_REGNUM + reg;
4196 reg -= NUM_FREGS;
4197
4198 if (reg < NUM_SREGS)
4199 return SIM_ARM_FPS_REGNUM + reg;
4200 reg -= NUM_SREGS;
4201
edefbb7c 4202 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4203}
34e8f22d 4204
a37b3cc0
AC
4205/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4206 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4207 It is thought that this is is the floating-point register format on
4208 little-endian systems. */
c906108c 4209
ed9a39eb 4210static void
b508a996 4211convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4212 void *dbl, int endianess)
c906108c 4213{
a37b3cc0 4214 DOUBLEST d;
be8626e0
MD
4215
4216 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4217 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4218 else
4219 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4220 ptr, &d);
b508a996 4221 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4222}
4223
34e8f22d 4224static void
be8626e0
MD
4225convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4226 int endianess)
c906108c 4227{
a37b3cc0 4228 DOUBLEST d;
be8626e0 4229
b508a996 4230 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4231 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4232 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4233 else
4234 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4235 &d, dbl);
c906108c 4236}
ed9a39eb 4237
d9311bfa
AT
4238/* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4239 of the appropriate mode (as encoded in the PC value), even if this
4240 differs from what would be expected according to the symbol tables. */
4241
4242void
4243arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4244 struct address_space *aspace,
4245 CORE_ADDR pc)
c906108c 4246{
d9311bfa 4247 pc = gdbarch_addr_bits_remove (gdbarch, pc);
c5aa993b 4248
d9311bfa 4249 insert_single_step_breakpoint (gdbarch, aspace, pc);
d9311bfa 4250}
c5aa993b 4251
d9311bfa
AT
4252/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4253 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4254 NULL if an error occurs. BUF is freed. */
c906108c 4255
d9311bfa
AT
4256static gdb_byte *
4257extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4258 int old_len, int new_len)
4259{
4260 gdb_byte *new_buf;
4261 int bytes_to_read = new_len - old_len;
c906108c 4262
d9311bfa
AT
4263 new_buf = (gdb_byte *) xmalloc (new_len);
4264 memcpy (new_buf + bytes_to_read, buf, old_len);
4265 xfree (buf);
4266 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4267 {
4268 xfree (new_buf);
4269 return NULL;
c906108c 4270 }
d9311bfa 4271 return new_buf;
c906108c
SS
4272}
4273
d9311bfa
AT
4274/* An IT block is at most the 2-byte IT instruction followed by
4275 four 4-byte instructions. The furthest back we must search to
4276 find an IT block that affects the current instruction is thus
4277 2 + 3 * 4 == 14 bytes. */
4278#define MAX_IT_BLOCK_PREFIX 14
177321bd 4279
d9311bfa
AT
4280/* Use a quick scan if there are more than this many bytes of
4281 code. */
4282#define IT_SCAN_THRESHOLD 32
177321bd 4283
d9311bfa
AT
4284/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4285 A breakpoint in an IT block may not be hit, depending on the
4286 condition flags. */
ad527d2e 4287static CORE_ADDR
d9311bfa 4288arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4289{
d9311bfa
AT
4290 gdb_byte *buf;
4291 char map_type;
4292 CORE_ADDR boundary, func_start;
4293 int buf_len;
4294 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4295 int i, any, last_it, last_it_count;
177321bd 4296
d9311bfa
AT
4297 /* If we are using BKPT breakpoints, none of this is necessary. */
4298 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4299 return bpaddr;
177321bd 4300
d9311bfa
AT
4301 /* ARM mode does not have this problem. */
4302 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4303 return bpaddr;
177321bd 4304
d9311bfa
AT
4305 /* We are setting a breakpoint in Thumb code that could potentially
4306 contain an IT block. The first step is to find how much Thumb
4307 code there is; we do not need to read outside of known Thumb
4308 sequences. */
4309 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4310 if (map_type == 0)
4311 /* Thumb-2 code must have mapping symbols to have a chance. */
4312 return bpaddr;
9dca5578 4313
d9311bfa 4314 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4315
d9311bfa
AT
4316 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4317 && func_start > boundary)
4318 boundary = func_start;
9dca5578 4319
d9311bfa
AT
4320 /* Search for a candidate IT instruction. We have to do some fancy
4321 footwork to distinguish a real IT instruction from the second
4322 half of a 32-bit instruction, but there is no need for that if
4323 there's no candidate. */
325fac50 4324 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4325 if (buf_len == 0)
4326 /* No room for an IT instruction. */
4327 return bpaddr;
c906108c 4328
d9311bfa
AT
4329 buf = (gdb_byte *) xmalloc (buf_len);
4330 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4331 return bpaddr;
4332 any = 0;
4333 for (i = 0; i < buf_len; i += 2)
c906108c 4334 {
d9311bfa
AT
4335 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4336 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4337 {
d9311bfa
AT
4338 any = 1;
4339 break;
25b41d01 4340 }
c906108c 4341 }
d9311bfa
AT
4342
4343 if (any == 0)
c906108c 4344 {
d9311bfa
AT
4345 xfree (buf);
4346 return bpaddr;
f9d67f43
DJ
4347 }
4348
4349 /* OK, the code bytes before this instruction contain at least one
4350 halfword which resembles an IT instruction. We know that it's
4351 Thumb code, but there are still two possibilities. Either the
4352 halfword really is an IT instruction, or it is the second half of
4353 a 32-bit Thumb instruction. The only way we can tell is to
4354 scan forwards from a known instruction boundary. */
4355 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4356 {
4357 int definite;
4358
4359 /* There's a lot of code before this instruction. Start with an
4360 optimistic search; it's easy to recognize halfwords that can
4361 not be the start of a 32-bit instruction, and use that to
4362 lock on to the instruction boundaries. */
4363 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4364 if (buf == NULL)
4365 return bpaddr;
4366 buf_len = IT_SCAN_THRESHOLD;
4367
4368 definite = 0;
4369 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4370 {
4371 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4372 if (thumb_insn_size (inst1) == 2)
4373 {
4374 definite = 1;
4375 break;
4376 }
4377 }
4378
4379 /* At this point, if DEFINITE, BUF[I] is the first place we
4380 are sure that we know the instruction boundaries, and it is far
4381 enough from BPADDR that we could not miss an IT instruction
4382 affecting BPADDR. If ! DEFINITE, give up - start from a
4383 known boundary. */
4384 if (! definite)
4385 {
0963b4bd
MS
4386 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4387 bpaddr - boundary);
f9d67f43
DJ
4388 if (buf == NULL)
4389 return bpaddr;
4390 buf_len = bpaddr - boundary;
4391 i = 0;
4392 }
4393 }
4394 else
4395 {
4396 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4397 if (buf == NULL)
4398 return bpaddr;
4399 buf_len = bpaddr - boundary;
4400 i = 0;
4401 }
4402
4403 /* Scan forwards. Find the last IT instruction before BPADDR. */
4404 last_it = -1;
4405 last_it_count = 0;
4406 while (i < buf_len)
4407 {
4408 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4409 last_it_count--;
4410 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4411 {
4412 last_it = i;
4413 if (inst1 & 0x0001)
4414 last_it_count = 4;
4415 else if (inst1 & 0x0002)
4416 last_it_count = 3;
4417 else if (inst1 & 0x0004)
4418 last_it_count = 2;
4419 else
4420 last_it_count = 1;
4421 }
4422 i += thumb_insn_size (inst1);
4423 }
4424
4425 xfree (buf);
4426
4427 if (last_it == -1)
4428 /* There wasn't really an IT instruction after all. */
4429 return bpaddr;
4430
4431 if (last_it_count < 1)
4432 /* It was too far away. */
4433 return bpaddr;
4434
4435 /* This really is a trouble spot. Move the breakpoint to the IT
4436 instruction. */
4437 return bpaddr - buf_len + last_it;
4438}
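/* Illustrative only (not GDB code): why the adjustment above matters.
   In the Thumb-2 sequence below, a breakpoint requested on the ADDEQ
   sits inside the IT block and might never be hit if the condition is
   false, so arm_adjust_breakpoint_address moves it back onto the IT
   instruction itself.

       cmp    r0, #0
       ite    eq            @ breakpoint is moved here
       addeq  r1, r1, #1    @ user asked for a breakpoint here
       subne  r1, r1, #1  */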
4439
cca44b1b 4440/* ARM displaced stepping support.
c906108c 4441
cca44b1b 4442 Generally ARM displaced stepping works as follows:
c906108c 4443
cca44b1b 4444 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4445 arm_process_displaced_insn. Depending on the type of instruction, it is
4446 then copied to a scratch location, possibly in a modified form. The
4447 copy_* set of functions performs such modification, as necessary. A
4448 breakpoint is placed after the modified instruction in the scratch space
4449 to return control to GDB. Note in particular that instructions which
4450 modify the PC will no longer do so after modification.
c5aa993b 4451
cca44b1b
JB
4452 2. The instruction is single-stepped, by setting the PC to the scratch
4453 location address, and resuming. Control returns to GDB when the
4454 breakpoint is hit.
c5aa993b 4455
cca44b1b
JB
4456 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4457 function used for the current instruction. This function's job is to
4458 put the CPU/memory state back to what it would have been if the
4459 instruction had been executed unmodified in its original location. */
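/* Illustrative outline only (not the GDB implementation; "scratch
   area" stands for the displaced-stepping buffer):

     copy:     decode the insn at the breakpoint address and write a
               possibly modified copy, followed by a breakpoint, into
               the scratch area;
     step:     set PC to the scratch area and resume; GDB regains
               control at that trailing breakpoint;
     cleanup:  call dsc->cleanup (gdbarch, regs, dsc) to fix up PC and
               any other state so the result matches what execution at
               the original address would have produced.  */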
c5aa993b 4460
cca44b1b
JB
4461/* NOP instruction (mov r0, r0). */
4462#define ARM_NOP 0xe1a00000
34518530 4463#define THUMB_NOP 0x4600
cca44b1b
JB
4464
4465/* Helper for register reads for displaced stepping. In particular, this
4466 returns the PC as it would be seen by the instruction at its original
4467 location. */
4468
4469ULONGEST
36073a92
YQ
4470displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4471 int regno)
cca44b1b
JB
4472{
4473 ULONGEST ret;
36073a92 4474 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4475
bf9f652a 4476 if (regno == ARM_PC_REGNUM)
cca44b1b 4477 {
4db71c0b
YQ
4478 /* Compute pipeline offset:
4479 - When executing an ARM instruction, PC reads as the address of the
4480 current instruction plus 8.
4481 - When executing a Thumb instruction, PC reads as the address of the
4482 current instruction plus 4. */
4483
36073a92 4484 if (!dsc->is_thumb)
4db71c0b
YQ
4485 from += 8;
4486 else
4487 from += 4;
4488
cca44b1b
JB
4489 if (debug_displaced)
4490 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4491 (unsigned long) from);
4492 return (ULONGEST) from;
cca44b1b 4493 }
c906108c 4494 else
cca44b1b
JB
4495 {
4496 regcache_cooked_read_unsigned (regs, regno, &ret);
4497 if (debug_displaced)
4498 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4499 regno, (unsigned long) ret);
4500 return ret;
4501 }
c906108c
SS
4502}
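/* Worked example (illustrative only): for an ARM-state instruction at
   address 0x8000, displaced_read_reg (regs, dsc, ARM_PC_REGNUM)
   returns 0x8008; for a Thumb-state instruction at the same address it
   returns 0x8004.  Reads of any other register come straight from the
   regcache.  */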
4503
cca44b1b
JB
4504static int
4505displaced_in_arm_mode (struct regcache *regs)
4506{
4507 ULONGEST ps;
9779414d 4508 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4509
cca44b1b 4510 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4511
9779414d 4512 return (ps & t_bit) == 0;
cca44b1b 4513}
66e810cd 4514
cca44b1b 4515/* Write to the PC as from a branch instruction. */
c906108c 4516
cca44b1b 4517static void
36073a92
YQ
4518branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4519 ULONGEST val)
c906108c 4520{
36073a92 4521 if (!dsc->is_thumb)
cca44b1b
JB
4522 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4523 architecture versions < 6. */
0963b4bd
MS
4524 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4525 val & ~(ULONGEST) 0x3);
cca44b1b 4526 else
0963b4bd
MS
4527 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4528 val & ~(ULONGEST) 0x1);
cca44b1b 4529}
66e810cd 4530
cca44b1b
JB
4531/* Write to the PC as from a branch-exchange instruction. */
4532
4533static void
4534bx_write_pc (struct regcache *regs, ULONGEST val)
4535{
4536 ULONGEST ps;
9779414d 4537 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4538
4539 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4540
4541 if ((val & 1) == 1)
c906108c 4542 {
9779414d 4543 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4544 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4545 }
4546 else if ((val & 2) == 0)
4547 {
9779414d 4548 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4549 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4550 }
4551 else
4552 {
cca44b1b
JB
4553 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4554 mode, align dest to 4 bytes). */
4555 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4556 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4557 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4558 }
4559}
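/* Worked examples (illustrative only): bx_write_pc (regs, 0x8001) sets
   the Thumb bit in CPSR and writes PC = 0x8000; bx_write_pc (regs,
   0x8000) clears the Thumb bit and leaves PC = 0x8000; 0x8002 is the
   unpredictable case handled with the warning above (ARM mode, PC
   aligned down to 0x8000).  */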
ed9a39eb 4560
cca44b1b 4561/* Write to the PC as if from a load instruction. */
ed9a39eb 4562
34e8f22d 4563static void
36073a92
YQ
4564load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4565 ULONGEST val)
ed9a39eb 4566{
cca44b1b
JB
4567 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4568 bx_write_pc (regs, val);
4569 else
36073a92 4570 branch_write_pc (regs, dsc, val);
cca44b1b 4571}
be8626e0 4572
cca44b1b
JB
4573/* Write to the PC as if from an ALU instruction. */
4574
4575static void
36073a92
YQ
4576alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4577 ULONGEST val)
cca44b1b 4578{
36073a92 4579 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4580 bx_write_pc (regs, val);
4581 else
36073a92 4582 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4583}
4584
4585/* Helper for writing to registers for displaced stepping. Writing to the PC
4586 has varying effects depending on the instruction which does the write:
4587 this is controlled by the WRITE_PC argument. */
4588
4589void
4590displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4591 int regno, ULONGEST val, enum pc_write_style write_pc)
4592{
bf9f652a 4593 if (regno == ARM_PC_REGNUM)
08216dd7 4594 {
cca44b1b
JB
4595 if (debug_displaced)
4596 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4597 (unsigned long) val);
4598 switch (write_pc)
08216dd7 4599 {
cca44b1b 4600 case BRANCH_WRITE_PC:
36073a92 4601 branch_write_pc (regs, dsc, val);
08216dd7
RE
4602 break;
4603
cca44b1b
JB
4604 case BX_WRITE_PC:
4605 bx_write_pc (regs, val);
4606 break;
4607
4608 case LOAD_WRITE_PC:
36073a92 4609 load_write_pc (regs, dsc, val);
cca44b1b
JB
4610 break;
4611
4612 case ALU_WRITE_PC:
36073a92 4613 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4614 break;
4615
4616 case CANNOT_WRITE_PC:
4617 warning (_("Instruction wrote to PC in an unexpected way when "
4618 "single-stepping"));
08216dd7
RE
4619 break;
4620
4621 default:
97b9747c
JB
4622 internal_error (__FILE__, __LINE__,
4623 _("Invalid argument to displaced_write_reg"));
08216dd7 4624 }
b508a996 4625
cca44b1b 4626 dsc->wrote_to_pc = 1;
b508a996 4627 }
ed9a39eb 4628 else
b508a996 4629 {
cca44b1b
JB
4630 if (debug_displaced)
4631 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4632 regno, (unsigned long) val);
4633 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4634 }
34e8f22d
RE
4635}
4636
cca44b1b
JB
4637/* This function is used to concisely determine if an instruction INSN
4638 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4639 corresponding fields of BITMASK set to 0b1111. The function
4640 returns 1 if any of these fields in INSN reference the PC
4641 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4642
4643static int
cca44b1b 4644insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4645{
cca44b1b 4646 uint32_t lowbit = 1;
67255d04 4647
cca44b1b
JB
4648 while (bitmask != 0)
4649 {
4650 uint32_t mask;
44e1a9eb 4651
cca44b1b
JB
4652 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4653 ;
67255d04 4654
cca44b1b
JB
4655 if (!lowbit)
4656 break;
67255d04 4657
cca44b1b 4658 mask = lowbit * 0xf;
67255d04 4659
cca44b1b
JB
4660 if ((insn & mask) == mask)
4661 return 1;
4662
4663 bitmask &= ~mask;
67255d04
RE
4664 }
4665
cca44b1b
JB
4666 return 0;
4667}
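/* Worked example (illustrative only): for the ARM instruction
   0xe59f0008 (ldr r0, [pc, #8]) and BITMASK 0x000f0000 (the Rn field),
   insn_references_pc returns 1 because Rn is 0b1111, i.e. the PC; for
   0xe5910008 (ldr r0, [r1, #8]) it returns 0.  */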
2af48f68 4668
cca44b1b
JB
4669/* The simplest copy function. Many instructions have the same effect no
4670 matter what address they are executed at: in those cases, use this. */
67255d04 4671
cca44b1b 4672static int
7ff120b4
YQ
4673arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4674 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4675{
4676 if (debug_displaced)
4677 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4678 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4679 iname);
67255d04 4680
cca44b1b 4681 dsc->modinsn[0] = insn;
67255d04 4682
cca44b1b
JB
4683 return 0;
4684}
4685
34518530
YQ
4686static int
4687thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4688 uint16_t insn2, const char *iname,
4689 struct displaced_step_closure *dsc)
4690{
4691 if (debug_displaced)
4692 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4693 "opcode/class '%s' unmodified\n", insn1, insn2,
4694 iname);
4695
4696 dsc->modinsn[0] = insn1;
4697 dsc->modinsn[1] = insn2;
4698 dsc->numinsns = 2;
4699
4700 return 0;
4701}
4702
4703/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4704 modification. */
4705static int
615234c1 4706thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4707 const char *iname,
4708 struct displaced_step_closure *dsc)
4709{
4710 if (debug_displaced)
4711 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4712 "opcode/class '%s' unmodified\n", insn,
4713 iname);
4714
4715 dsc->modinsn[0] = insn;
4716
4717 return 0;
4718}
4719
cca44b1b
JB
4720/* Preload instructions with immediate offset. */
4721
4722static void
6e39997a 4723cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4724 struct regcache *regs, struct displaced_step_closure *dsc)
4725{
4726 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4727 if (!dsc->u.preload.immed)
4728 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4729}
4730
7ff120b4
YQ
4731static void
4732install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4733 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4734{
cca44b1b 4735 ULONGEST rn_val;
cca44b1b
JB
4736 /* Preload instructions:
4737
4738 {pli/pld} [rn, #+/-imm]
4739 ->
4740 {pli/pld} [r0, #+/-imm]. */
4741
36073a92
YQ
4742 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4743 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4744 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4745 dsc->u.preload.immed = 1;
4746
cca44b1b 4747 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4748}
4749
cca44b1b 4750static int
7ff120b4 4751arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4752 struct displaced_step_closure *dsc)
4753{
4754 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4755
7ff120b4
YQ
4756 if (!insn_references_pc (insn, 0x000f0000ul))
4757 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4758
4759 if (debug_displaced)
4760 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4761 (unsigned long) insn);
4762
7ff120b4
YQ
4763 dsc->modinsn[0] = insn & 0xfff0ffff;
4764
4765 install_preload (gdbarch, regs, dsc, rn);
4766
4767 return 0;
4768}
4769
34518530
YQ
4770static int
4771thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4772 struct regcache *regs, struct displaced_step_closure *dsc)
4773{
4774 unsigned int rn = bits (insn1, 0, 3);
4775 unsigned int u_bit = bit (insn1, 7);
4776 int imm12 = bits (insn2, 0, 11);
4777 ULONGEST pc_val;
4778
4779 if (rn != ARM_PC_REGNUM)
4780 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4781
4782 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4783 PLD (literal) Encoding T1. */
4784 if (debug_displaced)
4785 fprintf_unfiltered (gdb_stdlog,
4786 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4787 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4788 imm12);
4789
4790 if (!u_bit)
4791 imm12 = -1 * imm12;
4792
4793 /* Rewrite instruction {pli/pld} PC imm12 into:
4794 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4795
4796 {pli/pld} [r0, r1]
4797
4798 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4799
4800 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4801 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4802
4803 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4804
4805 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4806 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4807 dsc->u.preload.immed = 0;
4808
4809 /* {pli/pld} [r0, r1] */
4810 dsc->modinsn[0] = insn1 & 0xfff0;
4811 dsc->modinsn[1] = 0xf001;
4812 dsc->numinsns = 2;
4813
4814 dsc->cleanup = &cleanup_preload;
4815 return 0;
4816}
4817
7ff120b4
YQ
4818/* Preload instructions with register offset. */
4819
4820static void
4821install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4822 struct displaced_step_closure *dsc, unsigned int rn,
4823 unsigned int rm)
4824{
4825 ULONGEST rn_val, rm_val;
4826
cca44b1b
JB
4827 /* Preload register-offset instructions:
4828
4829 {pli/pld} [rn, rm {, shift}]
4830 ->
4831 {pli/pld} [r0, r1 {, shift}]. */
4832
36073a92
YQ
4833 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4834 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4835 rn_val = displaced_read_reg (regs, dsc, rn);
4836 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4837 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4838 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4839 dsc->u.preload.immed = 0;
4840
cca44b1b 4841 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4842}
4843
4844static int
4845arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4846 struct regcache *regs,
4847 struct displaced_step_closure *dsc)
4848{
4849 unsigned int rn = bits (insn, 16, 19);
4850 unsigned int rm = bits (insn, 0, 3);
4851
4852
4853 if (!insn_references_pc (insn, 0x000f000ful))
4854 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4855
4856 if (debug_displaced)
4857 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4858 (unsigned long) insn);
4859
4860 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4861
7ff120b4 4862 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4863 return 0;
4864}
4865
4866/* Copy/cleanup coprocessor load and store instructions. */
4867
4868static void
6e39997a 4869cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4870 struct regcache *regs,
4871 struct displaced_step_closure *dsc)
4872{
36073a92 4873 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4874
4875 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4876
4877 if (dsc->u.ldst.writeback)
4878 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4879}
4880
7ff120b4
YQ
4881static void
4882install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4883 struct displaced_step_closure *dsc,
4884 int writeback, unsigned int rn)
cca44b1b 4885{
cca44b1b 4886 ULONGEST rn_val;
cca44b1b 4887
cca44b1b
JB
4888 /* Coprocessor load/store instructions:
4889
4890 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4891 ->
4892 {stc/stc2} [r0, #+/-imm].
4893
4894 ldc/ldc2 are handled identically. */
4895
36073a92
YQ
4896 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4897 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4898 /* PC should be 4-byte aligned. */
4899 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4900 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4901
7ff120b4 4902 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4903 dsc->u.ldst.rn = rn;
4904
7ff120b4
YQ
4905 dsc->cleanup = &cleanup_copro_load_store;
4906}
4907
4908static int
4909arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4910 struct regcache *regs,
4911 struct displaced_step_closure *dsc)
4912{
4913 unsigned int rn = bits (insn, 16, 19);
4914
4915 if (!insn_references_pc (insn, 0x000f0000ul))
4916 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4917
4918 if (debug_displaced)
4919 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4920 "load/store insn %.8lx\n", (unsigned long) insn);
4921
cca44b1b
JB
4922 dsc->modinsn[0] = insn & 0xfff0ffff;
4923
7ff120b4 4924 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4925
4926 return 0;
4927}
4928
34518530
YQ
4929static int
4930thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4931 uint16_t insn2, struct regcache *regs,
4932 struct displaced_step_closure *dsc)
4933{
4934 unsigned int rn = bits (insn1, 0, 3);
4935
4936 if (rn != ARM_PC_REGNUM)
4937 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4938 "copro load/store", dsc);
4939
4940 if (debug_displaced)
4941 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4942 "load/store insn %.4x%.4x\n", insn1, insn2);
4943
4944 dsc->modinsn[0] = insn1 & 0xfff0;
4945 dsc->modinsn[1] = insn2;
4946 dsc->numinsns = 2;
4947
4948 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4949 don't support writeback, so pass 0. */
4950 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4951
4952 return 0;
4953}
4954
cca44b1b
JB
4955/* Clean up branch instructions (actually perform the branch, by setting
4956 PC). */
4957
4958static void
6e39997a 4959cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4960 struct displaced_step_closure *dsc)
4961{
36073a92 4962 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4963 int branch_taken = condition_true (dsc->u.branch.cond, status);
4964 enum pc_write_style write_pc = dsc->u.branch.exchange
4965 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4966
4967 if (!branch_taken)
4968 return;
4969
4970 if (dsc->u.branch.link)
4971 {
8c8dba6d
YQ
4972 /* The value of LR should be the address of the insn after the current
4973 one. To avoid confusing the logic that later handles `bx lr', if the
4974 current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
4975 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4976
4977 if (dsc->is_thumb)
4978 next_insn_addr |= 0x1;
4979
4980 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4981 CANNOT_WRITE_PC);
cca44b1b
JB
4982 }
4983
bf9f652a 4984 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4985}
4986
4987/* Copy B/BL/BLX instructions with immediate destinations. */
4988
7ff120b4
YQ
4989static void
4990install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4991 struct displaced_step_closure *dsc,
4992 unsigned int cond, int exchange, int link, long offset)
4993{
4994 /* Implement "BL<cond> <label>" as:
4995
4996 Preparation: cond <- instruction condition
4997 Insn: mov r0, r0 (nop)
4998 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4999
5000 B<cond> similar, but don't set r14 in cleanup. */
5001
5002 dsc->u.branch.cond = cond;
5003 dsc->u.branch.link = link;
5004 dsc->u.branch.exchange = exchange;
5005
2b16b2e3
YQ
5006 dsc->u.branch.dest = dsc->insn_addr;
5007 if (link && exchange)
5008 /* For BLX, offset is computed from the Align (PC, 4). */
5009 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5010
7ff120b4 5011 if (dsc->is_thumb)
2b16b2e3 5012 dsc->u.branch.dest += 4 + offset;
7ff120b4 5013 else
2b16b2e3 5014 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
5015
5016 dsc->cleanup = &cleanup_branch;
5017}
cca44b1b 5018static int
7ff120b4
YQ
5019arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5020 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5021{
5022 unsigned int cond = bits (insn, 28, 31);
5023 int exchange = (cond == 0xf);
5024 int link = exchange || bit (insn, 24);
cca44b1b
JB
5025 long offset;
5026
5027 if (debug_displaced)
5028 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5029 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5030 (unsigned long) insn);
cca44b1b
JB
5031 if (exchange)
5032 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5033 then arrange the switch into Thumb mode. */
5034 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5035 else
5036 offset = bits (insn, 0, 23) << 2;
5037
5038 if (bit (offset, 25))
5039 offset = offset | ~0x3ffffff;
5040
cca44b1b
JB
5041 dsc->modinsn[0] = ARM_NOP;
5042
7ff120b4 5043 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5044 return 0;
5045}
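/* Worked example (illustrative only): the ARM instruction 0xeb000001
   (bl with a 24-bit immediate of 1) at address 0x8000 yields
   dsc->u.branch.dest == 0x8000 + 8 + (1 << 2) == 0x800c; if the
   condition holds, cleanup_branch later writes 0x800c to the PC and
   0x8004 (the next insn) to LR.  */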
5046
34518530
YQ
5047static int
5048thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5049 uint16_t insn2, struct regcache *regs,
5050 struct displaced_step_closure *dsc)
5051{
5052 int link = bit (insn2, 14);
5053 int exchange = link && !bit (insn2, 12);
5054 int cond = INST_AL;
5055 long offset = 0;
5056 int j1 = bit (insn2, 13);
5057 int j2 = bit (insn2, 11);
5058 int s = sbits (insn1, 10, 10);
5059 int i1 = !(j1 ^ bit (insn1, 10));
5060 int i2 = !(j2 ^ bit (insn1, 10));
5061
5062 if (!link && !exchange) /* B */
5063 {
5064 offset = (bits (insn2, 0, 10) << 1);
5065 if (bit (insn2, 12)) /* Encoding T4 */
5066 {
5067 offset |= (bits (insn1, 0, 9) << 12)
5068 | (i2 << 22)
5069 | (i1 << 23)
5070 | (s << 24);
5071 cond = INST_AL;
5072 }
5073 else /* Encoding T3 */
5074 {
5075 offset |= (bits (insn1, 0, 5) << 12)
5076 | (j1 << 18)
5077 | (j2 << 19)
5078 | (s << 20);
5079 cond = bits (insn1, 6, 9);
5080 }
5081 }
5082 else
5083 {
5084 offset = (bits (insn1, 0, 9) << 12);
5085 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5086 offset |= exchange ?
5087 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5088 }
5089
5090 if (debug_displaced)
5091 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5092 "%.4x %.4x with offset %.8lx\n",
5093 link ? (exchange) ? "blx" : "bl" : "b",
5094 insn1, insn2, offset);
5095
5096 dsc->modinsn[0] = THUMB_NOP;
5097
5098 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5099 return 0;
5100}
5101
5102/* Copy 16-bit Thumb B instructions.  */
5103static int
615234c1 5104thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5105 struct displaced_step_closure *dsc)
5106{
5107 unsigned int cond = 0;
5108 int offset = 0;
5109 unsigned short bit_12_15 = bits (insn, 12, 15);
5110 CORE_ADDR from = dsc->insn_addr;
5111
5112 if (bit_12_15 == 0xd)
5113 {
5114      /* Encoding T1: offset = SignExtend (imm8:'0', 32).  */
5115 offset = sbits ((insn << 1), 0, 8);
5116 cond = bits (insn, 8, 11);
5117 }
5118 else if (bit_12_15 == 0xe) /* Encoding T2 */
5119 {
5120 offset = sbits ((insn << 1), 0, 11);
5121 cond = INST_AL;
5122 }
5123
5124 if (debug_displaced)
5125 fprintf_unfiltered (gdb_stdlog,
5126 "displaced: copying b immediate insn %.4x "
5127 "with offset %d\n", insn, offset);
5128
5129 dsc->u.branch.cond = cond;
5130 dsc->u.branch.link = 0;
5131 dsc->u.branch.exchange = 0;
5132 dsc->u.branch.dest = from + 4 + offset;
5133
5134 dsc->modinsn[0] = THUMB_NOP;
5135
5136 dsc->cleanup = &cleanup_branch;
5137
5138 return 0;
5139}
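/* Worked example (illustrative, added for exposition): for the Thumb
   self-branch 0xE7FE ("b ."), bits 12..15 are 0xE, so this is encoding T2;
   (insn << 1) & 0xfff = 0xFFC, whose sign bit (bit 11) is set, so sbits
   returns -4.  The destination is then from + 4 + (-4) = from, matching the
   original instruction.  */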
5140
cca44b1b
JB
5141/* Copy BX/BLX with register-specified destinations. */
5142
7ff120b4
YQ
5143static void
5144install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5145 struct displaced_step_closure *dsc, int link,
5146 unsigned int cond, unsigned int rm)
cca44b1b 5147{
cca44b1b
JB
5148  /* Implement "{BX,BLX}<cond> <reg>" as:
5149
5150 Preparation: cond <- instruction condition
5151 Insn: mov r0, r0 (nop)
5152 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5153
5154 Don't set r14 in cleanup for BX. */
5155
36073a92 5156 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5157
5158 dsc->u.branch.cond = cond;
5159 dsc->u.branch.link = link;
cca44b1b 5160
7ff120b4 5161 dsc->u.branch.exchange = 1;
cca44b1b
JB
5162
5163 dsc->cleanup = &cleanup_branch;
7ff120b4 5164}
cca44b1b 5165
7ff120b4
YQ
5166static int
5167arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5168 struct regcache *regs, struct displaced_step_closure *dsc)
5169{
5170 unsigned int cond = bits (insn, 28, 31);
5171 /* BX: x12xxx1x
5172 BLX: x12xxx3x. */
5173 int link = bit (insn, 5);
5174 unsigned int rm = bits (insn, 0, 3);
5175
5176 if (debug_displaced)
5177 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5178 (unsigned long) insn);
5179
5180 dsc->modinsn[0] = ARM_NOP;
5181
5182 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5183 return 0;
5184}
5185
34518530
YQ
5186static int
5187thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5188 struct regcache *regs,
5189 struct displaced_step_closure *dsc)
5190{
5191 int link = bit (insn, 7);
5192 unsigned int rm = bits (insn, 3, 6);
5193
5194 if (debug_displaced)
5195 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5196 (unsigned short) insn);
5197
5198 dsc->modinsn[0] = THUMB_NOP;
5199
5200 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5201
5202 return 0;
5203}
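/* Worked example (illustrative, added for exposition): for the Thumb
   instruction "blx r3" (0x4798), bit 7 gives link = 1 and bits 3..6 give
   rm = 3.  install_bx_blx_reg records the current value of r3 as the branch
   destination, and cleanup_branch later writes LR with the address of the
   following instruction (bit 0 set, since this is Thumb) and performs a
   BX-style write of the r3 value to the PC.  */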
5204
5205
0963b4bd 5206/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5207
5208static void
6e39997a 5209cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5210 struct regcache *regs, struct displaced_step_closure *dsc)
5211{
36073a92 5212 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5213 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5214 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5215 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5216}
5217
5218static int
7ff120b4
YQ
5219arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5220 struct displaced_step_closure *dsc)
cca44b1b
JB
5221{
5222 unsigned int rn = bits (insn, 16, 19);
5223 unsigned int rd = bits (insn, 12, 15);
5224 unsigned int op = bits (insn, 21, 24);
5225 int is_mov = (op == 0xd);
5226 ULONGEST rd_val, rn_val;
cca44b1b
JB
5227
5228 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5229 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5230
5231 if (debug_displaced)
5232 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5233 "%.8lx\n", is_mov ? "move" : "ALU",
5234 (unsigned long) insn);
5235
5236 /* Instruction is of form:
5237
5238 <op><cond> rd, [rn,] #imm
5239
5240 Rewrite as:
5241
5242 Preparation: tmp1, tmp2 <- r0, r1;
5243 r0, r1 <- rd, rn
5244 Insn: <op><cond> r0, r1, #imm
5245 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5246 */
5247
36073a92
YQ
5248 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5249 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5250 rn_val = displaced_read_reg (regs, dsc, rn);
5251 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5252 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5253 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5254 dsc->rd = rd;
5255
5256 if (is_mov)
5257 dsc->modinsn[0] = insn & 0xfff00fff;
5258 else
5259 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5260
5261 dsc->cleanup = &cleanup_alu_imm;
5262
5263 return 0;
5264}
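/* Worked example (illustrative, added for exposition): for
   "add r3, pc, #4" (0xE28F3004) the rewrite above produces
   (0xE28F3004 & 0xFFF00FFF) | 0x10000 = 0xE2810004, i.e. "add r0, r1, #4".
   Before the copy runs, r1 is loaded with the PC value the original
   instruction would have seen (from + 8) and r0 with the old r3; afterwards
   cleanup_alu_imm moves the result from r0 back into r3 and restores r0 and
   r1 from dsc->tmp[].  */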
5265
34518530
YQ
5266static int
5267thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5268 uint16_t insn2, struct regcache *regs,
5269 struct displaced_step_closure *dsc)
5270{
5271 unsigned int op = bits (insn1, 5, 8);
5272 unsigned int rn, rm, rd;
5273 ULONGEST rd_val, rn_val;
5274
5275 rn = bits (insn1, 0, 3); /* Rn */
5276 rm = bits (insn2, 0, 3); /* Rm */
5277 rd = bits (insn2, 8, 11); /* Rd */
5278
5279  /* This routine is only called for the MOV instruction.  */
5280 gdb_assert (op == 0x2 && rn == 0xf);
5281
5282 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5283 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5284
5285 if (debug_displaced)
5286 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5287 "ALU", insn1, insn2);
5288
5289 /* Instruction is of form:
5290
5291 <op><cond> rd, [rn,] #imm
5292
5293 Rewrite as:
5294
5295 Preparation: tmp1, tmp2 <- r0, r1;
5296 r0, r1 <- rd, rn
5297 Insn: <op><cond> r0, r1, #imm
5298 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5299 */
5300
5301 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5302 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5303 rn_val = displaced_read_reg (regs, dsc, rn);
5304 rd_val = displaced_read_reg (regs, dsc, rd);
5305 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5306 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5307 dsc->rd = rd;
5308
5309 dsc->modinsn[0] = insn1;
5310 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5311 dsc->numinsns = 2;
5312
5313 dsc->cleanup = &cleanup_alu_imm;
5314
5315 return 0;
5316}
5317
cca44b1b
JB
5318/* Copy/cleanup arithmetic/logic insns with register RHS. */
5319
5320static void
6e39997a 5321cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5322 struct regcache *regs, struct displaced_step_closure *dsc)
5323{
5324 ULONGEST rd_val;
5325 int i;
5326
36073a92 5327 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5328
5329 for (i = 0; i < 3; i++)
5330 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5331
5332 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5333}
5334
7ff120b4
YQ
5335static void
5336install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5337 struct displaced_step_closure *dsc,
5338 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5339{
cca44b1b 5340 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5341
cca44b1b
JB
5342 /* Instruction is of form:
5343
5344 <op><cond> rd, [rn,] rm [, <shift>]
5345
5346 Rewrite as:
5347
5348 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5349 r0, r1, r2 <- rd, rn, rm
ef713951 5350 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5351 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5352 */
5353
36073a92
YQ
5354 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5355 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5356 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5357 rd_val = displaced_read_reg (regs, dsc, rd);
5358 rn_val = displaced_read_reg (regs, dsc, rn);
5359 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5360 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5361 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5362 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5363 dsc->rd = rd;
5364
7ff120b4
YQ
5365 dsc->cleanup = &cleanup_alu_reg;
5366}
5367
5368static int
5369arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5370 struct displaced_step_closure *dsc)
5371{
5372 unsigned int op = bits (insn, 21, 24);
5373 int is_mov = (op == 0xd);
5374
5375 if (!insn_references_pc (insn, 0x000ff00ful))
5376 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5377
5378 if (debug_displaced)
5379 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5380 is_mov ? "move" : "ALU", (unsigned long) insn);
5381
cca44b1b
JB
5382 if (is_mov)
5383 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5384 else
5385 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5386
7ff120b4
YQ
5387 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5388 bits (insn, 0, 3));
cca44b1b
JB
5389 return 0;
5390}
5391
34518530
YQ
5392static int
5393thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5394 struct regcache *regs,
5395 struct displaced_step_closure *dsc)
5396{
ef713951 5397 unsigned rm, rd;
34518530 5398
ef713951
YQ
5399 rm = bits (insn, 3, 6);
5400 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5401
ef713951 5402 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5403 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5404
5405 if (debug_displaced)
ef713951
YQ
5406 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5407 (unsigned short) insn);
34518530 5408
ef713951 5409 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5410
ef713951 5411 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5412
5413 return 0;
5414}
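/* Worked example (illustrative, added for exposition): for the Thumb
   instruction "mov pc, r3" (0x469F), rd = 15 and rm = 3, so the copy cannot
   run unmodified.  The rewrite above yields (0x469F & 0xFF00) | 0x10 = 0x4610,
   i.e. "mov r0, r2"; r2 is primed with the old value of r3, and
   cleanup_alu_reg then writes the r0 result to the PC using ALU_WRITE_PC.  */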
5415
cca44b1b
JB
5416/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5417
5418static void
6e39997a 5419cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5420 struct regcache *regs,
5421 struct displaced_step_closure *dsc)
5422{
36073a92 5423 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5424 int i;
5425
5426 for (i = 0; i < 4; i++)
5427 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5428
5429 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5430}
5431
7ff120b4
YQ
5432static void
5433install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5434 struct displaced_step_closure *dsc,
5435 unsigned int rd, unsigned int rn, unsigned int rm,
5436 unsigned rs)
cca44b1b 5437{
7ff120b4 5438 int i;
cca44b1b 5439 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5440
cca44b1b
JB
5441 /* Instruction is of form:
5442
5443 <op><cond> rd, [rn,] rm, <shift> rs
5444
5445 Rewrite as:
5446
5447 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5448 r0, r1, r2, r3 <- rd, rn, rm, rs
5449 Insn: <op><cond> r0, r1, r2, <shift> r3
5450 Cleanup: tmp5 <- r0
5451 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5452 rd <- tmp5
5453 */
5454
5455 for (i = 0; i < 4; i++)
36073a92 5456 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5457
36073a92
YQ
5458 rd_val = displaced_read_reg (regs, dsc, rd);
5459 rn_val = displaced_read_reg (regs, dsc, rn);
5460 rm_val = displaced_read_reg (regs, dsc, rm);
5461 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5462 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5463 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5464 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5465 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5466 dsc->rd = rd;
7ff120b4
YQ
5467 dsc->cleanup = &cleanup_alu_shifted_reg;
5468}
5469
5470static int
5471arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5472 struct regcache *regs,
5473 struct displaced_step_closure *dsc)
5474{
5475 unsigned int op = bits (insn, 21, 24);
5476 int is_mov = (op == 0xd);
5477 unsigned int rd, rn, rm, rs;
5478
5479 if (!insn_references_pc (insn, 0x000fff0ful))
5480 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5481
5482 if (debug_displaced)
5483 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5484 "%.8lx\n", is_mov ? "move" : "ALU",
5485 (unsigned long) insn);
5486
5487 rn = bits (insn, 16, 19);
5488 rm = bits (insn, 0, 3);
5489 rs = bits (insn, 8, 11);
5490 rd = bits (insn, 12, 15);
cca44b1b
JB
5491
5492 if (is_mov)
5493 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5494 else
5495 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5496
7ff120b4 5497 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5498
5499 return 0;
5500}
5501
5502/* Clean up load instructions. */
5503
5504static void
6e39997a 5505cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5506 struct displaced_step_closure *dsc)
5507{
5508 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5509
36073a92 5510 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5511 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5512 rt_val2 = displaced_read_reg (regs, dsc, 1);
5513 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5514
5515 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5516 if (dsc->u.ldst.xfersize > 4)
5517 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5518 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5519 if (!dsc->u.ldst.immed)
5520 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5521
5522 /* Handle register writeback. */
5523 if (dsc->u.ldst.writeback)
5524 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5525 /* Put result in right place. */
5526 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5527 if (dsc->u.ldst.xfersize == 8)
5528 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5529}
5530
5531/* Clean up store instructions. */
5532
5533static void
6e39997a 5534cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5535 struct displaced_step_closure *dsc)
5536{
36073a92 5537 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5538
5539 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5540 if (dsc->u.ldst.xfersize > 4)
5541 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5542 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5543 if (!dsc->u.ldst.immed)
5544 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5545 if (!dsc->u.ldst.restore_r4)
5546 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5547
5548 /* Writeback. */
5549 if (dsc->u.ldst.writeback)
5550 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5551}
5552
5553/* Copy "extra" load/store instructions. These are halfword/doubleword
5554 transfers, which have a different encoding to byte/word transfers. */
5555
5556static int
550dc4e2 5557arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5558 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5559{
5560 unsigned int op1 = bits (insn, 20, 24);
5561 unsigned int op2 = bits (insn, 5, 6);
5562 unsigned int rt = bits (insn, 12, 15);
5563 unsigned int rn = bits (insn, 16, 19);
5564 unsigned int rm = bits (insn, 0, 3);
5565 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5566 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5567 int immed = (op1 & 0x4) != 0;
5568 int opcode;
5569 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5570
5571 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5572 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5573
5574 if (debug_displaced)
5575 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5576 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5577 (unsigned long) insn);
5578
5579 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5580
5581 if (opcode < 0)
5582 internal_error (__FILE__, __LINE__,
5583 _("copy_extra_ld_st: instruction decode error"));
5584
36073a92
YQ
5585 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5586 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5587 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5588 if (!immed)
36073a92 5589 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5590
36073a92 5591 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5592 if (bytesize[opcode] == 8)
36073a92
YQ
5593 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5594 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5595 if (!immed)
36073a92 5596 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5597
5598 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5599 if (bytesize[opcode] == 8)
5600 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5601 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5602 if (!immed)
5603 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5604
5605 dsc->rd = rt;
5606 dsc->u.ldst.xfersize = bytesize[opcode];
5607 dsc->u.ldst.rn = rn;
5608 dsc->u.ldst.immed = immed;
5609 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5610 dsc->u.ldst.restore_r4 = 0;
5611
5612 if (immed)
5613 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5614 ->
5615 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5616 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5617 else
5618 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5619 ->
5620 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5621 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5622
5623 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5624
5625 return 0;
5626}
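/* Explanatory remark (added for exposition): the opcode index computed in
   arm_copy_extra_ld_st selects entries of the load[] and bytesize[] tables in
   the order STRH, LDRH, STRH (imm), LDRH (imm), LDRD, LDRSB, LDRD (imm),
   LDRSB (imm), STRD, LDRSH, STRD (imm), LDRSH (imm); LDRD, for instance,
   yields opcode 4, which the tables mark as a load of 8 bytes.  */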
5627
0f6f04ba 5628/* Copy byte/half word/word loads and stores. */
cca44b1b 5629
7ff120b4 5630static void
0f6f04ba
YQ
5631install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5632 struct displaced_step_closure *dsc, int load,
5633 int immed, int writeback, int size, int usermode,
5634 int rt, int rm, int rn)
cca44b1b 5635{
cca44b1b 5636 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5637
36073a92
YQ
5638 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5639 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5640 if (!immed)
36073a92 5641 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5642 if (!load)
36073a92 5643 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5644
36073a92
YQ
5645 rt_val = displaced_read_reg (regs, dsc, rt);
5646 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5647 if (!immed)
36073a92 5648 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5649
5650 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5651 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5652 if (!immed)
5653 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5654 dsc->rd = rt;
0f6f04ba 5655 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5656 dsc->u.ldst.rn = rn;
5657 dsc->u.ldst.immed = immed;
7ff120b4 5658 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5659
5660 /* To write PC we can do:
5661
494e194e
YQ
5662 Before this sequence of instructions:
5663     r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5664     r2 is the Rn value obtained from displaced_read_reg.
5665
5666 Insn1: push {pc} Write address of STR instruction + offset on stack
5667 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5668 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5669 = addr(Insn1) + offset - addr(Insn3) - 8
5670 = offset - 16
5671 Insn4: add r4, r4, #8 r4 = offset - 8
5672 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5673 = from + offset
5674 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5675
5676 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5677 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5678 of this can be found in Section "Saving from r15" in
5679 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5680
7ff120b4
YQ
5681 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5682}
5683
34518530
YQ
5684
5685static int
5686thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5687 uint16_t insn2, struct regcache *regs,
5688 struct displaced_step_closure *dsc, int size)
5689{
5690 unsigned int u_bit = bit (insn1, 7);
5691 unsigned int rt = bits (insn2, 12, 15);
5692 int imm12 = bits (insn2, 0, 11);
5693 ULONGEST pc_val;
5694
5695 if (debug_displaced)
5696 fprintf_unfiltered (gdb_stdlog,
5697 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5698 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5699 imm12);
5700
5701 if (!u_bit)
5702 imm12 = -1 * imm12;
5703
5704  /* Rewrite the instruction LDR Rt, [PC, #imm12] as:
5705
5706     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5707
5708     Insn: LDR R0, [R2, R3]
5709
5710     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */
5711
5712
5713 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5714 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5715 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5716
5717 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5718
5719 pc_val = pc_val & 0xfffffffc;
5720
5721 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5722 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5723
5724 dsc->rd = rt;
5725
5726 dsc->u.ldst.xfersize = size;
5727 dsc->u.ldst.immed = 0;
5728 dsc->u.ldst.writeback = 0;
5729 dsc->u.ldst.restore_r4 = 0;
5730
5731  /* LDR R0, [R2, R3] */
5732 dsc->modinsn[0] = 0xf852;
5733 dsc->modinsn[1] = 0x3;
5734 dsc->numinsns = 2;
5735
5736 dsc->cleanup = &cleanup_load;
5737
5738 return 0;
5739}
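/* Explanatory remark (added for exposition): the two halfwords 0xF852/0x0003
   stored in modinsn above encode the 32-bit Thumb-2 instruction
   LDR.W R0, [R2, R3].  Since r2 was primed with Align (original PC, 4) and r3
   with the signed imm12, the displaced copy reads the same literal-pool word
   as the original "ldr Rt, [pc, #imm12]", and cleanup_load then moves the
   result from r0 into Rt.  */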
5740
5741static int
5742thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5743 uint16_t insn2, struct regcache *regs,
5744 struct displaced_step_closure *dsc,
5745 int writeback, int immed)
5746{
5747 unsigned int rt = bits (insn2, 12, 15);
5748 unsigned int rn = bits (insn1, 0, 3);
5749 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5750 /* In LDR (register), there is also a register Rm, which is not allowed to
5751 be PC, so we don't have to check it. */
5752
5753 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5754 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5755 dsc);
5756
5757 if (debug_displaced)
5758 fprintf_unfiltered (gdb_stdlog,
5759 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5760 rt, rn, insn1, insn2);
5761
5762 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5763 0, rt, rm, rn);
5764
5765 dsc->u.ldst.restore_r4 = 0;
5766
5767 if (immed)
5768 /* ldr[b]<cond> rt, [rn, #imm], etc.
5769 ->
5770 ldr[b]<cond> r0, [r2, #imm]. */
5771 {
5772 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5773 dsc->modinsn[1] = insn2 & 0x0fff;
5774 }
5775 else
5776 /* ldr[b]<cond> rt, [rn, rm], etc.
5777 ->
5778 ldr[b]<cond> r0, [r2, r3]. */
5779 {
5780 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5781 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5782 }
5783
5784 dsc->numinsns = 2;
5785
5786 return 0;
5787}
5788
5789
7ff120b4
YQ
5790static int
5791arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5792 struct regcache *regs,
5793 struct displaced_step_closure *dsc,
0f6f04ba 5794 int load, int size, int usermode)
7ff120b4
YQ
5795{
5796 int immed = !bit (insn, 25);
5797 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5798 unsigned int rt = bits (insn, 12, 15);
5799 unsigned int rn = bits (insn, 16, 19);
5800 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5801
5802 if (!insn_references_pc (insn, 0x000ff00ful))
5803 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5804
5805 if (debug_displaced)
5806 fprintf_unfiltered (gdb_stdlog,
5807 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5808 load ? (size == 1 ? "ldrb" : "ldr")
5809 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5810 rt, rn,
5811 (unsigned long) insn);
5812
0f6f04ba
YQ
5813 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5814 usermode, rt, rm, rn);
7ff120b4 5815
bf9f652a 5816 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5817 {
5818 dsc->u.ldst.restore_r4 = 0;
5819
5820 if (immed)
5821 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5822 ->
5823 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5824 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5825 else
5826 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5827 ->
5828 {ldr,str}[b]<cond> r0, [r2, r3]. */
5829 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5830 }
5831 else
5832 {
5833 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5834 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5835 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5836 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5837 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5838 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5839 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5840
5841 /* As above. */
5842 if (immed)
5843 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5844 else
5845 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5846
cca44b1b
JB
5847 dsc->numinsns = 6;
5848 }
5849
5850 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5851
5852 return 0;
5853}
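/* Explanatory remark (added for exposition): the six-instruction sequence
   above is used only when a store has the PC as its source register
   (e.g. "str pc, [r0]").  The push/pop/sub/add trick measures, at run time,
   the architecture-dependent offset a store applies to the PC and leaves that
   offset minus 8 in r4, so adding r4 to r0 (primed with from + 8) yields
   from + offset, the exact value the original store would have written.  */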
5854
5855/* Cleanup LDM instructions with fully-populated register list. This is an
5856 unfortunate corner case: it's impossible to implement correctly by modifying
5857 the instruction. The issue is as follows: we have an instruction,
5858
5859 ldm rN, {r0-r15}
5860
5861 which we must rewrite to avoid loading PC. A possible solution would be to
5862 do the load in two halves, something like (with suitable cleanup
5863 afterwards):
5864
5865 mov r8, rN
5866 ldm[id][ab] r8!, {r0-r7}
5867 str r7, <temp>
5868 ldm[id][ab] r8, {r7-r14}
5869 <bkpt>
5870
5871 but at present there's no suitable place for <temp>, since the scratch space
5872 is overwritten before the cleanup routine is called. For now, we simply
5873 emulate the instruction. */
5874
5875static void
5876cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5877 struct displaced_step_closure *dsc)
5878{
cca44b1b
JB
5879 int inc = dsc->u.block.increment;
5880 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5881 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5882 uint32_t regmask = dsc->u.block.regmask;
5883 int regno = inc ? 0 : 15;
5884 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5885 int exception_return = dsc->u.block.load && dsc->u.block.user
5886 && (regmask & 0x8000) != 0;
36073a92 5887 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5888 int do_transfer = condition_true (dsc->u.block.cond, status);
5889 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5890
5891 if (!do_transfer)
5892 return;
5893
5894 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5895 sensible we can do here. Complain loudly. */
5896 if (exception_return)
5897 error (_("Cannot single-step exception return"));
5898
5899 /* We don't handle any stores here for now. */
5900 gdb_assert (dsc->u.block.load != 0);
5901
5902 if (debug_displaced)
5903 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5904 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5905 dsc->u.block.increment ? "inc" : "dec",
5906 dsc->u.block.before ? "before" : "after");
5907
5908 while (regmask)
5909 {
5910 uint32_t memword;
5911
5912 if (inc)
bf9f652a 5913 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5914 regno++;
5915 else
5916 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5917 regno--;
5918
5919 xfer_addr += bump_before;
5920
5921 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5922 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5923
5924 xfer_addr += bump_after;
5925
5926 regmask &= ~(1 << regno);
5927 }
5928
5929 if (dsc->u.block.writeback)
5930 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5931 CANNOT_WRITE_PC);
5932}
5933
5934/* Clean up an STM which included the PC in the register list. */
5935
5936static void
5937cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5938 struct displaced_step_closure *dsc)
5939{
36073a92 5940 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5941 int store_executed = condition_true (dsc->u.block.cond, status);
5942 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5943 CORE_ADDR stm_insn_addr;
5944 uint32_t pc_val;
5945 long offset;
5946 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5947
5948 /* If condition code fails, there's nothing else to do. */
5949 if (!store_executed)
5950 return;
5951
5952 if (dsc->u.block.increment)
5953 {
5954 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5955
5956 if (dsc->u.block.before)
5957 pc_stored_at += 4;
5958 }
5959 else
5960 {
5961 pc_stored_at = dsc->u.block.xfer_addr;
5962
5963 if (dsc->u.block.before)
5964 pc_stored_at -= 4;
5965 }
5966
5967 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5968 stm_insn_addr = dsc->scratch_base;
5969 offset = pc_val - stm_insn_addr;
5970
5971 if (debug_displaced)
5972 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5973 "STM instruction\n", offset);
5974
5975 /* Rewrite the stored PC to the proper value for the non-displaced original
5976 instruction. */
5977 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5978 dsc->insn_addr + offset);
5979}
5980
5981/* Clean up an LDM which includes the PC in the register list. We clumped all
5982 the registers in the transferred list into a contiguous range r0...rX (to
5983 avoid loading PC directly and losing control of the debugged program), so we
5984 must undo that here. */
5985
5986static void
6e39997a 5987cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5988 struct regcache *regs,
5989 struct displaced_step_closure *dsc)
5990{
36073a92 5991 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5992 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5993 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5994 unsigned int regs_loaded = bitcount (mask);
5995 unsigned int num_to_shuffle = regs_loaded, clobbered;
5996
5997 /* The method employed here will fail if the register list is fully populated
5998 (we need to avoid loading PC directly). */
5999 gdb_assert (num_to_shuffle < 16);
6000
6001 if (!load_executed)
6002 return;
6003
6004 clobbered = (1 << num_to_shuffle) - 1;
6005
6006 while (num_to_shuffle > 0)
6007 {
6008 if ((mask & (1 << write_reg)) != 0)
6009 {
6010 unsigned int read_reg = num_to_shuffle - 1;
6011
6012 if (read_reg != write_reg)
6013 {
36073a92 6014 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
6015 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6016 if (debug_displaced)
6017 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6018 "loaded register r%d to r%d\n"), read_reg,
6019 write_reg);
6020 }
6021 else if (debug_displaced)
6022 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6023 "r%d already in the right place\n"),
6024 write_reg);
6025
6026 clobbered &= ~(1 << write_reg);
6027
6028 num_to_shuffle--;
6029 }
6030
6031 write_reg--;
6032 }
6033
6034 /* Restore any registers we scribbled over. */
6035 for (write_reg = 0; clobbered != 0; write_reg++)
6036 {
6037 if ((clobbered & (1 << write_reg)) != 0)
6038 {
6039 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6040 CANNOT_WRITE_PC);
6041 if (debug_displaced)
6042 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6043 "clobbered register r%d\n"), write_reg);
6044 clobbered &= ~(1 << write_reg);
6045 }
6046 }
6047
6048 /* Perform register writeback manually. */
6049 if (dsc->u.block.writeback)
6050 {
6051 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6052
6053 if (dsc->u.block.increment)
6054 new_rn_val += regs_loaded * 4;
6055 else
6056 new_rn_val -= regs_loaded * 4;
6057
6058 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6059 CANNOT_WRITE_PC);
6060 }
6061}
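/* Worked example (illustrative, added for exposition): for
   "ldm r0, {r1, r2, pc}" the copy routine (arm_copy_block_xfer below)
   executes "ldm r0, {r0, r1, r2}" instead (new_regmask = 0x7).  This cleanup
   then walks write_reg downwards from 15: pc <- r2, r2 <- r1, r1 <- r0, and
   finally the clobbered r0 is restored from dsc->tmp[0].  */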
6062
6063/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6064 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6065
6066static int
7ff120b4
YQ
6067arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6068 struct regcache *regs,
6069 struct displaced_step_closure *dsc)
cca44b1b
JB
6070{
6071 int load = bit (insn, 20);
6072 int user = bit (insn, 22);
6073 int increment = bit (insn, 23);
6074 int before = bit (insn, 24);
6075 int writeback = bit (insn, 21);
6076 int rn = bits (insn, 16, 19);
cca44b1b 6077
0963b4bd
MS
6078 /* Block transfers which don't mention PC can be run directly
6079 out-of-line. */
bf9f652a 6080 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6081 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6082
bf9f652a 6083 if (rn == ARM_PC_REGNUM)
cca44b1b 6084 {
0963b4bd
MS
6085 warning (_("displaced: Unpredictable LDM or STM with "
6086 "base register r15"));
7ff120b4 6087 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6088 }
6089
6090 if (debug_displaced)
6091 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6092 "%.8lx\n", (unsigned long) insn);
6093
36073a92 6094 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6095 dsc->u.block.rn = rn;
6096
6097 dsc->u.block.load = load;
6098 dsc->u.block.user = user;
6099 dsc->u.block.increment = increment;
6100 dsc->u.block.before = before;
6101 dsc->u.block.writeback = writeback;
6102 dsc->u.block.cond = bits (insn, 28, 31);
6103
6104 dsc->u.block.regmask = insn & 0xffff;
6105
6106 if (load)
6107 {
6108 if ((insn & 0xffff) == 0xffff)
6109 {
6110 /* LDM with a fully-populated register list. This case is
6111 particularly tricky. Implement for now by fully emulating the
6112 instruction (which might not behave perfectly in all cases, but
6113 these instructions should be rare enough for that not to matter
6114 too much). */
6115 dsc->modinsn[0] = ARM_NOP;
6116
6117 dsc->cleanup = &cleanup_block_load_all;
6118 }
6119 else
6120 {
6121 /* LDM of a list of registers which includes PC. Implement by
6122 rewriting the list of registers to be transferred into a
6123 contiguous chunk r0...rX before doing the transfer, then shuffling
6124 registers into the correct places in the cleanup routine. */
6125 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6126 unsigned int num_in_list = bitcount (regmask), new_regmask;
6127 unsigned int i;
cca44b1b
JB
6128
6129 for (i = 0; i < num_in_list; i++)
36073a92 6130 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6131
6132 /* Writeback makes things complicated. We need to avoid clobbering
6133 the base register with one of the registers in our modified
6134 register list, but just using a different register can't work in
6135 all cases, e.g.:
6136
6137 ldm r14!, {r0-r13,pc}
6138
6139 which would need to be rewritten as:
6140
6141 ldm rN!, {r0-r14}
6142
6143 but that can't work, because there's no free register for N.
6144
6145 Solve this by turning off the writeback bit, and emulating
6146 writeback manually in the cleanup routine. */
6147
6148 if (writeback)
6149 insn &= ~(1 << 21);
6150
6151 new_regmask = (1 << num_in_list) - 1;
6152
6153 if (debug_displaced)
6154 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6155 "{..., pc}: original reg list %.4x, modified "
6156 "list %.4x\n"), rn, writeback ? "!" : "",
6157 (int) insn & 0xffff, new_regmask);
6158
6159 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6160
6161 dsc->cleanup = &cleanup_block_load_pc;
6162 }
6163 }
6164 else
6165 {
6166 /* STM of a list of registers which includes PC. Run the instruction
6167 as-is, but out of line: this will store the wrong value for the PC,
6168 so we must manually fix up the memory in the cleanup routine.
6169 Doing things this way has the advantage that we can auto-detect
6170 the offset of the PC write (which is architecture-dependent) in
6171 the cleanup routine. */
6172 dsc->modinsn[0] = insn;
6173
6174 dsc->cleanup = &cleanup_block_store_pc;
6175 }
6176
6177 return 0;
6178}
6179
34518530
YQ
6180static int
6181thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6182 struct regcache *regs,
6183 struct displaced_step_closure *dsc)
cca44b1b 6184{
34518530
YQ
6185 int rn = bits (insn1, 0, 3);
6186 int load = bit (insn1, 4);
6187 int writeback = bit (insn1, 5);
cca44b1b 6188
34518530
YQ
6189 /* Block transfers which don't mention PC can be run directly
6190 out-of-line. */
6191 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6192 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6193
34518530
YQ
6194 if (rn == ARM_PC_REGNUM)
6195 {
6196 warning (_("displaced: Unpredictable LDM or STM with "
6197 "base register r15"));
6198 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6199 "unpredictable ldm/stm", dsc);
6200 }
cca44b1b
JB
6201
6202 if (debug_displaced)
34518530
YQ
6203 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6204 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6205
34518530
YQ
6206  /* Clear bit 13, since it should always be zero.  */
6207 dsc->u.block.regmask = (insn2 & 0xdfff);
6208 dsc->u.block.rn = rn;
cca44b1b 6209
34518530
YQ
6210 dsc->u.block.load = load;
6211 dsc->u.block.user = 0;
6212 dsc->u.block.increment = bit (insn1, 7);
6213 dsc->u.block.before = bit (insn1, 8);
6214 dsc->u.block.writeback = writeback;
6215 dsc->u.block.cond = INST_AL;
6216 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6217
34518530
YQ
6218 if (load)
6219 {
6220 if (dsc->u.block.regmask == 0xffff)
6221 {
6222	  /* Cannot happen: bit 13 was cleared above, so the register mask is never 0xffff.  */
6223 gdb_assert (0);
6224 }
6225 else
6226 {
6227 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6228 unsigned int num_in_list = bitcount (regmask), new_regmask;
6229 unsigned int i;
34518530
YQ
6230
6231 for (i = 0; i < num_in_list; i++)
6232 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6233
6234 if (writeback)
6235 insn1 &= ~(1 << 5);
6236
6237 new_regmask = (1 << num_in_list) - 1;
6238
6239 if (debug_displaced)
6240 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6241 "{..., pc}: original reg list %.4x, modified "
6242 "list %.4x\n"), rn, writeback ? "!" : "",
6243 (int) dsc->u.block.regmask, new_regmask);
6244
6245 dsc->modinsn[0] = insn1;
6246 dsc->modinsn[1] = (new_regmask & 0xffff);
6247 dsc->numinsns = 2;
6248
6249 dsc->cleanup = &cleanup_block_load_pc;
6250 }
6251 }
6252 else
6253 {
6254 dsc->modinsn[0] = insn1;
6255 dsc->modinsn[1] = insn2;
6256 dsc->numinsns = 2;
6257 dsc->cleanup = &cleanup_block_store_pc;
6258 }
6259 return 0;
6260}
6261
d9311bfa
AT
6262/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6263 This is used to avoid a dependency on BFD's bfd_endian enum. */
6264
6265ULONGEST
6266arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6267 int byte_order)
6268{
5f2dfcfd
AT
6269 return read_memory_unsigned_integer (memaddr, len,
6270 (enum bfd_endian) byte_order);
d9311bfa
AT
6271}
6272
6273/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6274
6275CORE_ADDR
6276arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6277 CORE_ADDR val)
6278{
6279 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6280}
6281
6282/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6283
e7cf25a8 6284static CORE_ADDR
553cb527 6285arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6286{
d9311bfa
AT
6287 return 0;
6288}
6289
6290/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6291
6292int
6293arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6294{
6295 return arm_is_thumb (self->regcache);
6296}
6297
6298/* single_step() is called just before we want to resume the inferior,
6299   if we want to single-step it but there is no hardware or kernel
6300   single-step support.  We find the possible targets of the coming
6301   instruction and set breakpoints on them.  */
6302
6303int
6304arm_software_single_step (struct frame_info *frame)
6305{
6306 struct regcache *regcache = get_current_regcache ();
6307 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6308 struct address_space *aspace = get_regcache_aspace (regcache);
6309 struct arm_get_next_pcs next_pcs_ctx;
6310 CORE_ADDR pc;
6311 int i;
6312 VEC (CORE_ADDR) *next_pcs = NULL;
6313 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6314
6315 arm_get_next_pcs_ctor (&next_pcs_ctx,
6316 &arm_get_next_pcs_ops,
6317 gdbarch_byte_order (gdbarch),
6318 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6319 0,
d9311bfa
AT
6320 regcache);
6321
4d18591b 6322 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa
AT
6323
6324 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6325 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6326
6327 do_cleanups (old_chain);
6328
6329 return 1;
6330}
6331
34518530
YQ
6332/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6333 for Linux, where some SVC instructions must be treated specially. */
6334
6335static void
6336cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6337 struct displaced_step_closure *dsc)
6338{
6339 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6340
6341 if (debug_displaced)
6342 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6343 "%.8lx\n", (unsigned long) resume_addr);
6344
6345 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6346}
6347
6348
6349/* Common copy routine for the svc instruction.  */
6350
6351static int
6352install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6353 struct displaced_step_closure *dsc)
6354{
6355 /* Preparation: none.
6356 Insn: unmodified svc.
6357 Cleanup: pc <- insn_addr + insn_size. */
6358
6359 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6360 instruction. */
6361 dsc->wrote_to_pc = 1;
6362
6363 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6364 if (dsc->u.svc.copy_svc_os)
6365 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6366 else
6367 {
6368 dsc->cleanup = &cleanup_svc;
6369 return 0;
6370 }
34518530
YQ
6371}
6372
6373static int
6374arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6375 struct regcache *regs, struct displaced_step_closure *dsc)
6376{
6377
6378 if (debug_displaced)
6379 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6380 (unsigned long) insn);
6381
6382 dsc->modinsn[0] = insn;
6383
6384 return install_svc (gdbarch, regs, dsc);
6385}
6386
6387static int
6388thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6389 struct regcache *regs, struct displaced_step_closure *dsc)
6390{
6391
6392 if (debug_displaced)
6393 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6394 insn);
bd18283a 6395
34518530
YQ
6396 dsc->modinsn[0] = insn;
6397
6398 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6399}
6400
6401/* Copy undefined instructions. */
6402
6403static int
7ff120b4
YQ
6404arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6405 struct displaced_step_closure *dsc)
cca44b1b
JB
6406{
6407 if (debug_displaced)
0963b4bd
MS
6408 fprintf_unfiltered (gdb_stdlog,
6409 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6410 (unsigned long) insn);
6411
6412 dsc->modinsn[0] = insn;
6413
6414 return 0;
6415}
6416
34518530
YQ
6417static int
6418thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6419 struct displaced_step_closure *dsc)
6420{
6421
6422 if (debug_displaced)
6423 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6424 "%.4x %.4x\n", (unsigned short) insn1,
6425 (unsigned short) insn2);
6426
6427 dsc->modinsn[0] = insn1;
6428 dsc->modinsn[1] = insn2;
6429 dsc->numinsns = 2;
6430
6431 return 0;
6432}
6433
cca44b1b
JB
6434/* Copy unpredictable instructions. */
6435
6436static int
7ff120b4
YQ
6437arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6438 struct displaced_step_closure *dsc)
cca44b1b
JB
6439{
6440 if (debug_displaced)
6441 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6442 "%.8lx\n", (unsigned long) insn);
6443
6444 dsc->modinsn[0] = insn;
6445
6446 return 0;
6447}
6448
6449/* The decode_* functions are instruction decoding helpers. They mostly follow
6450 the presentation in the ARM ARM. */
6451
6452static int
7ff120b4
YQ
6453arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6454 struct regcache *regs,
6455 struct displaced_step_closure *dsc)
cca44b1b
JB
6456{
6457 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6458 unsigned int rn = bits (insn, 16, 19);
6459
6460 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6461 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6462 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6463 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6464 else if ((op1 & 0x60) == 0x20)
7ff120b4 6465 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6466 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6467 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6468 dsc);
cca44b1b 6469 else if ((op1 & 0x77) == 0x41)
7ff120b4 6470 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6471 else if ((op1 & 0x77) == 0x45)
7ff120b4 6472 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6473 else if ((op1 & 0x77) == 0x51)
6474 {
6475 if (rn != 0xf)
7ff120b4 6476 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6477 else
7ff120b4 6478 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6479 }
6480 else if ((op1 & 0x77) == 0x55)
7ff120b4 6481 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6482 else if (op1 == 0x57)
6483 switch (op2)
6484 {
7ff120b4
YQ
6485 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6486 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6487 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6488 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6489 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6490 }
6491 else if ((op1 & 0x63) == 0x43)
7ff120b4 6492 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6493 else if ((op2 & 0x1) == 0x0)
6494 switch (op1 & ~0x80)
6495 {
6496 case 0x61:
7ff120b4 6497 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6498 case 0x65:
7ff120b4 6499 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6500 case 0x71: case 0x75:
6501 /* pld/pldw reg. */
7ff120b4 6502 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6503 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6504 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6505 default:
7ff120b4 6506 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6507 }
6508 else
7ff120b4 6509 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6510}
6511
6512static int
7ff120b4
YQ
6513arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6514 struct regcache *regs,
6515 struct displaced_step_closure *dsc)
cca44b1b
JB
6516{
6517 if (bit (insn, 27) == 0)
7ff120b4 6518 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6519 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6520 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6521 {
6522 case 0x0: case 0x2:
7ff120b4 6523 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6524
6525 case 0x1: case 0x3:
7ff120b4 6526 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6527
6528 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6529 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6530
6531 case 0x8:
6532 switch ((insn & 0xe00000) >> 21)
6533 {
6534 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6535 /* stc/stc2. */
7ff120b4 6536 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6537
6538 case 0x2:
7ff120b4 6539 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6540
6541 default:
7ff120b4 6542 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6543 }
6544
6545 case 0x9:
6546 {
6547 int rn_f = (bits (insn, 16, 19) == 0xf);
6548 switch ((insn & 0xe00000) >> 21)
6549 {
6550 case 0x1: case 0x3:
6551 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6552 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6553 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6554
6555 case 0x2:
7ff120b4 6556 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6557
6558 case 0x4: case 0x5: case 0x6: case 0x7:
6559 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6560 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6561 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6562
6563 default:
7ff120b4 6564 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6565 }
6566 }
6567
6568 case 0xa:
7ff120b4 6569 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6570
6571 case 0xb:
6572 if (bits (insn, 16, 19) == 0xf)
6573 /* ldc/ldc2 lit. */
7ff120b4 6574 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6575 else
7ff120b4 6576 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6577
6578 case 0xc:
6579 if (bit (insn, 4))
7ff120b4 6580 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6581 else
7ff120b4 6582 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6583
6584 case 0xd:
6585 if (bit (insn, 4))
7ff120b4 6586 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6587 else
7ff120b4 6588 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6589
6590 default:
7ff120b4 6591 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6592 }
6593}
6594
6595/* Decode miscellaneous instructions in dp/misc encoding space. */
6596
6597static int
7ff120b4
YQ
6598arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6599 struct regcache *regs,
6600 struct displaced_step_closure *dsc)
cca44b1b
JB
6601{
6602 unsigned int op2 = bits (insn, 4, 6);
6603 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6604
6605 switch (op2)
6606 {
6607 case 0x0:
7ff120b4 6608 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6609
6610 case 0x1:
6611 if (op == 0x1) /* bx. */
7ff120b4 6612 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6613 else if (op == 0x3)
7ff120b4 6614 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6615 else
7ff120b4 6616 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6617
6618 case 0x2:
6619 if (op == 0x1)
6620 /* Not really supported. */
7ff120b4 6621 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6622 else
7ff120b4 6623 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6624
6625 case 0x3:
6626 if (op == 0x1)
7ff120b4 6627 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6628 regs, dsc); /* blx register. */
cca44b1b 6629 else
7ff120b4 6630 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6631
6632 case 0x5:
7ff120b4 6633 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6634
6635 case 0x7:
6636 if (op == 0x1)
7ff120b4 6637 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6638 else if (op == 0x3)
6639 /* Not really supported. */
7ff120b4 6640 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6641
6642 default:
7ff120b4 6643 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6644 }
6645}
6646
6647static int
7ff120b4
YQ
6648arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6649 struct regcache *regs,
6650 struct displaced_step_closure *dsc)
cca44b1b
JB
6651{
6652 if (bit (insn, 25))
6653 switch (bits (insn, 20, 24))
6654 {
6655 case 0x10:
7ff120b4 6656 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6657
6658 case 0x14:
7ff120b4 6659 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6660
6661 case 0x12: case 0x16:
7ff120b4 6662 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6663
6664 default:
7ff120b4 6665 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6666 }
6667 else
6668 {
6669 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6670
6671 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6672 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6673 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6674 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6675 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6676 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6677 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6678 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6679 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6680 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6681 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6682 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6683 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6684 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6685 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6686 dsc);
cca44b1b
JB
6687 }
6688
6689 /* Should be unreachable. */
6690 return 1;
6691}
6692
6693static int
7ff120b4
YQ
6694arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6695 struct regcache *regs,
6696 struct displaced_step_closure *dsc)
cca44b1b
JB
6697{
6698 int a = bit (insn, 25), b = bit (insn, 4);
6699 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6700
6701 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6702 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6703 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6704 else if ((!a && (op1 & 0x17) == 0x02)
6705 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6706 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6707 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6708 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6709 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6710 else if ((!a && (op1 & 0x17) == 0x03)
6711 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6712 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6713 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6714 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6715 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6716 else if ((!a && (op1 & 0x17) == 0x06)
6717 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6718 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6719 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6720 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6721 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6722 else if ((!a && (op1 & 0x17) == 0x07)
6723 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6724 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6725
6726 /* Should be unreachable. */
6727 return 1;
6728}
6729
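/* Decode media instructions (parallel add/sub, pack/unpack/saturate/reverse,
   usad8/usada8 and bitfield operations) and copy them for displaced
   stepping. */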
6730static int
7ff120b4 6731arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6732 struct displaced_step_closure *dsc)
cca44b1b
JB
6733{
6734 switch (bits (insn, 20, 24))
6735 {
6736 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6737 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6738
6739 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6740 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6741
6742 case 0x08: case 0x09: case 0x0a: case 0x0b:
6743 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6744 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6745 "decode/pack/unpack/saturate/reverse", dsc);
6746
6747 case 0x18:
6748 if (bits (insn, 5, 7) == 0) /* op2. */
6749 {
6750 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6751 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6752 else
7ff120b4 6753 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6754 }
6755 else
7ff120b4 6756 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6757
6758 case 0x1a: case 0x1b:
6759 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6760 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6761 else
7ff120b4 6762 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6763
6764 case 0x1c: case 0x1d:
6765 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6766 {
6767 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6768 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6769 else
7ff120b4 6770 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6771 }
6772 else
7ff120b4 6773 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6774
6775 case 0x1e: case 0x1f:
6776 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6777 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6778 else
7ff120b4 6779 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6780 }
6781
6782 /* Should be unreachable. */
6783 return 1;
6784}
6785
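/* Decode branch, branch with link, and block transfer (LDM/STM) instructions
   and copy them for displaced stepping. */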
6786static int
615234c1 6787arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6788 struct regcache *regs,
6789 struct displaced_step_closure *dsc)
cca44b1b
JB
6790{
6791 if (bit (insn, 25))
7ff120b4 6792 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6793 else
7ff120b4 6794 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6795}
6796
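/* Decode extension register (VFP/Neon) load/store instructions and copy them
   for displaced stepping. */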
6797static int
7ff120b4 6798arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6799 struct regcache *regs,
6800 struct displaced_step_closure *dsc)
cca44b1b
JB
6801{
6802 unsigned int opcode = bits (insn, 20, 24);
6803
6804 switch (opcode)
6805 {
6806 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6807 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6808
6809 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6810 case 0x12: case 0x16:
7ff120b4 6811 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6812
6813 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6814 case 0x13: case 0x17:
7ff120b4 6815 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6816
6817 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6818 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6819 /* Note: no writeback for these instructions. Bit 25 will always be
6820 zero though (via caller), so the following works OK. */
7ff120b4 6821 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6822 }
6823
6824 /* Should be unreachable. */
6825 return 1;
6826}
6827
34518530
YQ
6828/* Decode shifted register instructions. */
6829
6830static int
6831thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6832 uint16_t insn2, struct regcache *regs,
6833 struct displaced_step_closure *dsc)
6834{
6835 /* PC is only allowed to be used in the MOV instruction. */
6836
6837 unsigned int op = bits (insn1, 5, 8);
6838 unsigned int rn = bits (insn1, 0, 3);
6839
6840 if (op == 0x2 && rn == 0xf) /* MOV */
6841 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6842 else
6843 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6844 "dp (shift reg)", dsc);
6845}
6846
6847
6848/* Decode extension register load/store. Exactly the same as
6849 arm_decode_ext_reg_ld_st. */
6850
6851static int
6852thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6853 uint16_t insn2, struct regcache *regs,
6854 struct displaced_step_closure *dsc)
6855{
6856 unsigned int opcode = bits (insn1, 4, 8);
6857
6858 switch (opcode)
6859 {
6860 case 0x04: case 0x05:
6861 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6862 "vfp/neon vmov", dsc);
6863
6864 case 0x08: case 0x0c: /* 01x00 */
6865 case 0x0a: case 0x0e: /* 01x10 */
6866 case 0x12: case 0x16: /* 10x10 */
6867 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6868 "vfp/neon vstm/vpush", dsc);
6869
6870 case 0x09: case 0x0d: /* 01x01 */
6871 case 0x0b: case 0x0f: /* 01x11 */
6872 case 0x13: case 0x17: /* 10x11 */
6873 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6874 "vfp/neon vldm/vpop", dsc);
6875
6876 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6877 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6878 "vstr", dsc);
6879 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6880 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6881 }
6882
6883 /* Should be unreachable. */
6884 return 1;
6885}
6886
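/* Decode supervisor call (SVC) and coprocessor instructions and copy them
   for displaced stepping. */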
cca44b1b 6887static int
12545665 6888arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6889 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6890{
6891 unsigned int op1 = bits (insn, 20, 25);
6892 int op = bit (insn, 4);
6893 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6894
6895 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6896 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6897 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6898 && (coproc & 0xe) != 0xa)
6899 /* stc/stc2. */
7ff120b4 6900 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6901 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6902 && (coproc & 0xe) != 0xa)
6903 /* ldc/ldc2 imm/lit. */
7ff120b4 6904 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6905 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6906 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6907 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6908 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6909 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6910 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6911 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6912 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6913 else if ((op1 & 0x30) == 0x20 && !op)
6914 {
6915 if ((coproc & 0xe) == 0xa)
7ff120b4 6916 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6917 else
7ff120b4 6918 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6919 }
6920 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6921 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6922 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6923 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6924 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6925 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6926 else if ((op1 & 0x30) == 0x30)
7ff120b4 6927 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6928 else
7ff120b4 6929 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6930}
6931
34518530
YQ
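/* Decode Thumb-2 coprocessor, SIMD and VFP instructions and copy them for
   displaced stepping. */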
6932static int
6933thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6934 uint16_t insn2, struct regcache *regs,
6935 struct displaced_step_closure *dsc)
6936{
6937 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6938 unsigned int bit_5_8 = bits (insn1, 5, 8);
6939 unsigned int bit_9 = bit (insn1, 9);
6940 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6941
6942 if (bit_9 == 0)
6943 {
6944 if (bit_5_8 == 2)
6945 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6946 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6947 dsc);
6948 else if (bit_5_8 == 0) /* UNDEFINED. */
6949 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6950 else
6951 {
6952 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6953 if ((coproc & 0xe) == 0xa)
6954 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6955 dsc);
6956 else /* coproc is not 101x. */
6957 {
6958 if (bit_4 == 0) /* STC/STC2. */
6959 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6960 "stc/stc2", dsc);
6961 else /* LDC/LDC2 {literal, immediate}. */
6962 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6963 regs, dsc);
6964 }
6965 }
6966 }
6967 else
6968 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6969
6970 return 0;
6971}
6972
6973static void
6974install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6975 struct displaced_step_closure *dsc, int rd)
6976{
6977 /* ADR Rd, #imm
6978
6979 Rewrite as:
6980
6981 Preparation: Rd <- PC
6982 Insn: ADD Rd, #imm
6983 Cleanup: Null.
6984 */
6985
6986 /* Rd <- PC */
6987 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6988 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6989}
6990
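/* Copy a 16-bit Thumb ADR (PC-relative address calculation) with destination
   register RD and immediate IMM for displaced stepping. */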
6991static int
6992thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6993 struct displaced_step_closure *dsc,
6994 int rd, unsigned int imm)
6995{
6996
6997 /* Encoding T2: ADDS Rd, #imm */
6998 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6999
7000 install_pc_relative (gdbarch, regs, dsc, rd);
7001
7002 return 0;
7003}
7004
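/* Decode a 16-bit Thumb ADR instruction and copy it for displaced
   stepping. */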
7005static int
7006thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7007 struct regcache *regs,
7008 struct displaced_step_closure *dsc)
7009{
7010 unsigned int rd = bits (insn, 8, 10);
7011 unsigned int imm8 = bits (insn, 0, 7);
7012
7013 if (debug_displaced)
7014 fprintf_unfiltered (gdb_stdlog,
7015 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7016 rd, imm8, insn);
7017
7018 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7019}
7020
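/* Copy a 32-bit Thumb-2 ADR.W (PC-relative ADD or SUB) instruction for
   displaced stepping. */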
7021static int
7022thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7023 uint16_t insn2, struct regcache *regs,
7024 struct displaced_step_closure *dsc)
7025{
7026 unsigned int rd = bits (insn2, 8, 11);
7027 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
7028 extract the raw immediate encoding rather than computing its value. When
7029 generating the ADD or SUB instruction, we can simply OR the immediate
7030 into it. */
7031 unsigned int imm_3_8 = insn2 & 0x70ff;
7032 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7033
7034 if (debug_displaced)
7035 fprintf_unfiltered (gdb_stdlog,
7036 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7037 rd, imm_i, imm_3_8, insn1, insn2);
7038
7039 if (bit (insn1, 7)) /* Encoding T2 */
7040 {
7041 /* Encoding T3: SUB Rd, Rd, #imm */
7042 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7043 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7044 }
7045 else /* Encoding T3 */
7046 {
7047 /* Encoding T3: ADD Rd, Rd, #imm */
7048 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7049 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7050 }
7051 dsc->numinsns = 2;
7052
7053 install_pc_relative (gdbarch, regs, dsc, rd);
7054
7055 return 0;
7056}
7057
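/* Copy a 16-bit Thumb LDR (literal), i.e. a PC-relative load, for displaced
   stepping. */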
7058static int
615234c1 7059thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7060 struct regcache *regs,
7061 struct displaced_step_closure *dsc)
7062{
7063 unsigned int rt = bits (insn1, 8, 10);
7064 unsigned int pc;
7065 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
7066
7067 /* LDR Rd, #imm8
7068
7069 Rewrite as:
7070
7071 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7072
7073 Insn: LDR R0, [R2, R3];
7074 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7075
7076 if (debug_displaced)
7077 fprintf_unfiltered (gdb_stdlog,
7078 "displaced: copying thumb ldr r%d [pc #%d]\n"
7079 , rt, imm8);
7080
7081 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7082 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7083 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7084 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7085 /* The assembler calculates the required value of the offset from the
7086 Align(PC,4) value of this instruction to the label. */
7087 pc = pc & 0xfffffffc;
7088
7089 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7090 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7091
7092 dsc->rd = rt;
7093 dsc->u.ldst.xfersize = 4;
7094 dsc->u.ldst.rn = 0;
7095 dsc->u.ldst.immed = 0;
7096 dsc->u.ldst.writeback = 0;
7097 dsc->u.ldst.restore_r4 = 0;
7098
7099 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7100
7101 dsc->cleanup = &cleanup_load;
7102
7103 return 0;
7104}
7105
7106/* Copy Thumb cbnz/cbz instruction. */
7107
7108static int
7109thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7110 struct regcache *regs,
7111 struct displaced_step_closure *dsc)
7112{
7113 int non_zero = bit (insn1, 11);
7114 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7115 CORE_ADDR from = dsc->insn_addr;
7116 int rn = bits (insn1, 0, 2);
7117 int rn_val = displaced_read_reg (regs, dsc, rn);
7118
7119 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7120 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7121 true, set it to INST_AL so cleanup_branch will know the branch is taken;
7122 if it is false, leave it alone and cleanup_branch will do nothing. */
7123 if (dsc->u.branch.cond)
7124 {
7125 dsc->u.branch.cond = INST_AL;
7126 dsc->u.branch.dest = from + 4 + imm5;
7127 }
7128 else
7129 dsc->u.branch.dest = from + 2;
7130
7131 dsc->u.branch.link = 0;
7132 dsc->u.branch.exchange = 0;
7133
7134 if (debug_displaced)
7135 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7136 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7137 rn, rn_val, insn1, dsc->u.branch.dest);
7138
7139 dsc->modinsn[0] = THUMB_NOP;
7140
7141 dsc->cleanup = &cleanup_branch;
7142 return 0;
7143}
7144
7145/* Copy Table Branch Byte/Halfword (TBB/TBH). */
7146static int
7147thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7148 uint16_t insn2, struct regcache *regs,
7149 struct displaced_step_closure *dsc)
7150{
7151 ULONGEST rn_val, rm_val;
7152 int is_tbh = bit (insn2, 4);
7153 CORE_ADDR halfwords = 0;
7154 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7155
7156 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7157 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7158
7159 if (is_tbh)
7160 {
7161 gdb_byte buf[2];
7162
7163 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7164 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7165 }
7166 else
7167 {
7168 gdb_byte buf[1];
7169
7170 target_read_memory (rn_val + rm_val, buf, 1);
7171 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7172 }
7173
7174 if (debug_displaced)
7175 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7176 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7177 (unsigned int) rn_val, (unsigned int) rm_val,
7178 (unsigned int) halfwords);
7179
7180 dsc->u.branch.cond = INST_AL;
7181 dsc->u.branch.link = 0;
7182 dsc->u.branch.exchange = 0;
7183 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7184
7185 dsc->cleanup = &cleanup_branch;
7186
7187 return 0;
7188}
7189
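/* Cleanup for a 16-bit Thumb POP with a full register list: the new PC value
   was popped into r7 and the original r7 was moved to r8, so write the PC
   from r7 and then restore r7 and r8. */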
7190static void
7191cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7192 struct displaced_step_closure *dsc)
7193{
7194 /* PC <- r7 */
7195 int val = displaced_read_reg (regs, dsc, 7);
7196 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7197
7198 /* r7 <- r8 */
7199 val = displaced_read_reg (regs, dsc, 8);
7200 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7201
7202 /* r8 <- tmp[0] */
7203 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7204
7205}
7206
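/* Copy a 16-bit Thumb POP instruction whose register list includes PC for
   displaced stepping. */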
7207static int
615234c1 7208thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7209 struct regcache *regs,
7210 struct displaced_step_closure *dsc)
7211{
7212 dsc->u.block.regmask = insn1 & 0x00ff;
7213
7214 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7215 to :
7216
7217 (1) register list is full, that is, r0-r7 are used.
7218 Prepare: tmp[0] <- r8
7219
7220 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7221 MOV r8, r7; Move value of r7 to r8;
7222 POP {r7}; Store PC value into r7.
7223
7224 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7225
7226 (2) register list is not full, supposing there are N registers in
7227 register list (except PC, 0 <= N <= 7).
7228 Prepare: for each i, 0 - N, tmp[i] <- ri.
7229
7230 POP {r0, r1, ...., rN};
7231
7232 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7233 from tmp[] properly.
7234 */
7235 if (debug_displaced)
7236 fprintf_unfiltered (gdb_stdlog,
7237 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7238 dsc->u.block.regmask, insn1);
7239
7240 if (dsc->u.block.regmask == 0xff)
7241 {
7242 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7243
7244 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7245 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7246 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7247
7248 dsc->numinsns = 3;
7249 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7250 }
7251 else
7252 {
7253 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7254 unsigned int i;
7255 unsigned int new_regmask;
34518530
YQ
7256
7257 for (i = 0; i < num_in_list + 1; i++)
7258 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7259
7260 new_regmask = (1 << (num_in_list + 1)) - 1;
7261
7262 if (debug_displaced)
7263 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7264 "{..., pc}: original reg list %.4x,"
7265 " modified list %.4x\n"),
7266 (int) dsc->u.block.regmask, new_regmask);
7267
7268 dsc->u.block.regmask |= 0x8000;
7269 dsc->u.block.writeback = 0;
7270 dsc->u.block.cond = INST_AL;
7271
7272 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7273
7274 dsc->cleanup = &cleanup_block_load_pc;
7275 }
7276
7277 return 0;
7278}
7279
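/* Decode and copy a 16-bit Thumb instruction for displaced stepping. */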
7280static void
7281thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7282 struct regcache *regs,
7283 struct displaced_step_closure *dsc)
7284{
7285 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7286 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7287 int err = 0;
7288
7289 /* 16-bit thumb instructions. */
7290 switch (op_bit_12_15)
7291 {
7292 /* Shift (immediate), add, subtract, move and compare. */
7293 case 0: case 1: case 2: case 3:
7294 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7295 "shift/add/sub/mov/cmp",
7296 dsc);
7297 break;
7298 case 4:
7299 switch (op_bit_10_11)
7300 {
7301 case 0: /* Data-processing */
7302 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7303 "data-processing",
7304 dsc);
7305 break;
7306 case 1: /* Special data instructions and branch and exchange. */
7307 {
7308 unsigned short op = bits (insn1, 7, 9);
7309 if (op == 6 || op == 7) /* BX or BLX */
7310 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7311 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7312 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7313 else
7314 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7315 dsc);
7316 }
7317 break;
7318 default: /* LDR (literal) */
7319 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7320 }
7321 break;
7322 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7323 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7324 break;
7325 case 10:
7326 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7327 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7328 else /* Generate SP-relative address */
7329 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7330 break;
7331 case 11: /* Misc 16-bit instructions */
7332 {
7333 switch (bits (insn1, 8, 11))
7334 {
7335 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7336 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7337 break;
7338 case 12: case 13: /* POP */
7339 if (bit (insn1, 8)) /* PC is in register list. */
7340 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7341 else
7342 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7343 break;
7344 case 15: /* If-Then, and hints */
7345 if (bits (insn1, 0, 3))
7346 /* If-Then makes up to four following instructions conditional.
7347 The IT instruction itself is not conditional, so handle it as an
7348 ordinary unmodified instruction. */
7349 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7350 dsc);
7351 else
7352 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7353 break;
7354 default:
7355 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7356 }
7357 }
7358 break;
7359 case 12:
7360 if (op_bit_10_11 < 2) /* Store multiple registers */
7361 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7362 else /* Load multiple registers */
7363 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7364 break;
7365 case 13: /* Conditional branch and supervisor call */
7366 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7367 err = thumb_copy_b (gdbarch, insn1, dsc);
7368 else
7369 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7370 break;
7371 case 14: /* Unconditional branch */
7372 err = thumb_copy_b (gdbarch, insn1, dsc);
7373 break;
7374 default:
7375 err = 1;
7376 }
7377
7378 if (err)
7379 internal_error (__FILE__, __LINE__,
7380 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7381}
7382
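/* Decode 32-bit Thumb-2 load byte/halfword/word and memory hint (PLD/PLI)
   instructions and copy them for displaced stepping. */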
7383static int
7384decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7385 uint16_t insn1, uint16_t insn2,
7386 struct regcache *regs,
7387 struct displaced_step_closure *dsc)
7388{
7389 int rt = bits (insn2, 12, 15);
7390 int rn = bits (insn1, 0, 3);
7391 int op1 = bits (insn1, 7, 8);
34518530
YQ
7392
7393 switch (bits (insn1, 5, 6))
7394 {
7395 case 0: /* Load byte and memory hints */
7396 if (rt == 0xf) /* PLD/PLI */
7397 {
7398 if (rn == 0xf)
7399 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7400 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7401 else
7402 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7403 "pli/pld", dsc);
7404 }
7405 else
7406 {
7407 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7408 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7409 1);
7410 else
7411 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7412 "ldrb{reg, immediate}/ldrbt",
7413 dsc);
7414 }
7415
7416 break;
7417 case 1: /* Load halfword and memory hints. */
7418 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7419 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7420 "pld/unalloc memhint", dsc);
7421 else
7422 {
7423 if (rn == 0xf)
7424 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7425 2);
7426 else
7427 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7428 "ldrh/ldrht", dsc);
7429 }
7430 break;
7431 case 2: /* Load word */
7432 {
7433 int insn2_bit_8_11 = bits (insn2, 8, 11);
7434
7435 if (rn == 0xf)
7436 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7437 else if (op1 == 0x1) /* Encoding T3 */
7438 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7439 0, 1);
7440 else /* op1 == 0x0 */
7441 {
7442 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7443 /* LDR (immediate) */
7444 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7445 dsc, bit (insn2, 8), 1);
7446 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7447 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7448 "ldrt", dsc);
7449 else
7450 /* LDR (register) */
7451 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7452 dsc, 0, 0);
7453 }
7454 break;
7455 }
7456 default:
7457 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7458 break;
7459 }
7460 return 0;
7461}
7462
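/* Decode and copy a 32-bit Thumb-2 instruction for displaced stepping. */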
7463static void
7464thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7465 uint16_t insn2, struct regcache *regs,
7466 struct displaced_step_closure *dsc)
7467{
7468 int err = 0;
7469 unsigned short op = bit (insn2, 15);
7470 unsigned int op1 = bits (insn1, 11, 12);
7471
7472 switch (op1)
7473 {
7474 case 1:
7475 {
7476 switch (bits (insn1, 9, 10))
7477 {
7478 case 0:
7479 if (bit (insn1, 6))
7480 {
7481 /* Load/store {dual, exclusive}, table branch. */
7482 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7483 && bits (insn2, 5, 7) == 0)
7484 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7485 dsc);
7486 else
7487 /* PC is not allowed to be used in load/store {dual, exclusive}
7488 instructions. */
7489 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7490 "load/store dual/ex", dsc);
7491 }
7492 else /* load/store multiple */
7493 {
7494 switch (bits (insn1, 7, 8))
7495 {
7496 case 0: case 3: /* SRS, RFE */
7497 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7498 "srs/rfe", dsc);
7499 break;
7500 case 1: case 2: /* LDM/STM/PUSH/POP */
7501 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7502 break;
7503 }
7504 }
7505 break;
7506
7507 case 1:
7508 /* Data-processing (shift register). */
7509 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7510 dsc);
7511 break;
7512 default: /* Coprocessor instructions. */
7513 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7514 break;
7515 }
7516 break;
7517 }
7518 case 2: /* op1 = 2 */
7519 if (op) /* Branch and misc control. */
7520 {
7521 if (bit (insn2, 14) /* BLX/BL */
7522 || bit (insn2, 12) /* Unconditional branch */
7523 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7524 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7525 else
7526 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7527 "misc ctrl", dsc);
7528 }
7529 else
7530 {
7531 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7532 {
7533 int op = bits (insn1, 4, 8);
7534 int rn = bits (insn1, 0, 3);
7535 if ((op == 0 || op == 0xa) && rn == 0xf)
7536 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7537 regs, dsc);
7538 else
7539 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7540 "dp/pb", dsc);
7541 }
7542 else /* Data processing (modified immediate) */
7543 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7544 "dp/mi", dsc);
7545 }
7546 break;
7547 case 3: /* op1 = 3 */
7548 switch (bits (insn1, 9, 10))
7549 {
7550 case 0:
7551 if (bit (insn1, 4))
7552 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7553 regs, dsc);
7554 else /* NEON Load/Store and Store single data item */
7555 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7556 "neon elt/struct load/store",
7557 dsc);
7558 break;
7559 case 1: /* op1 = 3, bits (9, 10) == 1 */
7560 switch (bits (insn1, 7, 8))
7561 {
7562 case 0: case 1: /* Data processing (register) */
7563 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7564 "dp(reg)", dsc);
7565 break;
7566 case 2: /* Multiply and absolute difference */
7567 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7568 "mul/mua/diff", dsc);
7569 break;
7570 case 3: /* Long multiply and divide */
7571 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7572 "lmul/lmua", dsc);
7573 break;
7574 }
7575 break;
7576 default: /* Coprocessor instructions */
7577 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7578 break;
7579 }
7580 break;
7581 default:
7582 err = 1;
7583 }
7584
7585 if (err)
7586 internal_error (__FILE__, __LINE__,
7587 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7588
7589}
7590
b434a28f
YQ
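/* Read the Thumb instruction at FROM and dispatch to the 16-bit or 32-bit
   displaced stepping handler as appropriate. */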
7591static void
7592thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7593 struct regcache *regs,
b434a28f
YQ
7594 struct displaced_step_closure *dsc)
7595{
34518530
YQ
7596 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7597 uint16_t insn1
7598 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7599
7600 if (debug_displaced)
7601 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7602 "at %.8lx\n", insn1, (unsigned long) from);
7603
7604 dsc->is_thumb = 1;
7605 dsc->insn_size = thumb_insn_size (insn1);
7606 if (thumb_insn_size (insn1) == 4)
7607 {
7608 uint16_t insn2
7609 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7610 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7611 }
7612 else
7613 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7614}
7615
cca44b1b 7616void
b434a28f
YQ
7617arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7618 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7619 struct displaced_step_closure *dsc)
7620{
7621 int err = 0;
b434a28f
YQ
7622 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7623 uint32_t insn;
cca44b1b
JB
7624
7625 /* Most displaced instructions use a 1-instruction scratch space, so set this
7626 here and override below if/when necessary. */
7627 dsc->numinsns = 1;
7628 dsc->insn_addr = from;
7629 dsc->scratch_base = to;
7630 dsc->cleanup = NULL;
7631 dsc->wrote_to_pc = 0;
7632
b434a28f 7633 if (!displaced_in_arm_mode (regs))
12545665 7634 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7635
4db71c0b
YQ
7636 dsc->is_thumb = 0;
7637 dsc->insn_size = 4;
b434a28f
YQ
7638 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7639 if (debug_displaced)
7640 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7641 "at %.8lx\n", (unsigned long) insn,
7642 (unsigned long) from);
7643
cca44b1b 7644 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7645 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7646 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7647 {
7648 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7649 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7650 break;
7651
7652 case 0x4: case 0x5: case 0x6:
7ff120b4 7653 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7654 break;
7655
7656 case 0x7:
7ff120b4 7657 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7658 break;
7659
7660 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7661 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7662 break;
7663
7664 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7665 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7666 break;
7667 }
7668
7669 if (err)
7670 internal_error (__FILE__, __LINE__,
7671 _("arm_process_displaced_insn: Instruction decode error"));
7672}
7673
7674/* Actually set up the scratch space for a displaced instruction. */
7675
7676void
7677arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7678 CORE_ADDR to, struct displaced_step_closure *dsc)
7679{
7680 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7681 unsigned int i, len, offset;
cca44b1b 7682 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7683 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7684 const gdb_byte *bkp_insn;
cca44b1b 7685
4db71c0b 7686 offset = 0;
cca44b1b
JB
7687 /* Poke modified instruction(s). */
7688 for (i = 0; i < dsc->numinsns; i++)
7689 {
7690 if (debug_displaced)
4db71c0b
YQ
7691 {
7692 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7693 if (size == 4)
7694 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7695 dsc->modinsn[i]);
7696 else if (size == 2)
7697 fprintf_unfiltered (gdb_stdlog, "%.4x",
7698 (unsigned short)dsc->modinsn[i]);
7699
7700 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7701 (unsigned long) to + offset);
7702
7703 }
7704 write_memory_unsigned_integer (to + offset, size,
7705 byte_order_for_code,
cca44b1b 7706 dsc->modinsn[i]);
4db71c0b
YQ
7707 offset += size;
7708 }
7709
7710 /* Choose the correct breakpoint instruction. */
7711 if (dsc->is_thumb)
7712 {
7713 bkp_insn = tdep->thumb_breakpoint;
7714 len = tdep->thumb_breakpoint_size;
7715 }
7716 else
7717 {
7718 bkp_insn = tdep->arm_breakpoint;
7719 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7720 }
7721
7722 /* Put breakpoint afterwards. */
4db71c0b 7723 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7724
7725 if (debug_displaced)
7726 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7727 paddress (gdbarch, from), paddress (gdbarch, to));
7728}
7729
cca44b1b
JB
7730/* Entry point for cleaning things up after a displaced instruction has been
7731 single-stepped. */
7732
7733void
7734arm_displaced_step_fixup (struct gdbarch *gdbarch,
7735 struct displaced_step_closure *dsc,
7736 CORE_ADDR from, CORE_ADDR to,
7737 struct regcache *regs)
7738{
7739 if (dsc->cleanup)
7740 dsc->cleanup (gdbarch, regs, dsc);
7741
7742 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7743 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7744 dsc->insn_addr + dsc->insn_size);
7745
cca44b1b
JB
7746}
7747
7748#include "bfd-in2.h"
7749#include "libcoff.h"
7750
7751static int
7752gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7753{
9a3c8263 7754 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
9779414d
DJ
7755
7756 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7757 {
7758 static asymbol *asym;
7759 static combined_entry_type ce;
7760 static struct coff_symbol_struct csym;
7761 static struct bfd fake_bfd;
7762 static bfd_target fake_target;
7763
7764 if (csym.native == NULL)
7765 {
7766 /* Create a fake symbol vector containing a Thumb symbol.
7767 This is solely so that the code in print_insn_little_arm()
7768 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7769 the presence of a Thumb symbol and switch to decoding
7770 Thumb instructions. */
7771
7772 fake_target.flavour = bfd_target_coff_flavour;
7773 fake_bfd.xvec = &fake_target;
7774 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7775 csym.native = &ce;
7776 csym.symbol.the_bfd = &fake_bfd;
7777 csym.symbol.name = "fake";
7778 asym = (asymbol *) & csym;
7779 }
7780
7781 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7782 info->symbols = &asym;
7783 }
7784 else
7785 info->symbols = NULL;
7786
7787 if (info->endian == BFD_ENDIAN_BIG)
7788 return print_insn_big_arm (memaddr, info);
7789 else
7790 return print_insn_little_arm (memaddr, info);
7791}
7792
7793/* The following define instruction sequences that will cause ARM
7794 cpu's to take an undefined instruction trap. These are used to
7795 signal a breakpoint to GDB.
7796
7797 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7798 modes. A different instruction is required for each mode. The ARM
7799 cpu's can also be big or little endian. Thus four different
7800 instructions are needed to support all cases.
7801
7802 Note: ARMv4 defines several new instructions that will take the
7803 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7804 not in fact add the new instructions. The new undefined
7805 instructions in ARMv4 are all instructions that had no defined
7806 behaviour in earlier chips. There is no guarantee that they will
7807 raise an exception, but may be treated as NOPs. In practice, it
7808 may only be safe to rely on instructions matching:
7809
7810 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7811 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7812 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7813
0963b4bd 7814 Even this may only be true if the condition predicate is true. The
cca44b1b
JB
7815 following use a condition predicate of ALWAYS so it is always TRUE.
7816
7817 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7818 and NetBSD all use a software interrupt rather than an undefined
7819 instruction to force a trap. This can be handled by the
7820 abi-specific code during establishment of the gdbarch vector. */
7821
7822#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7823#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7824#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7825#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7826
948f8e3d
PA
7827static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7828static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7829static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7830static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7831
cd6c3b4f
YQ
7832/* Implement the breakpoint_kind_from_pc gdbarch method. */
7833
d19280ad
YQ
7834static int
7835arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
cca44b1b
JB
7836{
7837 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7838 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7839
9779414d 7840 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7841 {
7842 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7843
7844 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7845 check whether we are replacing a 32-bit instruction. */
7846 if (tdep->thumb2_breakpoint != NULL)
7847 {
7848 gdb_byte buf[2];
d19280ad 7849
177321bd
DJ
7850 if (target_read_memory (*pcptr, buf, 2) == 0)
7851 {
7852 unsigned short inst1;
d19280ad 7853
177321bd 7854 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7855 if (thumb_insn_size (inst1) == 4)
d19280ad 7856 return ARM_BP_KIND_THUMB2;
177321bd
DJ
7857 }
7858 }
7859
d19280ad 7860 return ARM_BP_KIND_THUMB;
cca44b1b
JB
7861 }
7862 else
d19280ad
YQ
7863 return ARM_BP_KIND_ARM;
7864
7865}
7866
cd6c3b4f
YQ
7867/* Implement the sw_breakpoint_from_kind gdbarch method. */
7868
d19280ad
YQ
7869static const gdb_byte *
7870arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7871{
7872 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7873
7874 switch (kind)
cca44b1b 7875 {
d19280ad
YQ
7876 case ARM_BP_KIND_ARM:
7877 *size = tdep->arm_breakpoint_size;
cca44b1b 7878 return tdep->arm_breakpoint;
d19280ad
YQ
7879 case ARM_BP_KIND_THUMB:
7880 *size = tdep->thumb_breakpoint_size;
7881 return tdep->thumb_breakpoint;
7882 case ARM_BP_KIND_THUMB2:
7883 *size = tdep->thumb2_breakpoint_size;
7884 return tdep->thumb2_breakpoint;
7885 default:
7886 gdb_assert_not_reached ("unexpected arm breakpoint kind");
cca44b1b
JB
7887 }
7888}
7889
833b7ab5
YQ
7890/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7891
7892static int
7893arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7894 struct regcache *regcache,
7895 CORE_ADDR *pcptr)
7896{
7897 gdb_byte buf[4];
7898
7899 /* Check that the memory pointed to by PC is readable. */
7900 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7901 {
7902 struct arm_get_next_pcs next_pcs_ctx;
7903 CORE_ADDR pc;
7904 int i;
7905 VEC (CORE_ADDR) *next_pcs = NULL;
7906 struct cleanup *old_chain
7907 = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
7908
7909 arm_get_next_pcs_ctor (&next_pcs_ctx,
7910 &arm_get_next_pcs_ops,
7911 gdbarch_byte_order (gdbarch),
7912 gdbarch_byte_order_for_code (gdbarch),
7913 0,
7914 regcache);
7915
7916 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7917
7918 /* If *PCPTR matches one of the next instructions of the current PC,
7919 do the software single step computation, and get the Thumb mode from
7920 the destination address. */
7921 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
7922 {
7923 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7924 {
7925 do_cleanups (old_chain);
7926
7927 if (IS_THUMB_ADDR (pc))
7928 {
7929 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7930 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7931 }
7932 else
7933 return ARM_BP_KIND_ARM;
7934 }
7935 }
7936
7937 do_cleanups (old_chain);
7938 }
7939
7940 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7941}
7942
cca44b1b
JB
7943/* Extract from an array REGBUF containing the (raw) register state a
7944 function return value of type TYPE, and copy that, in virtual
7945 format, into VALBUF. */
7946
7947static void
7948arm_extract_return_value (struct type *type, struct regcache *regs,
7949 gdb_byte *valbuf)
7950{
7951 struct gdbarch *gdbarch = get_regcache_arch (regs);
7952 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7953
7954 if (TYPE_CODE_FLT == TYPE_CODE (type))
7955 {
7956 switch (gdbarch_tdep (gdbarch)->fp_model)
7957 {
7958 case ARM_FLOAT_FPA:
7959 {
7960 /* The value is in register F0 in internal format. We need to
7961 extract the raw value and then convert it to the desired
7962 internal type. */
7963 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7964
7965 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7966 convert_from_extended (floatformat_from_type (type), tmpbuf,
7967 valbuf, gdbarch_byte_order (gdbarch));
7968 }
7969 break;
7970
7971 case ARM_FLOAT_SOFT_FPA:
7972 case ARM_FLOAT_SOFT_VFP:
7973 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7974 not using the VFP ABI code. */
7975 case ARM_FLOAT_VFP:
7976 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7977 if (TYPE_LENGTH (type) > 4)
7978 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7979 valbuf + INT_REGISTER_SIZE);
7980 break;
7981
7982 default:
0963b4bd
MS
7983 internal_error (__FILE__, __LINE__,
7984 _("arm_extract_return_value: "
7985 "Floating point model not supported"));
cca44b1b
JB
7986 break;
7987 }
7988 }
7989 else if (TYPE_CODE (type) == TYPE_CODE_INT
7990 || TYPE_CODE (type) == TYPE_CODE_CHAR
7991 || TYPE_CODE (type) == TYPE_CODE_BOOL
7992 || TYPE_CODE (type) == TYPE_CODE_PTR
7993 || TYPE_CODE (type) == TYPE_CODE_REF
7994 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7995 {
b021a221
MS
7996 /* If the type is a plain integer, then the access is
7997 straight-forward. Otherwise we have to play around a bit
7998 more. */
cca44b1b
JB
7999 int len = TYPE_LENGTH (type);
8000 int regno = ARM_A1_REGNUM;
8001 ULONGEST tmp;
8002
8003 while (len > 0)
8004 {
8005 /* By using store_unsigned_integer we avoid having to do
8006 anything special for small big-endian values. */
8007 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8008 store_unsigned_integer (valbuf,
8009 (len > INT_REGISTER_SIZE
8010 ? INT_REGISTER_SIZE : len),
8011 byte_order, tmp);
8012 len -= INT_REGISTER_SIZE;
8013 valbuf += INT_REGISTER_SIZE;
8014 }
8015 }
8016 else
8017 {
8018 /* For a structure or union the behaviour is as if the value had
8019 been stored to word-aligned memory and then loaded into
8020 registers with 32-bit load instruction(s). */
8021 int len = TYPE_LENGTH (type);
8022 int regno = ARM_A1_REGNUM;
8023 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8024
8025 while (len > 0)
8026 {
8027 regcache_cooked_read (regs, regno++, tmpbuf);
8028 memcpy (valbuf, tmpbuf,
8029 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8030 len -= INT_REGISTER_SIZE;
8031 valbuf += INT_REGISTER_SIZE;
8032 }
8033 }
8034}
8035
8036
8037/* Will a function return an aggregate type in memory or in a
8038 register? Return 0 if an aggregate type can be returned in a
8039 register, 1 if it must be returned in memory. */
8040
8041static int
8042arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8043{
cca44b1b
JB
8044 enum type_code code;
8045
f168693b 8046 type = check_typedef (type);
cca44b1b 8047
b13c8ab2
YQ
8048 /* Simple, non-aggregate types (i.e. not including vectors and
8049 complex) are always returned in a register (or registers). */
8050 code = TYPE_CODE (type);
8051 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8052 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8053 return 0;
cca44b1b 8054
c4312b19
YQ
8055 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8056 {
8057 /* Vector values should be returned using ARM registers if they
8058 are not over 16 bytes. */
8059 return (TYPE_LENGTH (type) > 16);
8060 }
8061
b13c8ab2 8062 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 8063 {
b13c8ab2
YQ
8064 /* The AAPCS says all aggregates not larger than a word are returned
8065 in a register. */
8066 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8067 return 0;
8068
cca44b1b
JB
8069 return 1;
8070 }
b13c8ab2
YQ
8071 else
8072 {
8073 int nRc;
cca44b1b 8074
b13c8ab2
YQ
8075 /* All aggregate types that won't fit in a register must be returned
8076 in memory. */
8077 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8078 return 1;
cca44b1b 8079
b13c8ab2
YQ
8080 /* In the ARM ABI, "integer" like aggregate types are returned in
8081 registers. For an aggregate type to be integer like, its size
8082 must be less than or equal to INT_REGISTER_SIZE and the
8083 offset of each addressable subfield must be zero. Note that bit
8084 fields are not addressable, and all addressable subfields of
8085 unions always start at offset zero.
cca44b1b 8086
b13c8ab2
YQ
8087 This function is based on the behaviour of GCC 2.95.1.
8088 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8089
b13c8ab2
YQ
8090 Note: All versions of GCC before GCC 2.95.2 do not set up the
8091 parameters correctly for a function returning the following
8092 structure: struct { float f;}; This should be returned in memory,
8093 not a register. Richard Earnshaw sent me a patch, but I do not
8094 know of any way to detect if a function like the above has been
8095 compiled with the correct calling convention. */
8096
8097 /* Assume all other aggregate types can be returned in a register.
8098 Run a check for structures, unions and arrays. */
8099 nRc = 0;
67255d04 8100
b13c8ab2
YQ
8101 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8102 {
8103 int i;
8104 /* Need to check if this struct/union is "integer" like. For
8105 this to be true, its size must be less than or equal to
8106 INT_REGISTER_SIZE and the offset of each addressable
8107 subfield must be zero. Note that bit fields are not
8108 addressable, and unions always start at offset zero. If any
8109 of the subfields is a floating point type, the struct/union
8110 cannot be an integer type. */
8111
8112 /* For each field in the object, check:
8113 1) Is it FP? --> yes, nRc = 1;
8114 2) Is it addressable (bitpos != 0) and
8115 not packed (bitsize == 0)?
8116 --> yes, nRc = 1
8117 */
8118
8119 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8120 {
b13c8ab2
YQ
8121 enum type_code field_type_code;
8122
8123 field_type_code
8124 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8125 i)));
8126
8127 /* Is it a floating point type field? */
8128 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8129 {
8130 nRc = 1;
8131 break;
8132 }
b13c8ab2
YQ
8133
8134 /* If bitpos != 0, then we have to care about it. */
8135 if (TYPE_FIELD_BITPOS (type, i) != 0)
8136 {
8137 /* Bitfields are not addressable. If the field bitsize is
8138 zero, then the field is not packed. Hence it cannot be
8139 a bitfield or any other packed type. */
8140 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8141 {
8142 nRc = 1;
8143 break;
8144 }
8145 }
67255d04
RE
8146 }
8147 }
67255d04 8148
b13c8ab2
YQ
8149 return nRc;
8150 }
67255d04
RE
8151}
8152
34e8f22d
RE
8153/* Write into appropriate registers a function return value of type
8154 TYPE, given in virtual format. */
8155
8156static void
b508a996 8157arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8158 const gdb_byte *valbuf)
34e8f22d 8159{
be8626e0 8160 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8161 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8162
34e8f22d
RE
8163 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8164 {
e362b510 8165 gdb_byte buf[MAX_REGISTER_SIZE];
34e8f22d 8166
be8626e0 8167 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8168 {
8169 case ARM_FLOAT_FPA:
8170
be8626e0
MD
8171 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8172 gdbarch_byte_order (gdbarch));
b508a996 8173 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8174 break;
8175
fd50bc42 8176 case ARM_FLOAT_SOFT_FPA:
08216dd7 8177 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8178 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8179 not using the VFP ABI code. */
8180 case ARM_FLOAT_VFP:
b508a996
RE
8181 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8182 if (TYPE_LENGTH (type) > 4)
8183 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8184 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8185 break;
8186
8187 default:
9b20d036
MS
8188 internal_error (__FILE__, __LINE__,
8189 _("arm_store_return_value: Floating "
8190 "point model not supported"));
08216dd7
RE
8191 break;
8192 }
34e8f22d 8193 }
b508a996
RE
8194 else if (TYPE_CODE (type) == TYPE_CODE_INT
8195 || TYPE_CODE (type) == TYPE_CODE_CHAR
8196 || TYPE_CODE (type) == TYPE_CODE_BOOL
8197 || TYPE_CODE (type) == TYPE_CODE_PTR
8198 || TYPE_CODE (type) == TYPE_CODE_REF
8199 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8200 {
8201 if (TYPE_LENGTH (type) <= 4)
8202 {
8203 /* Values of one word or less are zero/sign-extended and
8204 returned in r0. */
7a5ea0d4 8205 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8206 LONGEST val = unpack_long (type, valbuf);
8207
e17a4113 8208 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8209 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8210 }
8211 else
8212 {
8213 /* Integral values greater than one word are stored in consecutive
8214 registers starting with r0. This will always be a multiple of
8215 the register size. */
8216 int len = TYPE_LENGTH (type);
8217 int regno = ARM_A1_REGNUM;
8218
8219 while (len > 0)
8220 {
8221 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8222 len -= INT_REGISTER_SIZE;
8223 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8224 }
8225 }
8226 }
34e8f22d 8227 else
b508a996
RE
8228 {
8229 /* For a structure or union the behaviour is as if the value had
8230 been stored to word-aligned memory and then loaded into
8231 registers with 32-bit load instruction(s). */
8232 int len = TYPE_LENGTH (type);
8233 int regno = ARM_A1_REGNUM;
7a5ea0d4 8234 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8235
8236 while (len > 0)
8237 {
8238 memcpy (tmpbuf, valbuf,
7a5ea0d4 8239 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8240 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8241 len -= INT_REGISTER_SIZE;
8242 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8243 }
8244 }
34e8f22d
RE
8245}
8246
2af48f68
PB
8247
8248/* Handle function return values. */
8249
8250static enum return_value_convention
6a3a010b 8251arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8252 struct type *valtype, struct regcache *regcache,
8253 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8254{
7c00367c 8255 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8256 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8257 enum arm_vfp_cprc_base_type vfp_base_type;
8258 int vfp_base_count;
8259
8260 if (arm_vfp_abi_for_function (gdbarch, func_type)
8261 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8262 {
8263 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8264 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8265 int i;
8266 for (i = 0; i < vfp_base_count; i++)
8267 {
58d6951d
DJ
8268 if (reg_char == 'q')
8269 {
8270 if (writebuf)
8271 arm_neon_quad_write (gdbarch, regcache, i,
8272 writebuf + i * unit_length);
8273
8274 if (readbuf)
8275 arm_neon_quad_read (gdbarch, regcache, i,
8276 readbuf + i * unit_length);
8277 }
8278 else
8279 {
8280 char name_buf[4];
8281 int regnum;
8282
8c042590 8283 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8284 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8285 strlen (name_buf));
8286 if (writebuf)
8287 regcache_cooked_write (regcache, regnum,
8288 writebuf + i * unit_length);
8289 if (readbuf)
8290 regcache_cooked_read (regcache, regnum,
8291 readbuf + i * unit_length);
8292 }
90445bd3
DJ
8293 }
8294 return RETURN_VALUE_REGISTER_CONVENTION;
8295 }
7c00367c 8296
2af48f68
PB
8297 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8298 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8299 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8300 {
7c00367c
MK
8301 if (tdep->struct_return == pcc_struct_return
8302 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8303 return RETURN_VALUE_STRUCT_CONVENTION;
8304 }
b13c8ab2
YQ
8305 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8306 {
8307 if (arm_return_in_memory (gdbarch, valtype))
8308 return RETURN_VALUE_STRUCT_CONVENTION;
8309 }
7052e42c 8310
2af48f68
PB
8311 if (writebuf)
8312 arm_store_return_value (valtype, regcache, writebuf);
8313
8314 if (readbuf)
8315 arm_extract_return_value (valtype, regcache, readbuf);
8316
8317 return RETURN_VALUE_REGISTER_CONVENTION;
8318}
8319
8320
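/* Recover the PC at which a longjmp will resume from the jmp_buf whose
   address is in r0. Return 1 on success, or 0 if the jmp_buf contents
   cannot be read. */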
9df628e0 8321static int
60ade65d 8322arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8323{
e17a4113
UW
8324 struct gdbarch *gdbarch = get_frame_arch (frame);
8325 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8326 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8327 CORE_ADDR jb_addr;
e362b510 8328 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8329
60ade65d 8330 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8331
8332 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8333 INT_REGISTER_SIZE))
9df628e0
RE
8334 return 0;
8335
e17a4113 8336 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8337 return 1;
8338}
8339
faa95490
DJ
8340/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8341 return the target PC. Otherwise return 0. */
c906108c
SS
8342
8343CORE_ADDR
52f729a7 8344arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8345{
2c02bd72 8346 const char *name;
faa95490 8347 int namelen;
c906108c
SS
8348 CORE_ADDR start_addr;
8349
8350 /* Find the starting address and name of the function containing the PC. */
8351 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8352 {
8353 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8354 check here. */
8355 start_addr = arm_skip_bx_reg (frame, pc);
8356 if (start_addr != 0)
8357 return start_addr;
8358
8359 return 0;
8360 }
c906108c 8361
faa95490
DJ
8362 /* If PC is in a Thumb call or return stub, return the address of the
8363 target PC, which is in a register. The thunk functions are called
8364 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
8365 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8366 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8367 if (startswith (name, "_call_via_")
8368 || startswith (name, "__ARM_call_via_"))
c906108c 8369 {
ed9a39eb
JM
8370 /* Use the name suffix to determine which register contains the
8371 target PC. */
c5aa993b
JM
8372 static char *table[15] =
8373 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8374 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8375 };
c906108c 8376 int regno;
faa95490 8377 int offset = strlen (name) - 2;
c906108c
SS
8378
8379 for (regno = 0; regno <= 14; regno++)
faa95490 8380 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8381 return get_frame_register_unsigned (frame, regno);
c906108c 8382 }
ed9a39eb 8383
faa95490
DJ
8384 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8385 non-interworking calls to foo. We could decode the stubs
8386 to find the target but it's easier to use the symbol table. */
8387 namelen = strlen (name);
8388 if (name[0] == '_' && name[1] == '_'
8389 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8390 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8391 || (namelen > 2 + strlen ("_from_arm")
61012eef 8392 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8393 {
8394 char *target_name;
8395 int target_len = namelen - 2;
3b7344d5 8396 struct bound_minimal_symbol minsym;
faa95490
DJ
8397 struct objfile *objfile;
8398 struct obj_section *sec;
8399
8400 if (name[namelen - 1] == 'b')
8401 target_len -= strlen ("_from_thumb");
8402 else
8403 target_len -= strlen ("_from_arm");
8404
224c3ddb 8405 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8406 memcpy (target_name, name + 2, target_len);
8407 target_name[target_len] = '\0';
8408
8409 sec = find_pc_section (pc);
8410 objfile = (sec == NULL) ? NULL : sec->objfile;
8411 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8412 if (minsym.minsym != NULL)
77e371c0 8413 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8414 else
8415 return 0;
8416 }
8417
c5aa993b 8418 return 0; /* not a stub */
c906108c
SS
8419}
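/* Worked example (illustrative, not part of the original source): for the
   GNU ld veneer "__foo_from_thumb", namelen is 16 and target_len becomes
   16 - 2 - strlen ("_from_thumb") = 3, so target_name is "foo" and the
   address of its minimal symbol is returned.  For the thunk
   "_call_via_r3", the two-character suffix "r3" matches table[3], so the
   target PC is read from register r3 of the given frame.  */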
8420
afd7eef0
RE
8421static void
8422set_arm_command (char *args, int from_tty)
8423{
edefbb7c
AC
8424 printf_unfiltered (_("\
8425\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8426 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8427}
8428
8429static void
8430show_arm_command (char *args, int from_tty)
8431{
26304000 8432 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8433}
8434
28e97307
DJ
8435static void
8436arm_update_current_architecture (void)
fd50bc42 8437{
28e97307 8438 struct gdbarch_info info;
fd50bc42 8439
28e97307 8440 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8441 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8442 return;
fd50bc42 8443
28e97307
DJ
8444 /* Update the architecture. */
8445 gdbarch_info_init (&info);
fd50bc42 8446
28e97307 8447 if (!gdbarch_update_p (info))
9b20d036 8448 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8449}
8450
8451static void
8452set_fp_model_sfunc (char *args, int from_tty,
8453 struct cmd_list_element *c)
8454{
570dc176 8455 int fp_model;
fd50bc42
RE
8456
8457 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8458 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8459 {
aead7601 8460 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8461 break;
8462 }
8463
8464 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8465 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8466 current_fp_model);
8467
28e97307 8468 arm_update_current_architecture ();
fd50bc42
RE
8469}
8470
8471static void
08546159
AC
8472show_fp_model (struct ui_file *file, int from_tty,
8473 struct cmd_list_element *c, const char *value)
fd50bc42 8474{
f5656ead 8475 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8476
28e97307 8477 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8478 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8479 fprintf_filtered (file, _("\
8480The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8481 fp_model_strings[tdep->fp_model]);
8482 else
8483 fprintf_filtered (file, _("\
8484The current ARM floating point model is \"%s\".\n"),
8485 fp_model_strings[arm_fp_model]);
8486}
8487
8488static void
8489arm_set_abi (char *args, int from_tty,
8490 struct cmd_list_element *c)
8491{
570dc176 8492 int arm_abi;
28e97307
DJ
8493
8494 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8495 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8496 {
aead7601 8497 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8498 break;
8499 }
8500
8501 if (arm_abi == ARM_ABI_LAST)
8502 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8503 arm_abi_string);
8504
8505 arm_update_current_architecture ();
8506}
8507
8508static void
8509arm_show_abi (struct ui_file *file, int from_tty,
8510 struct cmd_list_element *c, const char *value)
8511{
f5656ead 8512 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8513
8514 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8515 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8516 fprintf_filtered (file, _("\
8517The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8518 arm_abi_strings[tdep->arm_abi]);
8519 else
8520 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8521 arm_abi_string);
fd50bc42
RE
8522}
8523
0428b8f5
DJ
8524static void
8525arm_show_fallback_mode (struct ui_file *file, int from_tty,
8526 struct cmd_list_element *c, const char *value)
8527{
0963b4bd
MS
8528 fprintf_filtered (file,
8529 _("The current execution mode assumed "
8530 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8531 arm_fallback_mode_string);
8532}
8533
8534static void
8535arm_show_force_mode (struct ui_file *file, int from_tty,
8536 struct cmd_list_element *c, const char *value)
8537{
0963b4bd
MS
8538 fprintf_filtered (file,
8539 _("The current execution mode assumed "
8540 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8541 arm_force_mode_string);
8542}
8543
afd7eef0
RE
8544/* If the user changes the register disassembly style used for info
8545 register and other commands, we have to also switch the style used
8546 in opcodes for disassembly output. This function is run in the "set
8547 arm disassembly" command, and does that. */
bc90b915
FN
8548
8549static void
afd7eef0 8550set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
8551 struct cmd_list_element *c)
8552{
afd7eef0 8553 set_disassembly_style ();
bc90b915
FN
8554}
8555\f
966fbf70 8556/* Return the ARM register name corresponding to register I. */
a208b0cb 8557static const char *
d93859e2 8558arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8559{
58d6951d
DJ
8560 const int num_regs = gdbarch_num_regs (gdbarch);
8561
8562 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8563 && i >= num_regs && i < num_regs + 32)
8564 {
8565 static const char *const vfp_pseudo_names[] = {
8566 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8567 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8568 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8569 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8570 };
8571
8572 return vfp_pseudo_names[i - num_regs];
8573 }
8574
8575 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8576 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8577 {
8578 static const char *const neon_pseudo_names[] = {
8579 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8580 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8581 };
8582
8583 return neon_pseudo_names[i - num_regs - 32];
8584 }
8585
ff6f572f
DJ
8586 if (i >= ARRAY_SIZE (arm_register_names))
8587 /* These registers are only supported on targets which supply
8588 an XML description. */
8589 return "";
8590
966fbf70
RE
8591 return arm_register_names[i];
8592}
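/* Illustrative numbering (not part of the original source): with num_regs
   raw registers, the VFP single-precision pseudos occupy register numbers
   [num_regs, num_regs + 31], so "s0" is num_regs and "s31" is
   num_regs + 31; the NEON quad pseudos follow, so "q0" is num_regs + 32
   and "q15" is num_regs + 47.  */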
8593
bc90b915 8594static void
afd7eef0 8595set_disassembly_style (void)
bc90b915 8596{
123dc839 8597 int current;
bc90b915 8598
123dc839
DJ
8599 /* Find the style that the user wants. */
8600 for (current = 0; current < num_disassembly_options; current++)
8601 if (disassembly_style == valid_disassembly_styles[current])
8602 break;
8603 gdb_assert (current < num_disassembly_options);
bc90b915 8604
94c30b78 8605 /* Synchronize the disassembler. */
bc90b915
FN
8606 set_arm_regname_option (current);
8607}
8608
082fc60d
RE
8609/* Test whether the coff symbol specific value corresponds to a Thumb
8610 function. */
8611
8612static int
8613coff_sym_is_thumb (int val)
8614{
f8bf5763
PM
8615 return (val == C_THUMBEXT
8616 || val == C_THUMBSTAT
8617 || val == C_THUMBEXTFUNC
8618 || val == C_THUMBSTATFUNC
8619 || val == C_THUMBLABEL);
082fc60d
RE
8620}
8621
8622/* arm_coff_make_msymbol_special()
8623 arm_elf_make_msymbol_special()
8624
8625 These functions test whether the COFF or ELF symbol corresponds to
8626 an address in thumb code, and set a "special" bit in a minimal
8627 symbol to indicate that it does. */
8628
34e8f22d 8629static void
082fc60d
RE
8630arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8631{
39d911fc
TP
8632 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8633
8634 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8635 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8636 MSYMBOL_SET_SPECIAL (msym);
8637}
8638
34e8f22d 8639static void
082fc60d
RE
8640arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8641{
8642 if (coff_sym_is_thumb (val))
8643 MSYMBOL_SET_SPECIAL (msym);
8644}
8645
60c5725c 8646static void
c1bd65d0 8647arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8648{
9a3c8263 8649 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8650 unsigned int i;
8651
8652 for (i = 0; i < objfile->obfd->section_count; i++)
8653 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8654}
8655
8656static void
8657arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8658 asymbol *sym)
8659{
8660 const char *name = bfd_asymbol_name (sym);
8661 struct arm_per_objfile *data;
8662 VEC(arm_mapping_symbol_s) **map_p;
8663 struct arm_mapping_symbol new_map_sym;
8664
8665 gdb_assert (name[0] == '$');
8666 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8667 return;
8668
9a3c8263
SM
8669 data = (struct arm_per_objfile *) objfile_data (objfile,
8670 arm_objfile_data_key);
60c5725c
DJ
8671 if (data == NULL)
8672 {
8673 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8674 struct arm_per_objfile);
8675 set_objfile_data (objfile, arm_objfile_data_key, data);
8676 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8677 objfile->obfd->section_count,
8678 VEC(arm_mapping_symbol_s) *);
8679 }
8680 map_p = &data->section_maps[bfd_get_section (sym)->index];
8681
8682 new_map_sym.value = sym->value;
8683 new_map_sym.type = name[1];
8684
8685 /* Assume that most mapping symbols appear in order of increasing
8686 value. If they were randomly distributed, it would be faster to
8687 always push here and then sort at first use. */
8688 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8689 {
8690 struct arm_mapping_symbol *prev_map_sym;
8691
8692 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8693 if (prev_map_sym->value >= sym->value)
8694 {
8695 unsigned int idx;
8696 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8697 arm_compare_mapping_symbols);
8698 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8699 return;
8700 }
8701 }
8702
8703 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8704}
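/* Worked example (illustrative, not part of the original source): the ARM
   ELF mapping symbols "$a", "$t" and "$d" mark the start of ARM code,
   Thumb code and literal data respectively.  Recording a "$t" with value
   0x100 here lets the later mapping-symbol lookup (used when deciding
   whether a PC is Thumb) classify addresses from 0x100 up to the next
   mapping symbol in that section as Thumb code.  */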
8705
756fe439 8706static void
61a1198a 8707arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8708{
9779414d 8709 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8710 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8711
8712 /* If necessary, set the T bit. */
8713 if (arm_apcs_32)
8714 {
9779414d 8715 ULONGEST val, t_bit;
61a1198a 8716 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8717 t_bit = arm_psr_thumb_bit (gdbarch);
8718 if (arm_pc_is_thumb (gdbarch, pc))
8719 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8720 val | t_bit);
756fe439 8721 else
61a1198a 8722 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8723 val & ~t_bit);
756fe439
DJ
8724 }
8725}
123dc839 8726
58d6951d
DJ
8727/* Read the contents of a NEON quad register, by reading from two
8728 double registers. This is used to implement the quad pseudo
8729 registers, and for argument passing in case the quad registers are
8730 missing; vectors are passed in quad registers when using the VFP
8731 ABI, even if a NEON unit is not present. REGNUM is the index of
8732 the quad register, in [0, 15]. */
8733
05d1431c 8734static enum register_status
58d6951d
DJ
8735arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8736 int regnum, gdb_byte *buf)
8737{
8738 char name_buf[4];
8739 gdb_byte reg_buf[8];
8740 int offset, double_regnum;
05d1431c 8741 enum register_status status;
58d6951d 8742
8c042590 8743 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8744 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8745 strlen (name_buf));
8746
8747 /* d0 is always the least significant half of q0. */
8748 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8749 offset = 8;
8750 else
8751 offset = 0;
8752
05d1431c
PA
8753 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8754 if (status != REG_VALID)
8755 return status;
58d6951d
DJ
8756 memcpy (buf + offset, reg_buf, 8);
8757
8758 offset = 8 - offset;
05d1431c
PA
8759 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8760 if (status != REG_VALID)
8761 return status;
58d6951d 8762 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8763
8764 return REG_VALID;
58d6951d
DJ
8765}
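/* Layout example (illustrative, not part of the original source): q0 is
   built from d0 and d1, with d0 always the least significant half.  On a
   little-endian target d0's eight bytes land at buf[0..7] and d1's at
   buf[8..15]; on a big-endian target the two halves are swapped, which is
   what the 8/0 offset selection above implements.  */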
8766
05d1431c 8767static enum register_status
58d6951d
DJ
8768arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8769 int regnum, gdb_byte *buf)
8770{
8771 const int num_regs = gdbarch_num_regs (gdbarch);
8772 char name_buf[4];
8773 gdb_byte reg_buf[8];
8774 int offset, double_regnum;
8775
8776 gdb_assert (regnum >= num_regs);
8777 regnum -= num_regs;
8778
8779 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8780 /* Quad-precision register. */
05d1431c 8781 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8782 else
8783 {
05d1431c
PA
8784 enum register_status status;
8785
58d6951d
DJ
8786 /* Single-precision register. */
8787 gdb_assert (regnum < 32);
8788
8789 /* s0 is always the least significant half of d0. */
8790 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8791 offset = (regnum & 1) ? 0 : 4;
8792 else
8793 offset = (regnum & 1) ? 4 : 0;
8794
8c042590 8795 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8796 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8797 strlen (name_buf));
8798
05d1431c
PA
8799 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8800 if (status == REG_VALID)
8801 memcpy (buf, reg_buf + offset, 4);
8802 return status;
58d6951d
DJ
8803 }
8804}
8805
8806/* Store the contents of BUF to a NEON quad register, by writing to
8807 two double registers. This is used to implement the quad pseudo
8808 registers, and for argument passing in case the quad registers are
8809 missing; vectors are passed in quad registers when using the VFP
8810 ABI, even if a NEON unit is not present. REGNUM is the index
8811 of the quad register, in [0, 15]. */
8812
8813static void
8814arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8815 int regnum, const gdb_byte *buf)
8816{
8817 char name_buf[4];
58d6951d
DJ
8818 int offset, double_regnum;
8819
8c042590 8820 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8821 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8822 strlen (name_buf));
8823
8824 /* d0 is always the least significant half of q0. */
8825 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8826 offset = 8;
8827 else
8828 offset = 0;
8829
8830 regcache_raw_write (regcache, double_regnum, buf + offset);
8831 offset = 8 - offset;
8832 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8833}
8834
8835static void
8836arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8837 int regnum, const gdb_byte *buf)
8838{
8839 const int num_regs = gdbarch_num_regs (gdbarch);
8840 char name_buf[4];
8841 gdb_byte reg_buf[8];
8842 int offset, double_regnum;
8843
8844 gdb_assert (regnum >= num_regs);
8845 regnum -= num_regs;
8846
8847 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8848 /* Quad-precision register. */
8849 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8850 else
8851 {
8852 /* Single-precision register. */
8853 gdb_assert (regnum < 32);
8854
8855 /* s0 is always the least significant half of d0. */
8856 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8857 offset = (regnum & 1) ? 0 : 4;
8858 else
8859 offset = (regnum & 1) ? 4 : 0;
8860
8c042590 8861 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8862 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8863 strlen (name_buf));
8864
8865 regcache_raw_read (regcache, double_regnum, reg_buf);
8866 memcpy (reg_buf + offset, buf, 4);
8867 regcache_raw_write (regcache, double_regnum, reg_buf);
8868 }
8869}
8870
123dc839
DJ
8871static struct value *
8872value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8873{
9a3c8263 8874 const int *reg_p = (const int *) baton;
123dc839
DJ
8875 return value_of_register (*reg_p, frame);
8876}
97e03143 8877\f
70f80edf
JT
8878static enum gdb_osabi
8879arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8880{
2af48f68 8881 unsigned int elfosabi;
70f80edf 8882 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8883
70f80edf 8884 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8885
28e97307
DJ
8886 if (elfosabi == ELFOSABI_ARM)
8887 /* GNU tools use this value. Check note sections in this case,
8888 as well. */
8889 bfd_map_over_sections (abfd,
8890 generic_elf_osabi_sniff_abi_tag_sections,
8891 &osabi);
97e03143 8892
28e97307 8893 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8894 return osabi;
97e03143
RE
8895}
8896
54483882
YQ
8897static int
8898arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8899 struct reggroup *group)
8900{
2c291032
YQ
 8901 /* FPS register's type is INT, but belongs to float_reggroup. Besides
 8902 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8903 all_reggroup, of course. */
54483882 8904 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8905 return (group == float_reggroup
8906 || group == save_reggroup
8907 || group == restore_reggroup
8908 || group == all_reggroup);
54483882
YQ
8909 else
8910 return default_register_reggroup_p (gdbarch, regnum, group);
8911}
8912
25f8c692
JL
8913\f
8914/* For backward-compatibility we allow two 'g' packet lengths with
8915 the remote protocol depending on whether FPA registers are
8916 supplied. M-profile targets do not have FPA registers, but some
8917 stubs already exist in the wild which use a 'g' packet which
8918 supplies them albeit with dummy values. The packet format which
8919 includes FPA registers should be considered deprecated for
8920 M-profile targets. */
8921
8922static void
8923arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8924{
8925 if (gdbarch_tdep (gdbarch)->is_m)
8926 {
8927 /* If we know from the executable this is an M-profile target,
8928 cater for remote targets whose register set layout is the
8929 same as the FPA layout. */
8930 register_remote_g_packet_guess (gdbarch,
03145bf4 8931 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8932 (16 * INT_REGISTER_SIZE)
8933 + (8 * FP_REGISTER_SIZE)
8934 + (2 * INT_REGISTER_SIZE),
8935 tdesc_arm_with_m_fpa_layout);
8936
8937 /* The regular M-profile layout. */
8938 register_remote_g_packet_guess (gdbarch,
8939 /* r0-r12,sp,lr,pc; xpsr */
8940 (16 * INT_REGISTER_SIZE)
8941 + INT_REGISTER_SIZE,
8942 tdesc_arm_with_m);
3184d3f9
JL
8943
8944 /* M-profile plus M4F VFP. */
8945 register_remote_g_packet_guess (gdbarch,
8946 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8947 (16 * INT_REGISTER_SIZE)
8948 + (16 * VFP_REGISTER_SIZE)
8949 + (2 * INT_REGISTER_SIZE),
8950 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8951 }
8952
8953 /* Otherwise we don't have a useful guess. */
8954}
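/* Size arithmetic (illustrative, not part of the original source),
   assuming the usual INT_REGISTER_SIZE of 4, FP_REGISTER_SIZE of 12 and
   VFP_REGISTER_SIZE of 8: the FPA-layout guess above comes to
   16*4 + 8*12 + 2*4 = 168 bytes, the plain M-profile guess to
   16*4 + 4 = 68 bytes, and the M4F VFP guess to
   16*4 + 16*8 + 2*4 = 200 bytes.  */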
8955
7eb89530
YQ
8956/* Implement the code_of_frame_writable gdbarch method. */
8957
8958static int
8959arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8960{
8961 if (gdbarch_tdep (gdbarch)->is_m
8962 && get_frame_type (frame) == SIGTRAMP_FRAME)
8963 {
 8964 /* M-profile exception frames return to some magic PCs, which
 8965 aren't writable at all. */
8966 return 0;
8967 }
8968 else
8969 return 1;
8970}
8971
70f80edf 8972\f
da3c6d4a
MS
8973/* Initialize the current architecture based on INFO. If possible,
8974 re-use an architecture from ARCHES, which is a list of
8975 architectures already created during this debugging session.
97e03143 8976
da3c6d4a
MS
8977 Called e.g. at program startup, when reading a core file, and when
8978 reading a binary file. */
97e03143 8979
39bbf761
RE
8980static struct gdbarch *
8981arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8982{
97e03143 8983 struct gdbarch_tdep *tdep;
39bbf761 8984 struct gdbarch *gdbarch;
28e97307
DJ
8985 struct gdbarch_list *best_arch;
8986 enum arm_abi_kind arm_abi = arm_abi_global;
8987 enum arm_float_model fp_model = arm_fp_model;
123dc839 8988 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8989 int i, is_m = 0;
330c6ca9 8990 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8991 int have_wmmx_registers = 0;
58d6951d 8992 int have_neon = 0;
ff6f572f 8993 int have_fpa_registers = 1;
9779414d
DJ
8994 const struct target_desc *tdesc = info.target_desc;
8995
8996 /* If we have an object to base this architecture on, try to determine
8997 its ABI. */
8998
8999 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9000 {
9001 int ei_osabi, e_flags;
9002
9003 switch (bfd_get_flavour (info.abfd))
9004 {
9005 case bfd_target_aout_flavour:
9006 /* Assume it's an old APCS-style ABI. */
9007 arm_abi = ARM_ABI_APCS;
9008 break;
9009
9010 case bfd_target_coff_flavour:
9011 /* Assume it's an old APCS-style ABI. */
9012 /* XXX WinCE? */
9013 arm_abi = ARM_ABI_APCS;
9014 break;
9015
9016 case bfd_target_elf_flavour:
9017 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9018 e_flags = elf_elfheader (info.abfd)->e_flags;
9019
9020 if (ei_osabi == ELFOSABI_ARM)
9021 {
9022 /* GNU tools used to use this value, but do not for EABI
9023 objects. There's nowhere to tag an EABI version
9024 anyway, so assume APCS. */
9025 arm_abi = ARM_ABI_APCS;
9026 }
d403db27 9027 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
9028 {
9029 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9030 int attr_arch, attr_profile;
9031
9032 switch (eabi_ver)
9033 {
9034 case EF_ARM_EABI_UNKNOWN:
9035 /* Assume GNU tools. */
9036 arm_abi = ARM_ABI_APCS;
9037 break;
9038
9039 case EF_ARM_EABI_VER4:
9040 case EF_ARM_EABI_VER5:
9041 arm_abi = ARM_ABI_AAPCS;
9042 /* EABI binaries default to VFP float ordering.
9043 They may also contain build attributes that can
9044 be used to identify if the VFP argument-passing
9045 ABI is in use. */
9046 if (fp_model == ARM_FLOAT_AUTO)
9047 {
9048#ifdef HAVE_ELF
9049 switch (bfd_elf_get_obj_attr_int (info.abfd,
9050 OBJ_ATTR_PROC,
9051 Tag_ABI_VFP_args))
9052 {
b35b0298 9053 case AEABI_VFP_args_base:
9779414d
DJ
9054 /* "The user intended FP parameter/result
9055 passing to conform to AAPCS, base
9056 variant". */
9057 fp_model = ARM_FLOAT_SOFT_VFP;
9058 break;
b35b0298 9059 case AEABI_VFP_args_vfp:
9779414d
DJ
9060 /* "The user intended FP parameter/result
9061 passing to conform to AAPCS, VFP
9062 variant". */
9063 fp_model = ARM_FLOAT_VFP;
9064 break;
b35b0298 9065 case AEABI_VFP_args_toolchain:
9779414d
DJ
9066 /* "The user intended FP parameter/result
9067 passing to conform to tool chain-specific
9068 conventions" - we don't know any such
9069 conventions, so leave it as "auto". */
9070 break;
b35b0298 9071 case AEABI_VFP_args_compatible:
5c294fee
TG
9072 /* "Code is compatible with both the base
9073 and VFP variants; the user did not permit
9074 non-variadic functions to pass FP
9075 parameters/results" - leave it as
9076 "auto". */
9077 break;
9779414d
DJ
9078 default:
9079 /* Attribute value not mentioned in the
5c294fee 9080 November 2012 ABI, so leave it as
9779414d
DJ
9081 "auto". */
9082 break;
9083 }
9084#else
9085 fp_model = ARM_FLOAT_SOFT_VFP;
9086#endif
9087 }
9088 break;
9089
9090 default:
9091 /* Leave it as "auto". */
9092 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9093 break;
9094 }
9095
9096#ifdef HAVE_ELF
9097 /* Detect M-profile programs. This only works if the
9098 executable file includes build attributes; GCC does
9099 copy them to the executable, but e.g. RealView does
9100 not. */
9101 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9102 Tag_CPU_arch);
0963b4bd
MS
9103 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9104 OBJ_ATTR_PROC,
9779414d
DJ
9105 Tag_CPU_arch_profile);
9106 /* GCC specifies the profile for v6-M; RealView only
9107 specifies the profile for architectures starting with
9108 V7 (as opposed to architectures with a tag
9109 numerically greater than TAG_CPU_ARCH_V7). */
9110 if (!tdesc_has_registers (tdesc)
9111 && (attr_arch == TAG_CPU_ARCH_V6_M
9112 || attr_arch == TAG_CPU_ARCH_V6S_M
9113 || attr_profile == 'M'))
25f8c692 9114 is_m = 1;
9779414d
DJ
9115#endif
9116 }
9117
9118 if (fp_model == ARM_FLOAT_AUTO)
9119 {
9120 int e_flags = elf_elfheader (info.abfd)->e_flags;
9121
9122 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9123 {
9124 case 0:
9125 /* Leave it as "auto". Strictly speaking this case
9126 means FPA, but almost nobody uses that now, and
9127 many toolchains fail to set the appropriate bits
9128 for the floating-point model they use. */
9129 break;
9130 case EF_ARM_SOFT_FLOAT:
9131 fp_model = ARM_FLOAT_SOFT_FPA;
9132 break;
9133 case EF_ARM_VFP_FLOAT:
9134 fp_model = ARM_FLOAT_VFP;
9135 break;
9136 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9137 fp_model = ARM_FLOAT_SOFT_VFP;
9138 break;
9139 }
9140 }
9141
9142 if (e_flags & EF_ARM_BE8)
9143 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9144
9145 break;
9146
9147 default:
9148 /* Leave it as "auto". */
9149 break;
9150 }
9151 }
123dc839
DJ
9152
9153 /* Check any target description for validity. */
9779414d 9154 if (tdesc_has_registers (tdesc))
123dc839
DJ
9155 {
9156 /* For most registers we require GDB's default names; but also allow
9157 the numeric names for sp / lr / pc, as a convenience. */
9158 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9159 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9160 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9161
9162 const struct tdesc_feature *feature;
58d6951d 9163 int valid_p;
123dc839 9164
9779414d 9165 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9166 "org.gnu.gdb.arm.core");
9167 if (feature == NULL)
9779414d
DJ
9168 {
9169 feature = tdesc_find_feature (tdesc,
9170 "org.gnu.gdb.arm.m-profile");
9171 if (feature == NULL)
9172 return NULL;
9173 else
9174 is_m = 1;
9175 }
123dc839
DJ
9176
9177 tdesc_data = tdesc_data_alloc ();
9178
9179 valid_p = 1;
9180 for (i = 0; i < ARM_SP_REGNUM; i++)
9181 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9182 arm_register_names[i]);
9183 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9184 ARM_SP_REGNUM,
9185 arm_sp_names);
9186 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9187 ARM_LR_REGNUM,
9188 arm_lr_names);
9189 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9190 ARM_PC_REGNUM,
9191 arm_pc_names);
9779414d
DJ
9192 if (is_m)
9193 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9194 ARM_PS_REGNUM, "xpsr");
9195 else
9196 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9197 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9198
9199 if (!valid_p)
9200 {
9201 tdesc_data_cleanup (tdesc_data);
9202 return NULL;
9203 }
9204
9779414d 9205 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9206 "org.gnu.gdb.arm.fpa");
9207 if (feature != NULL)
9208 {
9209 valid_p = 1;
9210 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9211 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9212 arm_register_names[i]);
9213 if (!valid_p)
9214 {
9215 tdesc_data_cleanup (tdesc_data);
9216 return NULL;
9217 }
9218 }
ff6f572f
DJ
9219 else
9220 have_fpa_registers = 0;
9221
9779414d 9222 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9223 "org.gnu.gdb.xscale.iwmmxt");
9224 if (feature != NULL)
9225 {
9226 static const char *const iwmmxt_names[] = {
9227 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9228 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9229 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9230 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9231 };
9232
9233 valid_p = 1;
9234 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9235 valid_p
9236 &= tdesc_numbered_register (feature, tdesc_data, i,
9237 iwmmxt_names[i - ARM_WR0_REGNUM]);
9238
9239 /* Check for the control registers, but do not fail if they
9240 are missing. */
9241 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9242 tdesc_numbered_register (feature, tdesc_data, i,
9243 iwmmxt_names[i - ARM_WR0_REGNUM]);
9244
9245 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9246 valid_p
9247 &= tdesc_numbered_register (feature, tdesc_data, i,
9248 iwmmxt_names[i - ARM_WR0_REGNUM]);
9249
9250 if (!valid_p)
9251 {
9252 tdesc_data_cleanup (tdesc_data);
9253 return NULL;
9254 }
a56cc1ce
YQ
9255
9256 have_wmmx_registers = 1;
ff6f572f 9257 }
58d6951d
DJ
9258
9259 /* If we have a VFP unit, check whether the single precision registers
9260 are present. If not, then we will synthesize them as pseudo
9261 registers. */
9779414d 9262 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9263 "org.gnu.gdb.arm.vfp");
9264 if (feature != NULL)
9265 {
9266 static const char *const vfp_double_names[] = {
9267 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9268 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9269 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9270 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9271 };
9272
9273 /* Require the double precision registers. There must be either
9274 16 or 32. */
9275 valid_p = 1;
9276 for (i = 0; i < 32; i++)
9277 {
9278 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9279 ARM_D0_REGNUM + i,
9280 vfp_double_names[i]);
9281 if (!valid_p)
9282 break;
9283 }
2b9e5ea6
UW
9284 if (!valid_p && i == 16)
9285 valid_p = 1;
58d6951d 9286
2b9e5ea6
UW
9287 /* Also require FPSCR. */
9288 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9289 ARM_FPSCR_REGNUM, "fpscr");
9290 if (!valid_p)
58d6951d
DJ
9291 {
9292 tdesc_data_cleanup (tdesc_data);
9293 return NULL;
9294 }
9295
9296 if (tdesc_unnumbered_register (feature, "s0") == 0)
9297 have_vfp_pseudos = 1;
9298
330c6ca9 9299 vfp_register_count = i;
58d6951d
DJ
9300
9301 /* If we have VFP, also check for NEON. The architecture allows
9302 NEON without VFP (integer vector operations only), but GDB
9303 does not support that. */
9779414d 9304 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9305 "org.gnu.gdb.arm.neon");
9306 if (feature != NULL)
9307 {
9308 /* NEON requires 32 double-precision registers. */
9309 if (i != 32)
9310 {
9311 tdesc_data_cleanup (tdesc_data);
9312 return NULL;
9313 }
9314
9315 /* If there are quad registers defined by the stub, use
9316 their type; otherwise (normally) provide them with
9317 the default type. */
9318 if (tdesc_unnumbered_register (feature, "q0") == 0)
9319 have_neon_pseudos = 1;
9320
9321 have_neon = 1;
9322 }
9323 }
123dc839 9324 }
39bbf761 9325
28e97307
DJ
9326 /* If there is already a candidate, use it. */
9327 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9328 best_arch != NULL;
9329 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9330 {
b8926edc
DJ
9331 if (arm_abi != ARM_ABI_AUTO
9332 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9333 continue;
9334
b8926edc
DJ
9335 if (fp_model != ARM_FLOAT_AUTO
9336 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9337 continue;
9338
58d6951d
DJ
9339 /* There are various other properties in tdep that we do not
9340 need to check here: those derived from a target description,
9341 since gdbarches with a different target description are
9342 automatically disqualified. */
9343
9779414d
DJ
9344 /* Do check is_m, though, since it might come from the binary. */
9345 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9346 continue;
9347
28e97307
DJ
9348 /* Found a match. */
9349 break;
9350 }
97e03143 9351
28e97307 9352 if (best_arch != NULL)
123dc839
DJ
9353 {
9354 if (tdesc_data != NULL)
9355 tdesc_data_cleanup (tdesc_data);
9356 return best_arch->gdbarch;
9357 }
28e97307 9358
8d749320 9359 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9360 gdbarch = gdbarch_alloc (&info, tdep);
9361
28e97307
DJ
9362 /* Record additional information about the architecture we are defining.
9363 These are gdbarch discriminators, like the OSABI. */
9364 tdep->arm_abi = arm_abi;
9365 tdep->fp_model = fp_model;
9779414d 9366 tdep->is_m = is_m;
ff6f572f 9367 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9368 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9369 gdb_assert (vfp_register_count == 0
9370 || vfp_register_count == 16
9371 || vfp_register_count == 32);
9372 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9373 tdep->have_vfp_pseudos = have_vfp_pseudos;
9374 tdep->have_neon_pseudos = have_neon_pseudos;
9375 tdep->have_neon = have_neon;
08216dd7 9376
25f8c692
JL
9377 arm_register_g_packet_guesses (gdbarch);
9378
08216dd7 9379 /* Breakpoints. */
9d4fde75 9380 switch (info.byte_order_for_code)
67255d04
RE
9381 {
9382 case BFD_ENDIAN_BIG:
66e810cd
RE
9383 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9384 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9385 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9386 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9387
67255d04
RE
9388 break;
9389
9390 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9391 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9392 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9393 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9394 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9395
67255d04
RE
9396 break;
9397
9398 default:
9399 internal_error (__FILE__, __LINE__,
edefbb7c 9400 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9401 }
9402
d7b486e7
RE
9403 /* On ARM targets char defaults to unsigned. */
9404 set_gdbarch_char_signed (gdbarch, 0);
9405
cca44b1b
JB
9406 /* Note: for displaced stepping, this includes the breakpoint, and one word
 9407 of additional scratch space. This setting isn't used for anything besides
9408 displaced stepping at present. */
9409 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9410
9df628e0 9411 /* This should be low enough for everything. */
97e03143 9412 tdep->lowest_pc = 0x20;
94c30b78 9413 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9414
7c00367c
MK
9415 /* The default, for both APCS and AAPCS, is to return small
9416 structures in registers. */
9417 tdep->struct_return = reg_struct_return;
9418
2dd604e7 9419 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9420 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9421
7eb89530
YQ
9422 if (is_m)
9423 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9424
756fe439
DJ
9425 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9426
148754e5 9427 /* Frame handling. */
a262aec2 9428 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9429 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9430 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9431
eb5492fa 9432 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9433
34e8f22d 9434 /* Address manipulation. */
34e8f22d
RE
9435 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9436
34e8f22d
RE
9437 /* Advance PC across function entry code. */
9438 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9439
c9cf6e20
MG
9440 /* Detect whether PC is at a point where the stack has been destroyed. */
9441 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9442
190dce09
UW
9443 /* Skip trampolines. */
9444 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9445
34e8f22d
RE
9446 /* The stack grows downward. */
9447 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9448
9449 /* Breakpoint manipulation. */
04180708
YQ
9450 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9451 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
833b7ab5
YQ
9452 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9453 arm_breakpoint_kind_from_current_state);
34e8f22d
RE
9454
9455 /* Information about registers, etc. */
34e8f22d
RE
9456 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9457 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9458 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9459 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9460 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9461
ff6f572f
DJ
9462 /* This "info float" is FPA-specific. Use the generic version if we
9463 do not have FPA. */
9464 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9465 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9466
26216b98 9467 /* Internal <-> external register number maps. */
ff6f572f 9468 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9469 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9470
34e8f22d
RE
9471 set_gdbarch_register_name (gdbarch, arm_register_name);
9472
9473 /* Returning results. */
2af48f68 9474 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9475
03d48a7d
RE
9476 /* Disassembly. */
9477 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9478
34e8f22d
RE
9479 /* Minsymbol frobbing. */
9480 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9481 set_gdbarch_coff_make_msymbol_special (gdbarch,
9482 arm_coff_make_msymbol_special);
60c5725c 9483 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9484
f9d67f43
DJ
9485 /* Thumb-2 IT block support. */
9486 set_gdbarch_adjust_breakpoint_address (gdbarch,
9487 arm_adjust_breakpoint_address);
9488
0d5de010
DJ
9489 /* Virtual tables. */
9490 set_gdbarch_vbit_in_delta (gdbarch, 1);
9491
97e03143 9492 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9493 gdbarch_init_osabi (info, gdbarch);
97e03143 9494
b39cc962
DJ
9495 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9496
eb5492fa 9497 /* Add some default predicates. */
2ae28aa9
YQ
9498 if (is_m)
9499 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9500 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9501 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9502 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9503 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9504 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9505
97e03143
RE
9506 /* Now we have tuned the configuration, set a few final things,
9507 based on what the OS ABI has told us. */
9508
b8926edc
DJ
9509 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9510 binaries are always marked. */
9511 if (tdep->arm_abi == ARM_ABI_AUTO)
9512 tdep->arm_abi = ARM_ABI_APCS;
9513
e3039479
UW
9514 /* Watchpoints are not steppable. */
9515 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9516
b8926edc
DJ
9517 /* We used to default to FPA for generic ARM, but almost nobody
9518 uses that now, and we now provide a way for the user to force
9519 the model. So default to the most useful variant. */
9520 if (tdep->fp_model == ARM_FLOAT_AUTO)
9521 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9522
9df628e0
RE
9523 if (tdep->jb_pc >= 0)
9524 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9525
08216dd7 9526 /* Floating point sizes and format. */
8da61cc4 9527 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9528 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9529 {
8da61cc4
DJ
9530 set_gdbarch_double_format
9531 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9532 set_gdbarch_long_double_format
9533 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9534 }
9535 else
9536 {
9537 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9538 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9539 }
9540
58d6951d
DJ
9541 if (have_vfp_pseudos)
9542 {
9543 /* NOTE: These are the only pseudo registers used by
9544 the ARM target at the moment. If more are added, a
9545 little more care in numbering will be needed. */
9546
9547 int num_pseudos = 32;
9548 if (have_neon_pseudos)
9549 num_pseudos += 16;
9550 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9551 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9552 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9553 }
9554
123dc839 9555 if (tdesc_data)
58d6951d
DJ
9556 {
9557 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9558
9779414d 9559 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9560
9561 /* Override tdesc_register_type to adjust the types of VFP
9562 registers for NEON. */
9563 set_gdbarch_register_type (gdbarch, arm_register_type);
9564 }
123dc839
DJ
9565
9566 /* Add standard register aliases. We add aliases even for those
 9567 names which are used by the current architecture - it's simpler,
9568 and does no harm, since nothing ever lists user registers. */
9569 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9570 user_reg_add (gdbarch, arm_register_aliases[i].name,
9571 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9572
39bbf761
RE
9573 return gdbarch;
9574}
9575
97e03143 9576static void
2af46ca0 9577arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9578{
2af46ca0 9579 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9580
9581 if (tdep == NULL)
9582 return;
9583
edefbb7c 9584 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9585 (unsigned long) tdep->lowest_pc);
9586}
9587
a78f21af
AC
9588extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9589
c906108c 9590void
ed9a39eb 9591_initialize_arm_tdep (void)
c906108c 9592{
bc90b915
FN
9593 struct ui_file *stb;
9594 long length;
53904c9e
AC
9595 const char *setname;
9596 const char *setdesc;
4bd7b427 9597 const char *const *regnames;
bec2ab5a 9598 int i;
bc90b915 9599 static char *helptext;
edefbb7c
AC
9600 char regdesc[1024], *rdptr = regdesc;
9601 size_t rest = sizeof (regdesc);
085dd6e6 9602
42cf1509 9603 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9604
60c5725c 9605 arm_objfile_data_key
c1bd65d0 9606 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9607
0e9e9abd
UW
9608 /* Add ourselves to objfile event chain. */
9609 observer_attach_new_objfile (arm_exidx_new_objfile);
9610 arm_exidx_data_key
9611 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9612
70f80edf
JT
9613 /* Register an ELF OS ABI sniffer for ARM binaries. */
9614 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9615 bfd_target_elf_flavour,
9616 arm_elf_osabi_sniffer);
9617
9779414d
DJ
9618 /* Initialize the standard target descriptions. */
9619 initialize_tdesc_arm_with_m ();
25f8c692 9620 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9621 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9622 initialize_tdesc_arm_with_iwmmxt ();
9623 initialize_tdesc_arm_with_vfpv2 ();
9624 initialize_tdesc_arm_with_vfpv3 ();
9625 initialize_tdesc_arm_with_neon ();
9779414d 9626
94c30b78 9627 /* Get the number of possible sets of register names defined in opcodes. */
afd7eef0
RE
9628 num_disassembly_options = get_arm_regname_num_options ();
9629
9630 /* Add root prefix command for all "set arm"/"show arm" commands. */
9631 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9632 _("Various ARM-specific commands."),
afd7eef0
RE
9633 &setarmcmdlist, "set arm ", 0, &setlist);
9634
9635 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9636 _("Various ARM-specific commands."),
afd7eef0 9637 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9638
94c30b78 9639 /* Sync the opcode insn printer with our register viewer. */
bc90b915 9640 parse_arm_disassembler_option ("reg-names-std");
c5aa993b 9641
eefe576e
AC
9642 /* Initialize the array that will be passed to
9643 add_setshow_enum_cmd(). */
8d749320
SM
9644 valid_disassembly_styles = XNEWVEC (const char *,
9645 num_disassembly_options + 1);
afd7eef0 9646 for (i = 0; i < num_disassembly_options; i++)
bc90b915 9647 {
bec2ab5a 9648 get_arm_regnames (i, &setname, &setdesc, &regnames);
afd7eef0 9649 valid_disassembly_styles[i] = setname;
edefbb7c
AC
9650 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9651 rdptr += length;
9652 rest -= length;
123dc839
DJ
9653 /* When we find the default names, tell the disassembler to use
9654 them. */
bc90b915
FN
9655 if (!strcmp (setname, "std"))
9656 {
afd7eef0 9657 disassembly_style = setname;
bc90b915
FN
9658 set_arm_regname_option (i);
9659 }
9660 }
94c30b78 9661 /* Mark the end of valid options. */
afd7eef0 9662 valid_disassembly_styles[num_disassembly_options] = NULL;
c906108c 9663
edefbb7c
AC
9664 /* Create the help text. */
9665 stb = mem_fileopen ();
9666 fprintf_unfiltered (stb, "%s%s%s",
9667 _("The valid values are:\n"),
9668 regdesc,
9669 _("The default is \"std\"."));
759ef836 9670 helptext = ui_file_xstrdup (stb, NULL);
bc90b915 9671 ui_file_delete (stb);
ed9a39eb 9672
edefbb7c
AC
9673 add_setshow_enum_cmd("disassembler", no_class,
9674 valid_disassembly_styles, &disassembly_style,
9675 _("Set the disassembly style."),
9676 _("Show the disassembly style."),
9677 helptext,
2c5b56ce 9678 set_disassembly_style_sfunc,
0963b4bd
MS
9679 NULL, /* FIXME: i18n: The disassembly style is
9680 \"%s\". */
7376b4c2 9681 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9682
9683 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9684 _("Set usage of ARM 32-bit mode."),
9685 _("Show usage of ARM 32-bit mode."),
9686 _("When off, a 26-bit PC will be used."),
2c5b56ce 9687 NULL,
0963b4bd
MS
9688 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9689 mode is %s. */
26304000 9690 &setarmcmdlist, &showarmcmdlist);
c906108c 9691
fd50bc42 9692 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9693 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9694 _("Set the floating point type."),
9695 _("Show the floating point type."),
9696 _("auto - Determine the FP typefrom the OS-ABI.\n\
9697softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9698fpa - FPA co-processor (GCC compiled).\n\
9699softvfp - Software FP with pure-endian doubles.\n\
9700vfp - VFP co-processor."),
edefbb7c 9701 set_fp_model_sfunc, show_fp_model,
7376b4c2 9702 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9703
28e97307
DJ
9704 /* Add a command to allow the user to force the ABI. */
9705 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9706 _("Set the ABI."),
9707 _("Show the ABI."),
9708 NULL, arm_set_abi, arm_show_abi,
9709 &setarmcmdlist, &showarmcmdlist);
9710
0428b8f5
DJ
9711 /* Add two commands to allow the user to force the assumed
9712 execution mode. */
9713 add_setshow_enum_cmd ("fallback-mode", class_support,
9714 arm_mode_strings, &arm_fallback_mode_string,
9715 _("Set the mode assumed when symbols are unavailable."),
9716 _("Show the mode assumed when symbols are unavailable."),
9717 NULL, NULL, arm_show_fallback_mode,
9718 &setarmcmdlist, &showarmcmdlist);
9719 add_setshow_enum_cmd ("force-mode", class_support,
9720 arm_mode_strings, &arm_force_mode_string,
9721 _("Set the mode assumed even when symbols are available."),
9722 _("Show the mode assumed even when symbols are available."),
9723 NULL, NULL, arm_show_force_mode,
9724 &setarmcmdlist, &showarmcmdlist);
9725
6529d2dd 9726 /* Debugging flag. */
edefbb7c
AC
9727 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9728 _("Set ARM debugging."),
9729 _("Show ARM debugging."),
9730 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9731 NULL,
7915a72c 9732 NULL, /* FIXME: i18n: "ARM debugging is %s. */
26304000 9733 &setdebuglist, &showdebuglist);
c906108c 9734}
72508ac0
PO
9735
9736/* ARM-reversible process record data structures. */
9737
9738#define ARM_INSN_SIZE_BYTES 4
9739#define THUMB_INSN_SIZE_BYTES 2
9740#define THUMB2_INSN_SIZE_BYTES 4
9741
9742
71e396f9
LM
9743/* Position of the bit within a 32-bit ARM instruction
9744 that defines whether the instruction is a load or store. */
72508ac0
PO
9745#define INSN_S_L_BIT_NUM 20
9746
9747#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9748 do \
9749 { \
9750 unsigned int reg_len = LENGTH; \
9751 if (reg_len) \
9752 { \
9753 REGS = XNEWVEC (uint32_t, reg_len); \
9754 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9755 } \
9756 } \
9757 while (0)
9758
9759#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9760 do \
9761 { \
9762 unsigned int mem_len = LENGTH; \
9763 if (mem_len) \
9764 { \
9765 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9766 memcpy(&MEMS->len, &RECORD_BUF[0], \
9767 sizeof(struct arm_mem_r) * LENGTH); \
9768 } \
9769 } \
9770 while (0)
9771
 9772/* Checks whether the insn has already been recorded (i.e. has register or memory records); a boolean expression. */
9773#define INSN_RECORDED(ARM_RECORD) \
9774 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9775
9776/* ARM memory record structure. */
9777struct arm_mem_r
9778{
9779 uint32_t len; /* Record length. */
bfbbec00 9780 uint32_t addr; /* Memory address. */
72508ac0
PO
9781};
9782
9783/* ARM instruction record contains opcode of current insn
9784 and execution state (before entry to decode_insn()),
9785 contains list of to-be-modified registers and
9786 memory blocks (on return from decode_insn()). */
9787
9788typedef struct insn_decode_record_t
9789{
9790 struct gdbarch *gdbarch;
9791 struct regcache *regcache;
9792 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9793 uint32_t arm_insn; /* Should accommodate thumb. */
9794 uint32_t cond; /* Condition code. */
9795 uint32_t opcode; /* Insn opcode. */
9796 uint32_t decode; /* Insn decode bits. */
9797 uint32_t mem_rec_count; /* No of mem records. */
9798 uint32_t reg_rec_count; /* No of reg records. */
9799 uint32_t *arm_regs; /* Registers to be saved for this record. */
9800 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9801} insn_decode_record;
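/* Usage sketch (illustrative only, not part of the original source): a
   decoder fills record_buf[]/record_buf_mem[] and the two counters, then
   attaches them to the record with the REG_ALLOC/MEM_ALLOC macros defined
   above.  The register number and address below are hypothetical.  */
#if 0
static void
arm_record_alloc_example (void)
{
  insn_decode_record sample;
  uint32_t record_buf[8], record_buf_mem[8];

  memset (&sample, 0, sizeof (sample));
  record_buf[0] = ARM_PS_REGNUM;   /* One register will be clobbered.  */
  sample.reg_rec_count = 1;
  record_buf_mem[0] = 4;           /* One 4-byte store...  */
  record_buf_mem[1] = 0x8000;      /* ...at a hypothetical address.  */
  sample.mem_rec_count = 1;

  REG_ALLOC (sample.arm_regs, sample.reg_rec_count, record_buf);
  MEM_ALLOC (sample.arm_mems, sample.mem_rec_count, record_buf_mem);
}
#endif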
9802
9803
9804/* Checks ARM SBZ and SBO mandatory fields. */
9805
9806static int
9807sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9808{
9809 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9810
9811 if (!len)
9812 return 1;
9813
9814 if (!sbo)
9815 ones = ~ones;
9816
9817 while (ones)
9818 {
9819 if (!(ones & sbo))
9820 {
9821 return 0;
9822 }
9823 ones = ones >> 1;
9824 }
9825 return 1;
9826}
9827
c6ec2b30
OJ
9828enum arm_record_result
9829{
9830 ARM_RECORD_SUCCESS = 0,
9831 ARM_RECORD_FAILURE = 1
9832};
9833
72508ac0
PO
9834typedef enum
9835{
9836 ARM_RECORD_STRH=1,
9837 ARM_RECORD_STRD
9838} arm_record_strx_t;
9839
9840typedef enum
9841{
9842 ARM_RECORD=1,
9843 THUMB_RECORD,
9844 THUMB2_RECORD
9845} record_type_t;
9846
9847
9848static int
9849arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9850 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9851{
9852
9853 struct regcache *reg_cache = arm_insn_r->regcache;
9854 ULONGEST u_regval[2]= {0};
9855
9856 uint32_t reg_src1 = 0, reg_src2 = 0;
9857 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9858
9859 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9860 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9861
9862 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9863 {
9864 /* 1) Handle misc store, immediate offset. */
9865 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9866 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9867 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9868 regcache_raw_read_unsigned (reg_cache, reg_src1,
9869 &u_regval[0]);
9870 if (ARM_PC_REGNUM == reg_src1)
9871 {
 9872 /* If R15 was used as Rn, the operand value is the current PC+8. */
9873 u_regval[0] = u_regval[0] + 8;
9874 }
9875 offset_8 = (immed_high << 4) | immed_low;
9876 /* Calculate target store address. */
9877 if (14 == arm_insn_r->opcode)
9878 {
9879 tgt_mem_addr = u_regval[0] + offset_8;
9880 }
9881 else
9882 {
9883 tgt_mem_addr = u_regval[0] - offset_8;
9884 }
9885 if (ARM_RECORD_STRH == str_type)
9886 {
9887 record_buf_mem[0] = 2;
9888 record_buf_mem[1] = tgt_mem_addr;
9889 arm_insn_r->mem_rec_count = 1;
9890 }
9891 else if (ARM_RECORD_STRD == str_type)
9892 {
9893 record_buf_mem[0] = 4;
9894 record_buf_mem[1] = tgt_mem_addr;
9895 record_buf_mem[2] = 4;
9896 record_buf_mem[3] = tgt_mem_addr + 4;
9897 arm_insn_r->mem_rec_count = 2;
9898 }
9899 }
9900 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9901 {
9902 /* 2) Store, register offset. */
9903 /* Get Rm. */
9904 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9905 /* Get Rn. */
9906 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9907 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9908 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9909 if (15 == reg_src2)
9910 {
 9911 /* If R15 was used as Rn, the operand value is the current PC+8. */
9912 u_regval[0] = u_regval[0] + 8;
9913 }
9914 /* Calculate target store address, Rn +/- Rm, register offset. */
9915 if (12 == arm_insn_r->opcode)
9916 {
9917 tgt_mem_addr = u_regval[0] + u_regval[1];
9918 }
9919 else
9920 {
9921 tgt_mem_addr = u_regval[1] - u_regval[0];
9922 }
9923 if (ARM_RECORD_STRH == str_type)
9924 {
9925 record_buf_mem[0] = 2;
9926 record_buf_mem[1] = tgt_mem_addr;
9927 arm_insn_r->mem_rec_count = 1;
9928 }
9929 else if (ARM_RECORD_STRD == str_type)
9930 {
9931 record_buf_mem[0] = 4;
9932 record_buf_mem[1] = tgt_mem_addr;
9933 record_buf_mem[2] = 4;
9934 record_buf_mem[3] = tgt_mem_addr + 4;
9935 arm_insn_r->mem_rec_count = 2;
9936 }
9937 }
9938 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9939 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9940 {
9941 /* 3) Store, immediate pre-indexed. */
9942 /* 5) Store, immediate post-indexed. */
9943 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9944 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9945 offset_8 = (immed_high << 4) | immed_low;
9946 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9947 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
 9948 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9949 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9950 {
9951 tgt_mem_addr = u_regval[0] + offset_8;
9952 }
9953 else
9954 {
9955 tgt_mem_addr = u_regval[0] - offset_8;
9956 }
9957 if (ARM_RECORD_STRH == str_type)
9958 {
9959 record_buf_mem[0] = 2;
9960 record_buf_mem[1] = tgt_mem_addr;
9961 arm_insn_r->mem_rec_count = 1;
9962 }
9963 else if (ARM_RECORD_STRD == str_type)
9964 {
9965 record_buf_mem[0] = 4;
9966 record_buf_mem[1] = tgt_mem_addr;
9967 record_buf_mem[2] = 4;
9968 record_buf_mem[3] = tgt_mem_addr + 4;
9969 arm_insn_r->mem_rec_count = 2;
9970 }
9971 /* Record Rn also as it changes. */
9972 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9973 arm_insn_r->reg_rec_count = 1;
9974 }
9975 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9976 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9977 {
9978 /* 4) Store, register pre-indexed. */
 9979 /* 6) Store, register post-indexed. */
9980 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9981 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9982 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9983 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9984 /* Calculate target store address, Rn +/- Rm, register offset. */
9985 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9986 {
9987 tgt_mem_addr = u_regval[0] + u_regval[1];
9988 }
9989 else
9990 {
9991 tgt_mem_addr = u_regval[1] - u_regval[0];
9992 }
9993 if (ARM_RECORD_STRH == str_type)
9994 {
9995 record_buf_mem[0] = 2;
9996 record_buf_mem[1] = tgt_mem_addr;
9997 arm_insn_r->mem_rec_count = 1;
9998 }
9999 else if (ARM_RECORD_STRD == str_type)
10000 {
10001 record_buf_mem[0] = 4;
10002 record_buf_mem[1] = tgt_mem_addr;
10003 record_buf_mem[2] = 4;
10004 record_buf_mem[3] = tgt_mem_addr + 4;
10005 arm_insn_r->mem_rec_count = 2;
10006 }
10007 /* Record Rn also as it changes. */
10008 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10009 arm_insn_r->reg_rec_count = 1;
10010 }
10011 return 0;
10012}
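/* Worked example (illustrative, not part of the original source):
   recording an STRD whose target address resolves to A fills
   record_buf_mem with the two length/address pairs {4, A} and {4, A + 4}
   and sets mem_rec_count to 2; an STRH records the single pair {2, A}.
   For the pre- and post-indexed forms the written-back base register Rn
   is additionally entered into record_buf.  */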
10013
10014/* Handling ARM extension space insns. */
10015
10016static int
10017arm_record_extension_space (insn_decode_record *arm_insn_r)
10018{
10019 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10020 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10021 uint32_t record_buf[8], record_buf_mem[8];
10022 uint32_t reg_src1 = 0;
72508ac0
PO
10023 struct regcache *reg_cache = arm_insn_r->regcache;
10024 ULONGEST u_regval = 0;
10025
10026 gdb_assert (!INSN_RECORDED(arm_insn_r));
10027 /* Handle unconditional insn extension space. */
10028
10029 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10030 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10031 if (arm_insn_r->cond)
10032 {
10033 /* PLD has no effect on the architectural state; it only affects
10034 the caches. */
10035 if (5 == ((opcode1 & 0xE0) >> 5))
10036 {
10037 /* BLX(1) */
10038 record_buf[0] = ARM_PS_REGNUM;
10039 record_buf[1] = ARM_LR_REGNUM;
10040 arm_insn_r->reg_rec_count = 2;
10041 }
10042 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10043 }
10044
10045
10046 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10047 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10048 {
10049 ret = -1;
10050 /* Undefined instruction on ARM V5; need to handle if later
10051 versions define it. */
10052 }
10053
10054 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10055 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10056 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10057
10058 /* Handle arithmetic insn extension space. */
10059 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10060 && !INSN_RECORDED(arm_insn_r))
10061 {
10062 /* Handle MLA(S) and MUL(S). */
10063 if (0 <= insn_op1 && 3 >= insn_op1)
10064 {
10065 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10066 record_buf[1] = ARM_PS_REGNUM;
10067 arm_insn_r->reg_rec_count = 2;
10068 }
10069 else if (4 <= insn_op1 && 15 >= insn_op1)
10070 {
10071 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10072 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10073 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10074 record_buf[2] = ARM_PS_REGNUM;
10075 arm_insn_r->reg_rec_count = 3;
10076 }
10077 }
10078
10079 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10080 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10081 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10082
10083 /* Handle control insn extension space. */
10084
10085 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10086 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10087 {
10088 if (!bit (arm_insn_r->arm_insn,25))
10089 {
10090 if (!bits (arm_insn_r->arm_insn, 4, 7))
10091 {
10092 if ((0 == insn_op1) || (2 == insn_op1))
10093 {
10094 /* MRS. */
10095 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10096 arm_insn_r->reg_rec_count = 1;
10097 }
10098 else if (1 == insn_op1)
10099 {
10100 /* CPSR is going to be changed. */
10101 record_buf[0] = ARM_PS_REGNUM;
10102 arm_insn_r->reg_rec_count = 1;
10103 }
10104 else if (3 == insn_op1)
10105 {
10106 /* SPSR is going to be changed. */
10107 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
10108 return -1;
10109 }
10110 }
10111 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10112 {
10113 if (1 == insn_op1)
10114 {
10115 /* BX. */
10116 record_buf[0] = ARM_PS_REGNUM;
10117 arm_insn_r->reg_rec_count = 1;
10118 }
10119 else if (3 == insn_op1)
10120 {
10121 /* CLZ. */
10122 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10123 arm_insn_r->reg_rec_count = 1;
10124 }
10125 }
10126 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10127 {
10128 /* BLX. */
10129 record_buf[0] = ARM_PS_REGNUM;
10130 record_buf[1] = ARM_LR_REGNUM;
10131 arm_insn_r->reg_rec_count = 2;
10132 }
10133 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10134 {
10135 /* QADD, QSUB, QDADD, QDSUB */
10136 record_buf[0] = ARM_PS_REGNUM;
10137 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10138 arm_insn_r->reg_rec_count = 2;
10139 }
10140 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10141 {
10142 /* BKPT. */
10143 record_buf[0] = ARM_PS_REGNUM;
10144 record_buf[1] = ARM_LR_REGNUM;
10145 arm_insn_r->reg_rec_count = 2;
10146
10147 /* Save SPSR also; how? */
72508ac0
PO
10148 return -1;
10149 }
10150 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10151 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10152 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10153 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10154 )
10155 {
10156 if (0 == insn_op1 || 1 == insn_op1)
10157 {
10158 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10159 /* We don't do the optimization for SMULW<y>, where we
10160 would need only Rd. */
10161 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10162 record_buf[1] = ARM_PS_REGNUM;
10163 arm_insn_r->reg_rec_count = 2;
10164 }
10165 else if (2 == insn_op1)
10166 {
10167 /* SMLAL<x><y>. */
10168 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10169 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10170 arm_insn_r->reg_rec_count = 2;
10171 }
10172 else if (3 == insn_op1)
10173 {
10174 /* SMUL<x><y>. */
10175 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10176 arm_insn_r->reg_rec_count = 1;
10177 }
10178 }
10179 }
10180 else
10181 {
10182 /* MSR : immediate form. */
10183 if (1 == insn_op1)
10184 {
10185 /* CPSR is going to be changed. */
10186 record_buf[0] = ARM_PS_REGNUM;
10187 arm_insn_r->reg_rec_count = 1;
10188 }
10189 else if (3 == insn_op1)
10190 {
10191 /* SPSR is going to be changed. */
10192 /* We need to get the SPSR value, which is yet to be done. */
72508ac0
PO
10193 return -1;
10194 }
10195 }
10196 }
10197
10198 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10199 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10200 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10201
10202 /* Handle load/store insn extension space. */
10203
10204 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10205 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10206 && !INSN_RECORDED(arm_insn_r))
10207 {
10208 /* SWP/SWPB. */
10209 if (0 == insn_op1)
10210 {
10211 /* These insns change a register and memory as well. */
10212 /* SWP or SWPB insn. */
10213 /* Get memory address given by Rn. */
10214 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10215 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10216 /* SWP insn? It swaps a word. */
10217 if (8 == arm_insn_r->opcode)
10218 {
10219 record_buf_mem[0] = 4;
10220 }
10221 else
10222 {
10223 /* SWPB insn, swaps only byte. */
10224 record_buf_mem[0] = 1;
10225 }
10226 record_buf_mem[1] = u_regval;
10227 arm_insn_r->mem_rec_count = 1;
10228 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10229 arm_insn_r->reg_rec_count = 1;
10230 }
10231 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10232 {
10233 /* STRH. */
10234 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10235 ARM_RECORD_STRH);
10236 }
10237 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10238 {
10239 /* LDRD. */
10240 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10241 record_buf[1] = record_buf[0] + 1;
10242 arm_insn_r->reg_rec_count = 2;
10243 }
10244 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10245 {
10246 /* STRD. */
10247 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10248 ARM_RECORD_STRD);
10249 }
10250 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10251 {
10252 /* LDRH, LDRSB, LDRSH. */
10253 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10254 arm_insn_r->reg_rec_count = 1;
10255 }
10256
10257 }
10258
10259 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10260 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10261 && !INSN_RECORDED(arm_insn_r))
10262 {
10263 ret = -1;
10264 /* Handle coprocessor insn extension space. */
10265 }
10266
10267 /* To be done for ARMv5 and later; as of now we return -1. */
10268 if (-1 == ret)
ca92db2d 10269 return ret;
72508ac0
PO
10270
10271 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10272 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10273
10274 return ret;
10275}
10276
10277/* Handling opcode 000 insns. */
10278
10279static int
10280arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10281{
10282 struct regcache *reg_cache = arm_insn_r->regcache;
10283 uint32_t record_buf[8], record_buf_mem[8];
10284 ULONGEST u_regval[2] = {0};
10285
bec2ab5a 10286 uint32_t reg_src1 = 0, reg_dest = 0;
72508ac0
PO
10287 uint32_t opcode1 = 0;
10288
10289 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10290 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10291 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10292
10293 /* Data processing insn /multiply insn. */
10294 if (9 == arm_insn_r->decode
10295 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10296 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10297 {
10298 /* Handle multiply instructions. */
10299 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10300 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10301 {
10302 /* Handle MLA and MUL. */
10303 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10304 record_buf[1] = ARM_PS_REGNUM;
10305 arm_insn_r->reg_rec_count = 2;
10306 }
10307 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10308 {
10309 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10310 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10311 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10312 record_buf[2] = ARM_PS_REGNUM;
10313 arm_insn_r->reg_rec_count = 3;
10314 }
10315 }
10316 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10317 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10318 {
10319 /* Handle misc load insns, as 20th bit (L = 1). */
10320 /* The LDR insn is capable of branching: if an LDR insn whose
10321 destination is R15 is preceded by MOV LR, PC, the pair
10322 emulates a branch and link insn, and hence we need to save
10323 the CPSR and PC as well. This may not be the right place,
10324 since any opcode = 010 LDR insn that writes R15 makes this
10325 happen. */
10326 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10327 if (15 != reg_dest)
10328 {
10329 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10330 arm_insn_r->reg_rec_count = 1;
10331 }
10332 else
10333 {
10334 record_buf[0] = reg_dest;
10335 record_buf[1] = ARM_PS_REGNUM;
10336 arm_insn_r->reg_rec_count = 2;
10337 }
10338 }
10339 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10340 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10341 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10342 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10343 {
10344 /* Handle MSR insn. */
10345 if (9 == arm_insn_r->opcode)
10346 {
10347 /* CPSR is going to be changed. */
10348 record_buf[0] = ARM_PS_REGNUM;
10349 arm_insn_r->reg_rec_count = 1;
10350 }
10351 else
10352 {
10353 /* SPSR is going to be changed. */
10354 /* How to read SPSR value? */
72508ac0
PO
10355 return -1;
10356 }
10357 }
10358 else if (9 == arm_insn_r->decode
10359 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10360 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10361 {
10362 /* Handling SWP, SWPB. */
10363 /* These insns change a register and memory as well. */
10364 /* SWP or SWPB insn. */
10365
10366 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10367 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10368 /* SWP insn? It swaps a word. */
10369 if (8 == arm_insn_r->opcode)
10370 {
10371 record_buf_mem[0] = 4;
10372 }
10373 else
10374 {
10375 /* SWPB insn, swaps only byte. */
10376 record_buf_mem[0] = 1;
10377 }
10378 record_buf_mem[1] = u_regval[0];
10379 arm_insn_r->mem_rec_count = 1;
10380 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10381 arm_insn_r->reg_rec_count = 1;
10382 }
10383 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10384 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10385 {
10386 /* Handle BLX, branch and link/exchange. */
10387 if (9 == arm_insn_r->opcode)
10388 {
10389 /* Branch state is chosen by setting the T bit of the CPSR from
10390 bit 0 of Rm, and R14 stores the return address. */
10391 record_buf[0] = ARM_PS_REGNUM;
10392 record_buf[1] = ARM_LR_REGNUM;
10393 arm_insn_r->reg_rec_count = 2;
10394 }
10395 }
10396 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10397 {
10398 /* Handle enhanced software breakpoint insn, BKPT. */
10399 /* CPSR is changed so that execution continues in ARM state, with
10400 normal interrupts disabled, entering abort mode. */
10401 /* The PC is set according to the high vector configuration. */
10402 /* If the user hits the breakpoint and types reverse,
10403 we need to go back with the previous CPSR and
10404 Program Counter. */
10405 record_buf[0] = ARM_PS_REGNUM;
10406 record_buf[1] = ARM_LR_REGNUM;
10407 arm_insn_r->reg_rec_count = 2;
10408
10409 /* Save SPSR also; how? */
72508ac0
PO
10410 return -1;
10411 }
10412 else if (11 == arm_insn_r->decode
10413 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10414 {
10415 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10416
10417 /* Handle str(x) insn */
10418 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10419 ARM_RECORD_STRH);
10420 }
10421 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10422 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10423 {
10424 /* Handle BX, branch and link/exchange. */
10425 /* Branch state is chosen by setting the T bit of the CPSR from bit 0 of Rm. */
10426 record_buf[0] = ARM_PS_REGNUM;
10427 arm_insn_r->reg_rec_count = 1;
10428 }
10429 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10430 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10431 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10432 {
10433 /* Count leading zeros: CLZ. */
10434 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10435 arm_insn_r->reg_rec_count = 1;
10436 }
10437 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10438 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10439 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10440 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10441 )
10442 {
10443 /* Handle MRS insn. */
10444 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10445 arm_insn_r->reg_rec_count = 1;
10446 }
10447 else if (arm_insn_r->opcode <= 15)
10448 {
10449 /* Normal data processing insns. */
10450 /* In all of the 11 shifter operand modes, the insn modifies the
10451 destination register, which is specified by bits 12-15. */
10452 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10453 record_buf[1] = ARM_PS_REGNUM;
10454 arm_insn_r->reg_rec_count = 2;
10455 }
10456 else
10457 {
10458 return -1;
10459 }
10460
10461 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10462 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10463 return 0;
10464}
10465
10466/* Handling opcode 001 insns. */
10467
10468static int
10469arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10470{
10471 uint32_t record_buf[8], record_buf_mem[8];
10472
10473 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10474 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10475
10476 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10477 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10478 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10479 )
10480 {
10481 /* Handle MSR insn. */
10482 if (9 == arm_insn_r->opcode)
10483 {
10484 /* CPSR is going to be changed. */
10485 record_buf[0] = ARM_PS_REGNUM;
10486 arm_insn_r->reg_rec_count = 1;
10487 }
10488 else
10489 {
10490 /* SPSR is going to be changed. */
10491 }
10492 }
10493 else if (arm_insn_r->opcode <= 15)
10494 {
10495 /* Normal data processing insns. */
10496 /* In all of the 11 shifter operand modes, the insn modifies the
10497 destination register, which is specified by bits 12-15. */
10498 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10499 record_buf[1] = ARM_PS_REGNUM;
10500 arm_insn_r->reg_rec_count = 2;
10501 }
10502 else
10503 {
10504 return -1;
10505 }
10506
10507 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10508 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10509 return 0;
10510}
10511
c55978a6
YQ
10512static int
10513arm_record_media (insn_decode_record *arm_insn_r)
10514{
10515 uint32_t record_buf[8];
10516
10517 switch (bits (arm_insn_r->arm_insn, 22, 24))
10518 {
10519 case 0:
10520 /* Parallel addition and subtraction, signed */
10521 case 1:
10522 /* Parallel addition and subtraction, unsigned */
10523 case 2:
10524 case 3:
10525 /* Packing, unpacking, saturation and reversal */
10526 {
10527 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10528
10529 record_buf[arm_insn_r->reg_rec_count++] = rd;
10530 }
10531 break;
10532
10533 case 4:
10534 case 5:
10535 /* Signed multiplies */
10536 {
10537 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10538 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10539
10540 record_buf[arm_insn_r->reg_rec_count++] = rd;
10541 if (op1 == 0x0)
10542 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10543 else if (op1 == 0x4)
10544 record_buf[arm_insn_r->reg_rec_count++]
10545 = bits (arm_insn_r->arm_insn, 12, 15);
10546 }
10547 break;
10548
10549 case 6:
10550 {
10551 if (bit (arm_insn_r->arm_insn, 21)
10552 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10553 {
10554 /* SBFX */
10555 record_buf[arm_insn_r->reg_rec_count++]
10556 = bits (arm_insn_r->arm_insn, 12, 15);
10557 }
10558 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10559 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10560 {
10561 /* USAD8 and USADA8 */
10562 record_buf[arm_insn_r->reg_rec_count++]
10563 = bits (arm_insn_r->arm_insn, 16, 19);
10564 }
10565 }
10566 break;
10567
10568 case 7:
10569 {
10570 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10571 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10572 {
10573 /* Permanently UNDEFINED */
10574 return -1;
10575 }
10576 else
10577 {
10578 /* BFC, BFI and UBFX */
10579 record_buf[arm_insn_r->reg_rec_count++]
10580 = bits (arm_insn_r->arm_insn, 12, 15);
10581 }
10582 }
10583 break;
10584
10585 default:
10586 return -1;
10587 }
10588
10589 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10590
10591 return 0;
10592}
10593
71e396f9 10594/* Handle ARM mode instructions with opcode 010. */
72508ac0
PO
10595
10596static int
10597arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10598{
10599 struct regcache *reg_cache = arm_insn_r->regcache;
10600
71e396f9
LM
10601 uint32_t reg_base, reg_dest;
10602 uint32_t offset_12, tgt_mem_addr;
72508ac0 10603 uint32_t record_buf[8], record_buf_mem[8];
71e396f9
LM
10604 unsigned char wback;
10605 ULONGEST u_regval;
72508ac0 10606
71e396f9
LM
10607 /* Calculate wback. */
10608 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10609 || (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0 10610
71e396f9
LM
10611 arm_insn_r->reg_rec_count = 0;
10612 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0
PO
10613
10614 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10615 {
71e396f9
LM
10616 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10617 and LDRT. */
10618
72508ac0 10619 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
71e396f9
LM
10620 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10621
10622 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10623 precedes an LDR instruction that has R15 as its destination, it
10624 emulates a branch and link instruction, and hence we need to save
10625 CPSR and PC as well. */
10626 if (ARM_PC_REGNUM == reg_dest)
10627 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10628
10629 /* If wback is true, also save the base register, which is going to be
10630 written to. */
10631 if (wback)
10632 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10633 }
10634 else
10635 {
71e396f9
LM
10636 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10637
72508ac0 10638 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
71e396f9
LM
10639 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10640
10641 /* Handle bit U. */
72508ac0 10642 if (bit (arm_insn_r->arm_insn, 23))
71e396f9
LM
10643 {
10644 /* U == 1: Add the offset. */
10645 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10646 }
72508ac0 10647 else
71e396f9
LM
10648 {
10649 /* U == 0: subtract the offset. */
10650 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10651 }
10652
10653 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10654 bytes. */
10655 if (bit (arm_insn_r->arm_insn, 22))
10656 {
10657 /* STRB and STRBT: 1 byte. */
10658 record_buf_mem[0] = 1;
10659 }
10660 else
10661 {
10662 /* STR and STRT: 4 bytes. */
10663 record_buf_mem[0] = 4;
10664 }
10665
10666 /* Handle bit P. */
10667 if (bit (arm_insn_r->arm_insn, 24))
10668 record_buf_mem[1] = tgt_mem_addr;
10669 else
10670 record_buf_mem[1] = (uint32_t) u_regval;
72508ac0 10671
72508ac0
PO
10672 arm_insn_r->mem_rec_count = 1;
10673
71e396f9
LM
10674 /* If wback is true, also save the base register, which is going to be
10675 written to. */
10676 if (wback)
10677 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10678 }
10679
10680 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10681 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10682 return 0;
10683}
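
A worked example may help here. The following standalone sketch (not part of arm-tdep.c; the encoding and register value are made up) applies the same P/U/W rules the function above uses to decide wback and the recorded store address for a STR (immediate):

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t insn = 0xe52d4004;     /* STR r4, [sp, #-4]!  (push r4).  */
  uint32_t rn_value = 0x7ffffff0; /* Pretend SP value.  */

  unsigned p = (insn >> 24) & 1;  /* Pre/post indexing.  */
  unsigned u = (insn >> 23) & 1;  /* Add or subtract offset.  */
  unsigned w = (insn >> 21) & 1;  /* Write-back.  */
  uint32_t offset_12 = insn & 0xfff;

  /* Same rule as the recorder: the base register changes when P == 0
     (post-indexed) or W == 1 (pre-indexed with write-back).  */
  int wback = (p == 0) || (w == 1);

  uint32_t offset_addr = u ? rn_value + offset_12 : rn_value - offset_12;
  /* Bit P selects whether the offset applies before or after the access.  */
  uint32_t address = p ? offset_addr : rn_value;

  printf ("wback=%d  store address=0x%08x\n", wback, (unsigned) address);
  return 0;
}
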
10684
10685/* Handling opcode 011 insns. */
10686
10687static int
10688arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10689{
10690 struct regcache *reg_cache = arm_insn_r->regcache;
10691
10692 uint32_t shift_imm = 0;
10693 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10694 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10695 uint32_t record_buf[8], record_buf_mem[8];
10696
10697 LONGEST s_word;
10698 ULONGEST u_regval[2];
10699
c55978a6
YQ
10700 if (bit (arm_insn_r->arm_insn, 4))
10701 return arm_record_media (arm_insn_r);
10702
72508ac0
PO
10703 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10704 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10705
10706 /* Handle enhanced store insns and the LDRD DSP insn; the order
10707 below follows the addressing modes of the store insns,
10708 starting with STRH. */
10709
10710 /* LDR or STR? */
10711 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10712 {
10713 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10714 /* The LDR insn is capable of branching: if an LDR insn
10715 whose destination is R15 is preceded by MOV LR, PC,
10716 the pair emulates a branch and link insn, and hence we
10717 need to save the CPSR and PC as well. */
10718 if (15 != reg_dest)
10719 {
10720 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10721 arm_insn_r->reg_rec_count = 1;
10722 }
10723 else
10724 {
10725 record_buf[0] = reg_dest;
10726 record_buf[1] = ARM_PS_REGNUM;
10727 arm_insn_r->reg_rec_count = 2;
10728 }
10729 }
10730 else
10731 {
10732 if (! bits (arm_insn_r->arm_insn, 4, 11))
10733 {
10734 /* Store insn, register offset and register pre-indexed,
10735 register post-indexed. */
10736 /* Get Rm. */
10737 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10738 /* Get Rn. */
10739 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10740 regcache_raw_read_unsigned (reg_cache, reg_src1
10741 , &u_regval[0]);
10742 regcache_raw_read_unsigned (reg_cache, reg_src2
10743 , &u_regval[1]);
10744 if (15 == reg_src2)
10745 {
10746 /* If R15 was used as Rn, the value is the current PC + 8. */
10747 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10748 u_regval[0] = u_regval[0] + 8;
10749 }
10750 /* Calculate target store address, Rn +/- Rm, register offset. */
10751 /* U == 1. */
10752 if (bit (arm_insn_r->arm_insn, 23))
10753 {
10754 tgt_mem_addr = u_regval[0] + u_regval[1];
10755 }
10756 else
10757 {
10758 tgt_mem_addr = u_regval[1] - u_regval[0];
10759 }
10760
10761 switch (arm_insn_r->opcode)
10762 {
10763 /* STR. */
10764 case 8:
10765 case 12:
10766 /* STR. */
10767 case 9:
10768 case 13:
10769 /* STRT. */
10770 case 1:
10771 case 5:
10772 /* STR. */
10773 case 0:
10774 case 4:
10775 record_buf_mem[0] = 4;
10776 break;
10777
10778 /* STRB. */
10779 case 10:
10780 case 14:
10781 /* STRB. */
10782 case 11:
10783 case 15:
10784 /* STRBT. */
10785 case 3:
10786 case 7:
10787 /* STRB. */
10788 case 2:
10789 case 6:
10790 record_buf_mem[0] = 1;
10791 break;
10792
10793 default:
10794 gdb_assert_not_reached ("no decoding pattern found");
10795 break;
10796 }
10797 record_buf_mem[1] = tgt_mem_addr;
10798 arm_insn_r->mem_rec_count = 1;
10799
10800 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10801 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10802 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10803 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10804 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10805 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10806 )
10807 {
10808 /* Rn is going to be changed in pre-indexed mode and
10809 post-indexed mode as well. */
10810 record_buf[0] = reg_src2;
10811 arm_insn_r->reg_rec_count = 1;
10812 }
10813 }
10814 else
10815 {
10816 /* Store insn, scaled register offset; scaled pre-indexed. */
10817 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10818 /* Get Rm. */
10819 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10820 /* Get Rn. */
10821 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10822 /* Get shift_imm. */
10823 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10824 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10825 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10826 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10827 /* offset_12 currently holds the shift type (bits 5-6): 0 = LSL, 1 = LSR, 2 = ASR, 3 = ROR/RRX. */
10828 switch (offset_12)
10829 {
10830 case 0:
10831 /* LSL: the offset is Rm shifted left by shift_imm. */
10832 offset_12 = u_regval[0] << shift_imm;
10833 break;
10834
10835 case 1:
10836 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10837 break;
10838
10839 case 2:
10840 if (!shift_imm)
10841 {
10842 if (bit (u_regval[0], 31))
10843 {
10844 offset_12 = 0xFFFFFFFF;
10845 }
10846 else
10847 {
10848 offset_12 = 0;
10849 }
10850 }
10851 else
10852 {
10853 /* This is an arithmetic shift (ASR). */
10854 offset_12 = s_word >> shift_imm;
10855 }
10856 break;
10857
10858 case 3:
10859 if (!shift_imm)
10860 {
10861 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10862 &u_regval[1]);
10863 /* Get C flag value and shift it by 31. */
10864 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10865 | (u_regval[0]) >> 1);
10866 }
10867 else
10868 {
10869 offset_12 = (u_regval[0] >> shift_imm) \
10870 | (u_regval[0] <<
10871 (32 - shift_imm)); /* Rotate right (ROR). */
10872 }
10873 break;
10874
10875 default:
10876 gdb_assert_not_reached ("no decoding pattern found");
10877 break;
10878 }
10879
10880 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10881 /* bit U set. */
10882 if (bit (arm_insn_r->arm_insn, 23))
10883 {
10884 tgt_mem_addr = u_regval[1] + offset_12;
10885 }
10886 else
10887 {
10888 tgt_mem_addr = u_regval[1] - offset_12;
10889 }
10890
10891 switch (arm_insn_r->opcode)
10892 {
10893 /* STR. */
10894 case 8:
10895 case 12:
10896 /* STR. */
10897 case 9:
10898 case 13:
10899 /* STRT. */
10900 case 1:
10901 case 5:
10902 /* STR. */
10903 case 0:
10904 case 4:
10905 record_buf_mem[0] = 4;
10906 break;
10907
10908 /* STRB. */
10909 case 10:
10910 case 14:
10911 /* STRB. */
10912 case 11:
10913 case 15:
10914 /* STRBT. */
10915 case 3:
10916 case 7:
10917 /* STRB. */
10918 case 2:
10919 case 6:
10920 record_buf_mem[0] = 1;
10921 break;
10922
10923 default:
10924 gdb_assert_not_reached ("no decoding pattern found");
10925 break;
10926 }
10927 record_buf_mem[1] = tgt_mem_addr;
10928 arm_insn_r->mem_rec_count = 1;
10929
10930 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10931 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10932 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10933 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10934 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10935 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10936 )
10937 {
10938 /* Rn is going to be changed in register scaled pre-indexed
10939 mode, and in scaled post-indexed mode. */
10940 record_buf[0] = reg_src2;
10941 arm_insn_r->reg_rec_count = 1;
10942 }
10943 }
10944 }
10945
10946 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10947 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10948 return 0;
10949}
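
The scaled-register-offset switch above is essentially the ARM shifter: LSL, LSR, ASR and ROR, with RRX as the ROR #0 special case. Here is a minimal standalone sketch of that computation (the helper name and test values are illustrative, the carry flag is passed in instead of being read from the CPSR, and the ASR case relies on the usual arithmetic right shift of negative values):

#include <stdint.h>
#include <stdio.h>

static uint32_t
scaled_offset (uint32_t rm, unsigned shift_type, unsigned shift_imm,
               unsigned carry_flag)
{
  switch (shift_type)
    {
    case 0:                     /* LSL.  */
      return rm << shift_imm;
    case 1:                     /* LSR; imm == 0 means shift by 32.  */
      return shift_imm ? rm >> shift_imm : 0;
    case 2:                     /* ASR; imm == 0 means shift by 32.  */
      if (shift_imm == 0)
        return (rm & 0x80000000u) ? 0xffffffffu : 0;
      return (uint32_t) ((int32_t) rm >> shift_imm);
    case 3:                     /* ROR, or RRX when imm == 0.  */
      if (shift_imm == 0)
        return (carry_flag << 31) | (rm >> 1);
      return (rm >> shift_imm) | (rm << (32 - shift_imm));
    }
  return 0;
}

int
main (void)
{
  uint32_t rm = 0x80000001u;

  printf ("LSL #4: 0x%08x\n", (unsigned) scaled_offset (rm, 0, 4, 0));
  printf ("ASR #0: 0x%08x\n", (unsigned) scaled_offset (rm, 2, 0, 0));
  printf ("ROR #4: 0x%08x\n", (unsigned) scaled_offset (rm, 3, 4, 0));
  printf ("RRX   : 0x%08x\n", (unsigned) scaled_offset (rm, 3, 0, 1));
  return 0;
}
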
10950
71e396f9 10951/* Handle ARM mode instructions with opcode 100. */
72508ac0
PO
10952
10953static int
10954arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10955{
10956 struct regcache *reg_cache = arm_insn_r->regcache;
71e396f9
LM
10957 uint32_t register_count = 0, register_bits;
10958 uint32_t reg_base, addr_mode;
72508ac0 10959 uint32_t record_buf[24], record_buf_mem[48];
71e396f9
LM
10960 uint32_t wback;
10961 ULONGEST u_regval;
72508ac0 10962
71e396f9
LM
10963 /* Fetch the list of registers. */
10964 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10965 arm_insn_r->reg_rec_count = 0;
10966
10967 /* Fetch the base register that contains the address we are loading
10968 from or storing to. */
10969 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10970
71e396f9
LM
10971 /* Calculate wback. */
10972 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10973
10974 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10975 {
71e396f9 10976 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10977
71e396f9 10978 /* Find out which registers are going to be loaded from memory. */
72508ac0 10979 while (register_bits)
71e396f9
LM
10980 {
10981 if (register_bits & 0x00000001)
10982 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10983 register_bits = register_bits >> 1;
10984 register_count++;
10985 }
72508ac0 10986
71e396f9
LM
10987
10988 /* If wback is true, also save the base register, which is going to be
10989 written to. */
10990 if (wback)
10991 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10992
10993 /* Save the CPSR register. */
10994 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
72508ac0
PO
10995 }
10996 else
10997 {
71e396f9 10998 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 10999
71e396f9
LM
11000 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11001
11002 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11003
11004 /* Find out how many registers are going to be stored to memory. */
72508ac0 11005 while (register_bits)
71e396f9
LM
11006 {
11007 if (register_bits & 0x00000001)
11008 register_count++;
11009 register_bits = register_bits >> 1;
11010 }
72508ac0
PO
11011
11012 switch (addr_mode)
71e396f9
LM
11013 {
11014 /* STMDA (STMED): Decrement after. */
11015 case 0:
11016 record_buf_mem[1] = (uint32_t) u_regval
11017 - register_count * INT_REGISTER_SIZE + 4;
11018 break;
11019 /* STM (STMIA, STMEA): Increment after. */
11020 case 1:
11021 record_buf_mem[1] = (uint32_t) u_regval;
11022 break;
11023 /* STMDB (STMFD): Decrement before. */
11024 case 2:
11025 record_buf_mem[1] = (uint32_t) u_regval
11026 - register_count * INT_REGISTER_SIZE;
11027 break;
11028 /* STMIB (STMFA): Increment before. */
11029 case 3:
11030 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11031 break;
11032 default:
11033 gdb_assert_not_reached ("no decoding pattern found");
11034 break;
11035 }
72508ac0 11036
71e396f9
LM
11037 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11038 arm_insn_r->mem_rec_count = 1;
11039
11040 /* If wback is true, also save the base register, which is going to be
11041 written to. */
11042 if (wback)
11043 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
11044 }
11045
11046 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11047 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11048 return 0;
11049}
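
The four addressing modes reduce to a small start-address rule: the recorder only needs the lowest address the store-multiple will touch plus the total byte count. A standalone sketch, with REG_SIZE standing in for INT_REGISTER_SIZE and made-up sample values:

#include <stdint.h>
#include <stdio.h>

#define REG_SIZE 4              /* Stand-in for INT_REGISTER_SIZE.  */

/* Lowest address written by a store-multiple, given the base register
   value, the P/U addressing-mode bits (bit 23 as the low bit, as the
   recorder extracts them) and the number of registers in the list.  */
static uint32_t
stm_start_address (uint32_t base, unsigned addr_mode, unsigned count)
{
  switch (addr_mode)
    {
    case 0:                     /* STMDA: decrement after.  */
      return base - count * REG_SIZE + 4;
    case 1:                     /* STMIA: increment after.  */
      return base;
    case 2:                     /* STMDB: decrement before.  */
      return base - count * REG_SIZE;
    case 3:                     /* STMIB: increment before.  */
      return base + REG_SIZE;
    }
  return base;
}

int
main (void)
{
  uint32_t sp = 0x1000;

  /* push {r0-r3, lr} is STMDB sp!, {...}: five registers.  */
  printf ("STMDB, 5 regs: first byte written at 0x%08x\n",
          (unsigned) stm_start_address (sp, 2, 5));
  printf ("STMIA, 5 regs: first byte written at 0x%08x\n",
          (unsigned) stm_start_address (sp, 1, 5));
  return 0;
}
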
11050
11051/* Handling opcode 101 insns. */
11052
11053static int
11054arm_record_b_bl (insn_decode_record *arm_insn_r)
11055{
11056 uint32_t record_buf[8];
11057
11058 /* Handle B, BL, BLX(1) insns. */
11059 /* B simply branches so we do nothing here. */
11060 /* Note: BLX(1) doesn't fall here; instead it falls into the
11061 extension space. */
11062 if (bit (arm_insn_r->arm_insn, 24))
11063 {
11064 record_buf[0] = ARM_LR_REGNUM;
11065 arm_insn_r->reg_rec_count = 1;
11066 }
11067
11068 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11069
11070 return 0;
11071}
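
Only the link form needs a register record here, because B changes nothing the recorder has to restore while BL clobbers LR. A tiny sketch of the bit-24 test on two illustrative encodings:

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t b_insn = 0xea000010;   /* B  forward by 0x48 bytes.  */
  uint32_t bl_insn = 0xeb000010;  /* BL forward by 0x48 bytes.  */

  /* Bit 24 is the link bit; only when it is set is LR recorded.  */
  printf ("B : link bit = %u\n", (unsigned) ((b_insn >> 24) & 1));
  printf ("BL: link bit = %u\n", (unsigned) ((bl_insn >> 24) & 1));
  return 0;
}
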
11072
72508ac0 11073static int
c6ec2b30 11074arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
72508ac0
PO
11075{
11076 printf_unfiltered (_("Process record does not support instruction "
01e57735
YQ
11077 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11078 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
72508ac0
PO
11079
11080 return -1;
11081}
11082
5a578da5
OJ
11083/* Record handler for vector data transfer instructions. */
11084
11085static int
11086arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11087{
11088 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11089 uint32_t record_buf[4];
11090
5a578da5
OJ
11091 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11092 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11093 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11094 bit_l = bit (arm_insn_r->arm_insn, 20);
11095 bit_c = bit (arm_insn_r->arm_insn, 8);
11096
11097 /* Handle VMOV instruction. */
11098 if (bit_l && bit_c)
11099 {
11100 record_buf[0] = reg_t;
11101 arm_insn_r->reg_rec_count = 1;
11102 }
11103 else if (bit_l && !bit_c)
11104 {
11105 /* Handle VMOV instruction. */
11106 if (bits_a == 0x00)
11107 {
f1771dce 11108 record_buf[0] = reg_t;
5a578da5
OJ
11109 arm_insn_r->reg_rec_count = 1;
11110 }
11111 /* Handle VMRS instruction. */
11112 else if (bits_a == 0x07)
11113 {
11114 if (reg_t == 15)
11115 reg_t = ARM_PS_REGNUM;
11116
11117 record_buf[0] = reg_t;
11118 arm_insn_r->reg_rec_count = 1;
11119 }
11120 }
11121 else if (!bit_l && !bit_c)
11122 {
11123 /* Handle VMOV instruction. */
11124 if (bits_a == 0x00)
11125 {
f1771dce 11126 record_buf[0] = ARM_D0_REGNUM + reg_v;
5a578da5
OJ
11127
11128 arm_insn_r->reg_rec_count = 1;
11129 }
11130 /* Handle VMSR instruction. */
11131 else if (bits_a == 0x07)
11132 {
11133 record_buf[0] = ARM_FPSCR_REGNUM;
11134 arm_insn_r->reg_rec_count = 1;
11135 }
11136 }
11137 else if (!bit_l && bit_c)
11138 {
11139 /* Handle VMOV instruction. */
11140 if (!(bits_a & 0x04))
11141 {
11142 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11143 + ARM_D0_REGNUM;
11144 arm_insn_r->reg_rec_count = 1;
11145 }
11146 /* Handle VDUP instruction. */
11147 else
11148 {
11149 if (bit (arm_insn_r->arm_insn, 21))
11150 {
11151 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11152 record_buf[0] = reg_v + ARM_D0_REGNUM;
11153 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11154 arm_insn_r->reg_rec_count = 2;
11155 }
11156 else
11157 {
11158 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11159 record_buf[0] = reg_v + ARM_D0_REGNUM;
11160 arm_insn_r->reg_rec_count = 1;
11161 }
11162 }
11163 }
11164
11165 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11166 return 0;
11167}
11168
f20f80dd
OJ
11169/* Record handler for extension register load/store instructions. */
11170
11171static int
11172arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11173{
11174 uint32_t opcode, single_reg;
11175 uint8_t op_vldm_vstm;
11176 uint32_t record_buf[8], record_buf_mem[128];
11177 ULONGEST u_regval = 0;
11178
11179 struct regcache *reg_cache = arm_insn_r->regcache;
f20f80dd
OJ
11180
11181 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11182 single_reg = !bit (arm_insn_r->arm_insn, 8);
f20f80dd
OJ
11183 op_vldm_vstm = opcode & 0x1b;
11184
11185 /* Handle VMOV instructions. */
11186 if ((opcode & 0x1e) == 0x04)
11187 {
9fde51ed 11188 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
01e57735
YQ
11189 {
11190 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11191 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11192 arm_insn_r->reg_rec_count = 2;
11193 }
f20f80dd 11194 else
01e57735 11195 {
9fde51ed
YQ
11196 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11197 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11198
9fde51ed 11199 if (single_reg)
01e57735 11200 {
9fde51ed
YQ
11201 /* The first S register number m is REG_M:M (M is bit 5),
11202 the corresponding D register number is REG_M:M / 2, which
11203 is REG_M. */
11204 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11205 /* The second S register number is REG_M:M + 1, the
11206 corresponding D register number is (REG_M:M + 1) / 2.
11207 IOW, if bit M is 1, the first and second S registers
11208 are mapped to different D registers, otherwise, they are
11209 in the same D register. */
11210 if (bit_m)
11211 {
11212 record_buf[arm_insn_r->reg_rec_count++]
11213 = ARM_D0_REGNUM + reg_m + 1;
11214 }
01e57735
YQ
11215 }
11216 else
11217 {
9fde51ed 11218 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
01e57735
YQ
11219 arm_insn_r->reg_rec_count = 1;
11220 }
11221 }
f20f80dd
OJ
11222 }
11223 /* Handle VSTM and VPUSH instructions. */
11224 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11225 || op_vldm_vstm == 0x12)
f20f80dd
OJ
11226 {
11227 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11228 uint32_t memory_index = 0;
11229
11230 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11231 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11232 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11233 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11234 memory_count = imm_off8;
11235
11236 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11237 start_address = u_regval;
f20f80dd 11238 else
01e57735 11239 start_address = u_regval - imm_off32;
f20f80dd
OJ
11240
11241 if (bit (arm_insn_r->arm_insn, 21))
01e57735
YQ
11242 {
11243 record_buf[0] = reg_rn;
11244 arm_insn_r->reg_rec_count = 1;
11245 }
f20f80dd
OJ
11246
11247 while (memory_count > 0)
01e57735 11248 {
9fde51ed 11249 if (single_reg)
01e57735 11250 {
9fde51ed
YQ
11251 record_buf_mem[memory_index] = 4;
11252 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11253 start_address = start_address + 4;
11254 memory_index = memory_index + 2;
11255 }
11256 else
11257 {
9fde51ed
YQ
11258 record_buf_mem[memory_index] = 4;
11259 record_buf_mem[memory_index + 1] = start_address;
11260 record_buf_mem[memory_index + 2] = 4;
11261 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11262 start_address = start_address + 8;
11263 memory_index = memory_index + 4;
11264 }
11265 memory_count--;
11266 }
f20f80dd
OJ
11267 arm_insn_r->mem_rec_count = (memory_index >> 1);
11268 }
11269 /* Handle VLDM instructions. */
11270 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11271 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11272 {
11273 uint32_t reg_count, reg_vd;
11274 uint32_t reg_index = 0;
9fde51ed 11275 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
f20f80dd
OJ
11276
11277 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11278 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11279
9fde51ed
YQ
11280 /* REG_VD is the first D register number. If the instruction
11281 loads memory to S registers (SINGLE_REG is TRUE), the register
11282 number is (REG_VD << 1 | bit D), so the corresponding D
11283 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11284 if (!single_reg)
11285 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11286
9fde51ed 11287 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11288 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11289
9fde51ed
YQ
11290 /* If the instruction loads memory to D register, REG_COUNT should
11291 be divided by 2, according to the ARM Architecture Reference
11292 Manual. If the instruction loads memory to S register, divide by
11293 2 as well because two S registers are mapped to D register. */
11294 reg_count = reg_count / 2;
11295 if (single_reg && bit_d)
01e57735 11296 {
9fde51ed
YQ
11297 /* Increase the register count if S register list starts from
11298 an odd number (bit d is one). */
11299 reg_count++;
11300 }
f20f80dd 11301
9fde51ed
YQ
11302 while (reg_count > 0)
11303 {
11304 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
01e57735
YQ
11305 reg_count--;
11306 }
f20f80dd
OJ
11307 arm_insn_r->reg_rec_count = reg_index;
11308 }
11309 /* VSTR Vector store register. */
11310 else if ((opcode & 0x13) == 0x10)
11311 {
bec2ab5a 11312 uint32_t start_address, reg_rn, imm_off32, imm_off8;
f20f80dd
OJ
11313 uint32_t memory_index = 0;
11314
11315 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11316 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11317 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11318 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11319
11320 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11321 start_address = u_regval + imm_off32;
f20f80dd 11322 else
01e57735 11323 start_address = u_regval - imm_off32;
f20f80dd
OJ
11324
11325 if (single_reg)
01e57735 11326 {
9fde51ed
YQ
11327 record_buf_mem[memory_index] = 4;
11328 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11329 arm_insn_r->mem_rec_count = 1;
11330 }
f20f80dd 11331 else
01e57735 11332 {
9fde51ed
YQ
11333 record_buf_mem[memory_index] = 4;
11334 record_buf_mem[memory_index + 1] = start_address;
11335 record_buf_mem[memory_index + 2] = 4;
11336 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11337 arm_insn_r->mem_rec_count = 2;
11338 }
f20f80dd
OJ
11339 }
11340 /* VLDR Vector load register. */
11341 else if ((opcode & 0x13) == 0x11)
11342 {
11343 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11344
11345 if (!single_reg)
01e57735
YQ
11346 {
11347 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11348 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11349 }
f20f80dd 11350 else
01e57735
YQ
11351 {
11352 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
9fde51ed
YQ
11353 /* Record register D rather than pseudo register S. */
11354 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11355 }
f20f80dd
OJ
11356 arm_insn_r->reg_rec_count = 1;
11357 }
11358
11359 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11360 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11361 return 0;
11362}
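
The S-to-D mapping described in the comments above comes up repeatedly: the recorder saves the D register that physically holds a given S register, and an S register number n lives in D(n / 2). A small standalone sketch (names and sample values are illustrative):

#include <stdio.h>

/* An S register Sn lives in D(n / 2); for single-precision loads and
   stores, n itself is encoded as (Vd << 1) | D.  */
static int
d_reg_containing_s (int s_reg)
{
  return s_reg / 2;
}

int
main (void)
{
  int vd = 5, bit_d = 1;
  int s_reg = (vd << 1) | bit_d;        /* S11.  */

  printf ("VLDR s%d touches d%d\n", s_reg, d_reg_containing_s (s_reg));
  printf ("the next single, s%d, lives in d%d\n",
          s_reg + 1, d_reg_containing_s (s_reg + 1));
  return 0;
}
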
11363
851f26ae
OJ
11364/* Record handler for arm/thumb mode VFP data processing instructions. */
11365
11366static int
11367arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11368{
11369 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11370 uint32_t record_buf[4];
11371 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11372 enum insn_types curr_insn_type = INSN_INV;
11373
11374 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11375 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11376 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11377 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11378 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11379 bit_d = bit (arm_insn_r->arm_insn, 22);
11380 opc1 = opc1 & 0x04;
11381
11382 /* Handle VMLA, VMLS. */
11383 if (opc1 == 0x00)
11384 {
11385 if (bit (arm_insn_r->arm_insn, 10))
11386 {
11387 if (bit (arm_insn_r->arm_insn, 6))
11388 curr_insn_type = INSN_T0;
11389 else
11390 curr_insn_type = INSN_T1;
11391 }
11392 else
11393 {
11394 if (dp_op_sz)
11395 curr_insn_type = INSN_T1;
11396 else
11397 curr_insn_type = INSN_T2;
11398 }
11399 }
11400 /* Handle VNMLA, VNMLS, VNMUL. */
11401 else if (opc1 == 0x01)
11402 {
11403 if (dp_op_sz)
11404 curr_insn_type = INSN_T1;
11405 else
11406 curr_insn_type = INSN_T2;
11407 }
11408 /* Handle VMUL. */
11409 else if (opc1 == 0x02 && !(opc3 & 0x01))
11410 {
11411 if (bit (arm_insn_r->arm_insn, 10))
11412 {
11413 if (bit (arm_insn_r->arm_insn, 6))
11414 curr_insn_type = INSN_T0;
11415 else
11416 curr_insn_type = INSN_T1;
11417 }
11418 else
11419 {
11420 if (dp_op_sz)
11421 curr_insn_type = INSN_T1;
11422 else
11423 curr_insn_type = INSN_T2;
11424 }
11425 }
11426 /* Handle VADD, VSUB. */
11427 else if (opc1 == 0x03)
11428 {
11429 if (!bit (arm_insn_r->arm_insn, 9))
11430 {
11431 if (bit (arm_insn_r->arm_insn, 6))
11432 curr_insn_type = INSN_T0;
11433 else
11434 curr_insn_type = INSN_T1;
11435 }
11436 else
11437 {
11438 if (dp_op_sz)
11439 curr_insn_type = INSN_T1;
11440 else
11441 curr_insn_type = INSN_T2;
11442 }
11443 }
11444 /* Handle VDIV. */
11445 else if (opc1 == 0x0b)
11446 {
11447 if (dp_op_sz)
11448 curr_insn_type = INSN_T1;
11449 else
11450 curr_insn_type = INSN_T2;
11451 }
11452 /* Handle all other vfp data processing instructions. */
11453 else if (opc1 == 0x0b)
11454 {
11455 /* Handle VMOV. */
11456 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11457 {
11458 if (bit (arm_insn_r->arm_insn, 4))
11459 {
11460 if (bit (arm_insn_r->arm_insn, 6))
11461 curr_insn_type = INSN_T0;
11462 else
11463 curr_insn_type = INSN_T1;
11464 }
11465 else
11466 {
11467 if (dp_op_sz)
11468 curr_insn_type = INSN_T1;
11469 else
11470 curr_insn_type = INSN_T2;
11471 }
11472 }
11473 /* Handle VNEG and VABS. */
11474 else if ((opc2 == 0x01 && opc3 == 0x01)
11475 || (opc2 == 0x00 && opc3 == 0x03))
11476 {
11477 if (!bit (arm_insn_r->arm_insn, 11))
11478 {
11479 if (bit (arm_insn_r->arm_insn, 6))
11480 curr_insn_type = INSN_T0;
11481 else
11482 curr_insn_type = INSN_T1;
11483 }
11484 else
11485 {
11486 if (dp_op_sz)
11487 curr_insn_type = INSN_T1;
11488 else
11489 curr_insn_type = INSN_T2;
11490 }
11491 }
11492 /* Handle VSQRT. */
11493 else if (opc2 == 0x01 && opc3 == 0x03)
11494 {
11495 if (dp_op_sz)
11496 curr_insn_type = INSN_T1;
11497 else
11498 curr_insn_type = INSN_T2;
11499 }
11500 /* Handle VCVT. */
11501 else if (opc2 == 0x07 && opc3 == 0x03)
11502 {
11503 if (!dp_op_sz)
11504 curr_insn_type = INSN_T1;
11505 else
11506 curr_insn_type = INSN_T2;
11507 }
11508 else if (opc3 & 0x01)
11509 {
11510 /* Handle VCVT. */
11511 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11512 {
11513 if (!bit (arm_insn_r->arm_insn, 18))
11514 curr_insn_type = INSN_T2;
11515 else
11516 {
11517 if (dp_op_sz)
11518 curr_insn_type = INSN_T1;
11519 else
11520 curr_insn_type = INSN_T2;
11521 }
11522 }
11523 /* Handle VCVT. */
11524 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11525 {
11526 if (dp_op_sz)
11527 curr_insn_type = INSN_T1;
11528 else
11529 curr_insn_type = INSN_T2;
11530 }
11531 /* Handle VCVTB, VCVTT. */
11532 else if ((opc2 & 0x0e) == 0x02)
11533 curr_insn_type = INSN_T2;
11534 /* Handle VCMP, VCMPE. */
11535 else if ((opc2 & 0x0e) == 0x04)
11536 curr_insn_type = INSN_T3;
11537 }
11538 }
11539
11540 switch (curr_insn_type)
11541 {
11542 case INSN_T0:
11543 reg_vd = reg_vd | (bit_d << 4);
11544 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11545 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11546 arm_insn_r->reg_rec_count = 2;
11547 break;
11548
11549 case INSN_T1:
11550 reg_vd = reg_vd | (bit_d << 4);
11551 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11552 arm_insn_r->reg_rec_count = 1;
11553 break;
11554
11555 case INSN_T2:
11556 reg_vd = (reg_vd << 1) | bit_d;
11557 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11558 arm_insn_r->reg_rec_count = 1;
11559 break;
11560
11561 case INSN_T3:
11562 record_buf[0] = ARM_FPSCR_REGNUM;
11563 arm_insn_r->reg_rec_count = 1;
11564 break;
11565
11566 default:
11567 gdb_assert_not_reached ("no decoding pattern found");
11568 break;
11569 }
11570
11571 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11572 return 0;
11573}
11574
60cc5e93
OJ
11575/* Handling opcode 110 insns. */
11576
11577static int
11578arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11579{
bec2ab5a 11580 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11581
11582 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11583 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11584 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11585
11586 if ((coproc & 0x0e) == 0x0a)
11587 {
11588 /* Handle extension register ld/st instructions. */
11589 if (!(op1 & 0x20))
f20f80dd 11590 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11591
11592 /* 64-bit transfers between arm core and extension registers. */
11593 if ((op1 & 0x3e) == 0x04)
f20f80dd 11594 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11595 }
11596 else
11597 {
11598 /* Handle coprocessor ld/st instructions. */
11599 if (!(op1 & 0x3a))
11600 {
11601 /* Store. */
11602 if (!op1_ebit)
11603 return arm_record_unsupported_insn (arm_insn_r);
11604 else
11605 /* Load. */
11606 return arm_record_unsupported_insn (arm_insn_r);
11607 }
11608
11609 /* Move to coprocessor from two arm core registers. */
11610 if (op1 == 0x4)
11611 return arm_record_unsupported_insn (arm_insn_r);
11612
11613 /* Move to two arm core registers from coprocessor. */
11614 if (op1 == 0x5)
11615 {
11616 uint32_t reg_t[2];
11617
11618 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11619 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11620 arm_insn_r->reg_rec_count = 2;
11621
11622 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11623 return 0;
11624 }
11625 }
11626 return arm_record_unsupported_insn (arm_insn_r);
11627}
11628
72508ac0
PO
11629/* Handling opcode 111 insns. */
11630
11631static int
11632arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11633{
60cc5e93 11634 uint32_t op, op1_sbit, op1_ebit, coproc;
72508ac0
PO
11635 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11636 struct regcache *reg_cache = arm_insn_r->regcache;
72508ac0
PO
11637
11638 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
60cc5e93
OJ
11639 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11640 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11641 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11642 op = bit (arm_insn_r->arm_insn, 4);
97dfe206
OJ
11643
11644 /* Handle arm SWI/SVC system call instructions. */
60cc5e93 11645 if (op1_sbit)
97dfe206
OJ
11646 {
11647 if (tdep->arm_syscall_record != NULL)
11648 {
11649 ULONGEST svc_operand, svc_number;
11650
11651 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11652
11653 if (svc_operand) /* OABI. */
11654 svc_number = svc_operand - 0x900000;
11655 else /* EABI. */
11656 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11657
60cc5e93 11658 return tdep->arm_syscall_record (reg_cache, svc_number);
97dfe206
OJ
11659 }
11660 else
11661 {
11662 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11663 return -1;
97dfe206
OJ
11664 }
11665 }
60cc5e93
OJ
11666
11667 if ((coproc & 0x0e) == 0x0a)
11668 {
11669 /* VFP data-processing instructions. */
11670 if (!op1_sbit && !op)
851f26ae 11671 return arm_record_vfp_data_proc_insn (arm_insn_r);
60cc5e93
OJ
11672
11673 /* Advanced SIMD, VFP instructions. */
11674 if (!op1_sbit && op)
5a578da5 11675 return arm_record_vdata_transfer_insn (arm_insn_r);
60cc5e93 11676 }
97dfe206
OJ
11677 else
11678 {
60cc5e93
OJ
11679 /* Coprocessor data operations. */
11680 if (!op1_sbit && !op)
11681 return arm_record_unsupported_insn (arm_insn_r);
11682
11683 /* Move to Coprocessor from ARM core register. */
11684 if (!op1_sbit && !op1_ebit && op)
11685 return arm_record_unsupported_insn (arm_insn_r);
11686
11687 /* Move to arm core register from coprocessor. */
11688 if (!op1_sbit && op1_ebit && op)
11689 {
11690 uint32_t record_buf[1];
11691
11692 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11693 if (record_buf[0] == 15)
11694 record_buf[0] = ARM_PS_REGNUM;
11695
11696 arm_insn_r->reg_rec_count = 1;
11697 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11698 record_buf);
11699 return 0;
11700 }
97dfe206 11701 }
72508ac0 11702
60cc5e93 11703 return arm_record_unsupported_insn (arm_insn_r);
72508ac0
PO
11704}
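
The syscall-number rule above is worth a concrete example: an OABI SWI carries the number in the instruction, biased by 0x900000 on ARM GNU/Linux, while an EABI SVC 0 takes it from r7. A standalone sketch with illustrative encodings and register values:

#include <stdint.h>
#include <stdio.h>

static unsigned long
svc_number (uint32_t insn, unsigned long r7)
{
  unsigned long svc_operand = insn & 0x00ffffff;

  if (svc_operand)              /* OABI: number encoded in the insn.  */
    return svc_operand - 0x900000;
  else                          /* EABI: SVC 0, number in r7.  */
    return r7;
}

int
main (void)
{
  printf ("OABI swi 0x900004 -> syscall %lu\n",
          svc_number (0xef900004, 0));  /* write on OABI ARM GNU/Linux.  */
  printf ("EABI svc 0 with r7=4 -> syscall %lu\n",
          svc_number (0xef000000, 4));
  return 0;
}
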
11705
11706/* Handling opcode 000 insns. */
11707
11708static int
11709thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11710{
11711 uint32_t record_buf[8];
11712 uint32_t reg_src1 = 0;
11713
11714 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11715
11716 record_buf[0] = ARM_PS_REGNUM;
11717 record_buf[1] = reg_src1;
11718 thumb_insn_r->reg_rec_count = 2;
11719
11720 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11721
11722 return 0;
11723}
11724
11725
11726/* Handling opcode 001 insns. */
11727
11728static int
11729thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11730{
11731 uint32_t record_buf[8];
11732 uint32_t reg_src1 = 0;
11733
11734 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11735
11736 record_buf[0] = ARM_PS_REGNUM;
11737 record_buf[1] = reg_src1;
11738 thumb_insn_r->reg_rec_count = 2;
11739
11740 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11741
11742 return 0;
11743}
11744
11745/* Handling opcode 010 insns. */
11746
11747static int
11748thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11749{
11750 struct regcache *reg_cache = thumb_insn_r->regcache;
11751 uint32_t record_buf[8], record_buf_mem[8];
11752
11753 uint32_t reg_src1 = 0, reg_src2 = 0;
11754 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11755
11756 ULONGEST u_regval[2] = {0};
11757
11758 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11759
11760 if (bit (thumb_insn_r->arm_insn, 12))
11761 {
11762 /* Handle load/store register offset. */
11763 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11764 if (opcode2 >= 12 && opcode2 <= 15)
11765 {
11766 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11767 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11768 record_buf[0] = reg_src1;
11769 thumb_insn_r->reg_rec_count = 1;
11770 }
11771 else if (opcode2 >= 8 && opcode2 <= 10)
11772 {
11773 /* STR(2), STRB(2), STRH(2) . */
11774 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11775 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11776 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11777 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11778 if (8 == opcode2)
11779 record_buf_mem[0] = 4; /* STR (2). */
11780 else if (10 == opcode2)
11781 record_buf_mem[0] = 1; /* STRB (2). */
11782 else if (9 == opcode2)
11783 record_buf_mem[0] = 2; /* STRH (2). */
11784 record_buf_mem[1] = u_regval[0] + u_regval[1];
11785 thumb_insn_r->mem_rec_count = 1;
11786 }
11787 }
11788 else if (bit (thumb_insn_r->arm_insn, 11))
11789 {
11790 /* Handle load from literal pool. */
11791 /* LDR(3). */
11792 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11793 record_buf[0] = reg_src1;
11794 thumb_insn_r->reg_rec_count = 1;
11795 }
11796 else if (opcode1)
11797 {
11798 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11799 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11800 if ((3 == opcode2) && (!opcode3))
11801 {
11802 /* Branch with exchange. */
11803 record_buf[0] = ARM_PS_REGNUM;
11804 thumb_insn_r->reg_rec_count = 1;
11805 }
11806 else
11807 {
1f33efec
YQ
11808 /* Format 8; special data processing insns. */
11809 record_buf[0] = ARM_PS_REGNUM;
11810 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11811 | bits (thumb_insn_r->arm_insn, 0, 2));
72508ac0
PO
11812 thumb_insn_r->reg_rec_count = 2;
11813 }
11814 }
11815 else
11816 {
11817 /* Format 5; data processing insns. */
11818 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11819 if (bit (thumb_insn_r->arm_insn, 7))
11820 {
11821 reg_src1 = reg_src1 + 8;
11822 }
11823 record_buf[0] = ARM_PS_REGNUM;
11824 record_buf[1] = reg_src1;
11825 thumb_insn_r->reg_rec_count = 2;
11826 }
11827
11828 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11829 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11830 record_buf_mem);
11831
11832 return 0;
11833}
11834
11835/* Handling opcode 001 insns. */
11836
11837static int
11838thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11839{
11840 struct regcache *reg_cache = thumb_insn_r->regcache;
11841 uint32_t record_buf[8], record_buf_mem[8];
11842
11843 uint32_t reg_src1 = 0;
11844 uint32_t opcode = 0, immed_5 = 0;
11845
11846 ULONGEST u_regval = 0;
11847
11848 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11849
11850 if (opcode)
11851 {
11852 /* LDR(1). */
11853 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11854 record_buf[0] = reg_src1;
11855 thumb_insn_r->reg_rec_count = 1;
11856 }
11857 else
11858 {
11859 /* STR(1). */
11860 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11861 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11862 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11863 record_buf_mem[0] = 4;
11864 record_buf_mem[1] = u_regval + (immed_5 * 4);
11865 thumb_insn_r->mem_rec_count = 1;
11866 }
11867
11868 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11869 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11870 record_buf_mem);
11871
11872 return 0;
11873}
11874
11875/* Handling opcode 100 insns. */
11876
11877static int
11878thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11879{
11880 struct regcache *reg_cache = thumb_insn_r->regcache;
11881 uint32_t record_buf[8], record_buf_mem[8];
11882
11883 uint32_t reg_src1 = 0;
11884 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11885
11886 ULONGEST u_regval = 0;
11887
11888 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11889
11890 if (3 == opcode)
11891 {
11892 /* LDR(4). */
11893 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11894 record_buf[0] = reg_src1;
11895 thumb_insn_r->reg_rec_count = 1;
11896 }
11897 else if (1 == opcode)
11898 {
11899 /* LDRH(1). */
11900 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11901 record_buf[0] = reg_src1;
11902 thumb_insn_r->reg_rec_count = 1;
11903 }
11904 else if (2 == opcode)
11905 {
11906 /* STR(3). */
11907 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11908 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11909 record_buf_mem[0] = 4;
11910 record_buf_mem[1] = u_regval + (immed_8 * 4);
11911 thumb_insn_r->mem_rec_count = 1;
11912 }
11913 else if (0 == opcode)
11914 {
11915 /* STRH(1). */
11916 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11917 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11918 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11919 record_buf_mem[0] = 2;
11920 record_buf_mem[1] = u_regval + (immed_5 * 2);
11921 thumb_insn_r->mem_rec_count = 1;
11922 }
11923
11924 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11925 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11926 record_buf_mem);
11927
11928 return 0;
11929}
11930
11931/* Handling opcode 101 insns. */
11932
11933static int
11934thumb_record_misc (insn_decode_record *thumb_insn_r)
11935{
11936 struct regcache *reg_cache = thumb_insn_r->regcache;
11937
11938 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11939 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11940 uint32_t index = 0, start_address = 0;
72508ac0
PO
11941 uint32_t record_buf[24], record_buf_mem[48];
11942 uint32_t reg_src1;
11943
11944 ULONGEST u_regval = 0;
11945
11946 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11947 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11948 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11949
11950 if (14 == opcode2)
11951 {
11952 /* POP. */
11953 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11954 while (register_bits)
f969241e
OJ
11955 {
11956 if (register_bits & 0x00000001)
11957 record_buf[index++] = register_count;
11958 register_bits = register_bits >> 1;
11959 register_count++;
11960 }
11961 record_buf[index++] = ARM_PS_REGNUM;
11962 record_buf[index++] = ARM_SP_REGNUM;
11963 thumb_insn_r->reg_rec_count = index;
72508ac0
PO
11964 }
11965 else if (10 == opcode2)
11966 {
11967 /* PUSH. */
11968 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
9904a494 11969 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
72508ac0
PO
11970 while (register_bits)
11971 {
11972 if (register_bits & 0x00000001)
11973 register_count++;
11974 register_bits = register_bits >> 1;
11975 }
11976 start_address = u_regval - \
11977 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11978 thumb_insn_r->mem_rec_count = register_count;
11979 while (register_count)
11980 {
11981 record_buf_mem[(register_count * 2) - 1] = start_address;
11982 record_buf_mem[(register_count * 2) - 2] = 4;
11983 start_address = start_address + 4;
11984 register_count--;
11985 }
11986 record_buf[0] = ARM_SP_REGNUM;
11987 thumb_insn_r->reg_rec_count = 1;
11988 }
11989 else if (0x1E == opcode1)
11990 {
11991 /* BKPT insn. */
11992 /* Handle enhanced software breakpoint insn, BKPT. */
11993      /* The CPSR is changed to execute in ARM state, disabling normal
11994         interrupts and entering abort mode.  */
11995      /* The PC is set according to the high vector configuration.  */
11996      /* If the user hits the breakpoint and then types reverse, we need
11997         to go back with the previous CPSR and program counter.  */
11998 record_buf[0] = ARM_PS_REGNUM;
11999 record_buf[1] = ARM_LR_REGNUM;
12000 thumb_insn_r->reg_rec_count = 2;
12001 /* We need to save SPSR value, which is not yet done. */
12002 printf_unfiltered (_("Process record does not support instruction "
12003 "0x%0x at address %s.\n"),
12004 thumb_insn_r->arm_insn,
12005 paddress (thumb_insn_r->gdbarch,
12006 thumb_insn_r->this_addr));
12007 return -1;
12008 }
12009 else if ((0 == opcode) || (1 == opcode))
12010 {
12011 /* ADD(5), ADD(6). */
12012 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12013 record_buf[0] = reg_src1;
12014 thumb_insn_r->reg_rec_count = 1;
12015 }
12016 else if (2 == opcode)
12017 {
12018 /* ADD(7), SUB(4). */
12019 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12020 record_buf[0] = ARM_SP_REGNUM;
12021 thumb_insn_r->reg_rec_count = 1;
12022 }
12023
12024 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12025 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12026 record_buf_mem);
12027
12028 return 0;
12029}
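
/* Worked example for the PUSH case above (the values are hypothetical):
   with SP = 0x20001000 and PUSH {r4, r5}, register_bits = 0x30, so
   register_count = 2 and bit 8 (the LR bit) is 0.  start_address is then
   0x20001000 - 4 * (0 + 2) = 0x20000ff8, giving two 4-byte memory records
   at 0x20000ff8 and 0x20000ffc, with SP as the only register record.  */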
12030
12031/* Handling opcode 110 insns. */
12032
12033static int
12034thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12035{
12036 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12037 struct regcache *reg_cache = thumb_insn_r->regcache;
12038
12039 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12040 uint32_t reg_src1 = 0;
12041 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 12042 uint32_t index = 0, start_address = 0;
72508ac0
PO
12043 uint32_t record_buf[24], record_buf_mem[48];
12044
12045 ULONGEST u_regval = 0;
12046
12047 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12048 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12049
12050 if (1 == opcode2)
12051 {
12052
12053 /* LDMIA. */
12054 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12055 /* Get Rn. */
12056 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12057 while (register_bits)
12058 {
12059 if (register_bits & 0x00000001)
f969241e 12060 record_buf[index++] = register_count;
72508ac0 12061 register_bits = register_bits >> 1;
f969241e 12062 register_count++;
72508ac0 12063 }
f969241e
OJ
12064 record_buf[index++] = reg_src1;
12065 thumb_insn_r->reg_rec_count = index;
72508ac0
PO
12066 }
12067 else if (0 == opcode2)
12068 {
12069      /* Handle STMIA (a.k.a. STMEA).  */
12070 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12071 /* Get Rn. */
12072 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12073 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12074 while (register_bits)
12075 {
12076 if (register_bits & 0x00000001)
12077 register_count++;
12078 register_bits = register_bits >> 1;
12079 }
12080 start_address = u_regval;
12081 thumb_insn_r->mem_rec_count = register_count;
12082 while (register_count)
12083 {
12084 record_buf_mem[(register_count * 2) - 1] = start_address;
12085 record_buf_mem[(register_count * 2) - 2] = 4;
12086 start_address = start_address + 4;
12087 register_count--;
12088 }
12089 }
12090 else if (0x1F == opcode1)
12091 {
12092 /* Handle arm syscall insn. */
97dfe206 12093 if (tdep->arm_syscall_record != NULL)
72508ac0 12094 {
97dfe206
OJ
12095 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12096 ret = tdep->arm_syscall_record (reg_cache, u_regval);
72508ac0
PO
12097 }
12098 else
12099 {
12100 printf_unfiltered (_("no syscall record support\n"));
12101 return -1;
12102 }
12103 }
12104
12105  /* B (1), the conditional branch, is automatically taken care of in
12106     process_record, as the PC is saved there.  */
12107
12108 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12109 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12110 record_buf_mem);
12111
12112 return ret;
12113}
12114
12115/* Handling opcode 111 insns. */
12116
12117static int
12118thumb_record_branch (insn_decode_record *thumb_insn_r)
12119{
12120 uint32_t record_buf[8];
12121 uint32_t bits_h = 0;
12122
12123 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12124
12125 if (2 == bits_h || 3 == bits_h)
12126 {
12127 /* BL */
12128 record_buf[0] = ARM_LR_REGNUM;
12129 thumb_insn_r->reg_rec_count = 1;
12130 }
12131 else if (1 == bits_h)
12132 {
12133 /* BLX(1). */
12134 record_buf[0] = ARM_PS_REGNUM;
12135 record_buf[1] = ARM_LR_REGNUM;
12136 thumb_insn_r->reg_rec_count = 2;
12137 }
12138
12139  /* B (2) is automatically taken care of in process_record, as the PC
12140     is saved there.  */
12141
12142 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12143
12144 return 0;
12145}
12146
c6ec2b30
OJ
12147/* Handler for thumb2 load/store multiple instructions. */
12148
12149static int
12150thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12151{
12152 struct regcache *reg_cache = thumb2_insn_r->regcache;
12153
12154 uint32_t reg_rn, op;
12155 uint32_t register_bits = 0, register_count = 0;
12156 uint32_t index = 0, start_address = 0;
12157 uint32_t record_buf[24], record_buf_mem[48];
12158
12159 ULONGEST u_regval = 0;
12160
12161 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12162 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12163
12164 if (0 == op || 3 == op)
12165 {
12166 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12167 {
12168 /* Handle RFE instruction. */
12169 record_buf[0] = ARM_PS_REGNUM;
12170 thumb2_insn_r->reg_rec_count = 1;
12171 }
12172 else
12173 {
12174 /* Handle SRS instruction after reading banked SP. */
12175 return arm_record_unsupported_insn (thumb2_insn_r);
12176 }
12177 }
12178 else if (1 == op || 2 == op)
12179 {
12180 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12181 {
12182 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12183 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12184 while (register_bits)
12185 {
12186 if (register_bits & 0x00000001)
12187 record_buf[index++] = register_count;
12188
12189 register_count++;
12190 register_bits = register_bits >> 1;
12191 }
12192 record_buf[index++] = reg_rn;
12193 record_buf[index++] = ARM_PS_REGNUM;
12194 thumb2_insn_r->reg_rec_count = index;
12195 }
12196 else
12197 {
12198 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12199 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12200 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12201 while (register_bits)
12202 {
12203 if (register_bits & 0x00000001)
12204 register_count++;
12205
12206 register_bits = register_bits >> 1;
12207 }
12208
12209 if (1 == op)
12210 {
12211	      /* Start address calculation for STM/STMIA/STMEA.  */
12212 start_address = u_regval;
12213 }
12214 else if (2 == op)
12215 {
12216	      /* Start address calculation for STMDB/STMFD.  */
12217 start_address = u_regval - register_count * 4;
12218 }
12219
12220 thumb2_insn_r->mem_rec_count = register_count;
12221 while (register_count)
12222 {
12223 record_buf_mem[register_count * 2 - 1] = start_address;
12224 record_buf_mem[register_count * 2 - 2] = 4;
12225 start_address = start_address + 4;
12226 register_count--;
12227 }
12228 record_buf[0] = reg_rn;
12229 record_buf[1] = ARM_PS_REGNUM;
12230 thumb2_insn_r->reg_rec_count = 2;
12231 }
12232 }
12233
12234 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12235 record_buf_mem);
12236 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12237 record_buf);
12238 return ARM_RECORD_SUCCESS;
12239}
12240
12241/* Handler for thumb2 load/store (dual/exclusive) and table branch
12242 instructions. */
12243
12244static int
12245thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12246{
12247 struct regcache *reg_cache = thumb2_insn_r->regcache;
12248
12249 uint32_t reg_rd, reg_rn, offset_imm;
12250 uint32_t reg_dest1, reg_dest2;
12251 uint32_t address, offset_addr;
12252 uint32_t record_buf[8], record_buf_mem[8];
12253 uint32_t op1, op2, op3;
c6ec2b30
OJ
12254
12255 ULONGEST u_regval[2];
12256
12257 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12258 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12259 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12260
12261 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12262 {
12263      if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12264 {
12265 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12266 record_buf[0] = reg_dest1;
12267 record_buf[1] = ARM_PS_REGNUM;
12268 thumb2_insn_r->reg_rec_count = 2;
12269 }
12270
12271 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12272 {
12273 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12274 record_buf[2] = reg_dest2;
12275 thumb2_insn_r->reg_rec_count = 3;
12276 }
12277 }
12278 else
12279 {
12280 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12281 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12282
12283 if (0 == op1 && 0 == op2)
12284 {
12285 /* Handle STREX. */
12286 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12287 address = u_regval[0] + (offset_imm * 4);
12288 record_buf_mem[0] = 4;
12289 record_buf_mem[1] = address;
12290 thumb2_insn_r->mem_rec_count = 1;
12291 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12292 record_buf[0] = reg_rd;
12293 thumb2_insn_r->reg_rec_count = 1;
12294 }
12295 else if (1 == op1 && 0 == op2)
12296 {
12297 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12298 record_buf[0] = reg_rd;
12299 thumb2_insn_r->reg_rec_count = 1;
12300 address = u_regval[0];
12301 record_buf_mem[1] = address;
12302
12303 if (4 == op3)
12304 {
12305 /* Handle STREXB. */
12306 record_buf_mem[0] = 1;
12307 thumb2_insn_r->mem_rec_count = 1;
12308 }
12309 else if (5 == op3)
12310 {
12311 /* Handle STREXH. */
12312              record_buf_mem[0] = 2;
12313 thumb2_insn_r->mem_rec_count = 1;
12314 }
12315 else if (7 == op3)
12316 {
12317 /* Handle STREXD. */
12318 address = u_regval[0];
12319 record_buf_mem[0] = 4;
12320 record_buf_mem[2] = 4;
12321 record_buf_mem[3] = address + 4;
12322 thumb2_insn_r->mem_rec_count = 2;
12323 }
12324 }
12325 else
12326 {
12327 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12328
12329 if (bit (thumb2_insn_r->arm_insn, 24))
12330 {
12331 if (bit (thumb2_insn_r->arm_insn, 23))
12332 offset_addr = u_regval[0] + (offset_imm * 4);
12333 else
12334 offset_addr = u_regval[0] - (offset_imm * 4);
12335
12336 address = offset_addr;
12337 }
12338 else
12339 address = u_regval[0];
12340
12341 record_buf_mem[0] = 4;
12342 record_buf_mem[1] = address;
12343 record_buf_mem[2] = 4;
12344 record_buf_mem[3] = address + 4;
12345 thumb2_insn_r->mem_rec_count = 2;
12346 record_buf[0] = reg_rn;
12347 thumb2_insn_r->reg_rec_count = 1;
12348 }
12349 }
12350
12351 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12352 record_buf);
12353 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12354 record_buf_mem);
12355 return ARM_RECORD_SUCCESS;
12356}
12357
12358/* Handler for thumb2 data processing (shift register and modified immediate)
12359 instructions. */
12360
12361static int
12362thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12363{
12364 uint32_t reg_rd, op;
12365 uint32_t record_buf[8];
12366
12367 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12368 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12369
12370 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12371 {
12372 record_buf[0] = ARM_PS_REGNUM;
12373 thumb2_insn_r->reg_rec_count = 1;
12374 }
12375 else
12376 {
12377 record_buf[0] = reg_rd;
12378 record_buf[1] = ARM_PS_REGNUM;
12379 thumb2_insn_r->reg_rec_count = 2;
12380 }
12381
12382 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12383 record_buf);
12384 return ARM_RECORD_SUCCESS;
12385}
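
/* A note on the special case above, based on the ARM ARM encoding tables
   rather than on anything else in this file: when Rd is 15, the op values
   0, 4, 8 and 13 are the flag-setting forms TST, TEQ, CMN and CMP, which
   write no general-purpose register, so only the CPSR is recorded; every
   other case clobbers Rd as well as the flags, hence the (Rd, CPSR)
   pair.  */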
12386
12387/* Generic handler for thumb2 instructions which affect the destination and
12388   PS registers.  */
12389
12390static int
12391thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12392{
12393 uint32_t reg_rd;
12394 uint32_t record_buf[8];
12395
12396 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12397
12398 record_buf[0] = reg_rd;
12399 record_buf[1] = ARM_PS_REGNUM;
12400 thumb2_insn_r->reg_rec_count = 2;
12401
12402 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12403 record_buf);
12404 return ARM_RECORD_SUCCESS;
12405}
12406
12407/* Handler for thumb2 branch and miscellaneous control instructions. */
12408
12409static int
12410thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12411{
12412 uint32_t op, op1, op2;
12413 uint32_t record_buf[8];
12414
12415 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12416 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12417 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12418
12419 /* Handle MSR insn. */
12420 if (!(op1 & 0x2) && 0x38 == op)
12421 {
12422 if (!(op2 & 0x3))
12423 {
12424 /* CPSR is going to be changed. */
12425 record_buf[0] = ARM_PS_REGNUM;
12426 thumb2_insn_r->reg_rec_count = 1;
12427 }
12428 else
12429 {
12430 arm_record_unsupported_insn(thumb2_insn_r);
12431 return -1;
12432 }
12433 }
12434 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12435 {
12436 /* BLX. */
12437 record_buf[0] = ARM_PS_REGNUM;
12438 record_buf[1] = ARM_LR_REGNUM;
12439 thumb2_insn_r->reg_rec_count = 2;
12440 }
12441
12442 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12443 record_buf);
12444 return ARM_RECORD_SUCCESS;
12445}
12446
12447/* Handler for thumb2 store single data item instructions. */
12448
12449static int
12450thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12451{
12452 struct regcache *reg_cache = thumb2_insn_r->regcache;
12453
12454 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12455 uint32_t address, offset_addr;
12456 uint32_t record_buf[8], record_buf_mem[8];
12457 uint32_t op1, op2;
12458
12459 ULONGEST u_regval[2];
12460
12461 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12462 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12463 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12464 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12465
12466 if (bit (thumb2_insn_r->arm_insn, 23))
12467 {
12468 /* T2 encoding. */
12469 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12470 offset_addr = u_regval[0] + offset_imm;
12471 address = offset_addr;
12472 }
12473 else
12474 {
12475 /* T3 encoding. */
12476 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12477 {
12478 /* Handle STRB (register). */
12479 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12480 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12481 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12482 offset_addr = u_regval[1] << shift_imm;
12483 address = u_regval[0] + offset_addr;
12484 }
12485 else
12486 {
12487 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12488 if (bit (thumb2_insn_r->arm_insn, 10))
12489 {
12490 if (bit (thumb2_insn_r->arm_insn, 9))
12491 offset_addr = u_regval[0] + offset_imm;
12492 else
12493 offset_addr = u_regval[0] - offset_imm;
12494
12495 address = offset_addr;
12496 }
12497 else
12498 address = u_regval[0];
12499 }
12500 }
12501
12502 switch (op1)
12503 {
12504 /* Store byte instructions. */
12505 case 4:
12506 case 0:
12507 record_buf_mem[0] = 1;
12508 break;
12509 /* Store half word instructions. */
12510 case 1:
12511 case 5:
12512 record_buf_mem[0] = 2;
12513 break;
12514 /* Store word instructions. */
12515 case 2:
12516 case 6:
12517 record_buf_mem[0] = 4;
12518 break;
12519
12520 default:
12521 gdb_assert_not_reached ("no decoding pattern found");
12522 break;
12523 }
12524
12525 record_buf_mem[1] = address;
12526 thumb2_insn_r->mem_rec_count = 1;
12527 record_buf[0] = reg_rn;
12528 thumb2_insn_r->reg_rec_count = 1;
12529
12530 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12531 record_buf);
12532 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12533 record_buf_mem);
12534 return ARM_RECORD_SUCCESS;
12535}
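
/* Worked example for the STRB (register) path above (the values are
   hypothetical): with Rn holding 0x20000000, Rm holding 0x10 and a shift
   immediate of 2, offset_addr = 0x10 << 2 = 0x40, so a single 1-byte
   memory record is taken at 0x20000040 and Rn is the only register
   record.  */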
12536
12537/* Handler for thumb2 load memory hints instructions. */
12538
12539static int
12540thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12541{
12542 uint32_t record_buf[8];
12543 uint32_t reg_rt, reg_rn;
12544
12545 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12546 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12547
12548 if (ARM_PC_REGNUM != reg_rt)
12549 {
12550 record_buf[0] = reg_rt;
12551 record_buf[1] = reg_rn;
12552 record_buf[2] = ARM_PS_REGNUM;
12553 thumb2_insn_r->reg_rec_count = 3;
12554
12555 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12556 record_buf);
12557 return ARM_RECORD_SUCCESS;
12558 }
12559
12560 return ARM_RECORD_FAILURE;
12561}
12562
12563/* Handler for thumb2 load word instructions. */
12564
12565static int
12566thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12567{
c6ec2b30
OJ
12568 uint32_t record_buf[8];
12569
12570 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12571 record_buf[1] = ARM_PS_REGNUM;
12572 thumb2_insn_r->reg_rec_count = 2;
12573
12574 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12575 record_buf);
12576 return ARM_RECORD_SUCCESS;
12577}
12578
12579/* Handler for thumb2 long multiply, long multiply accumulate, and
12580 divide instructions. */
12581
12582static int
12583thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12584{
12585 uint32_t opcode1 = 0, opcode2 = 0;
12586 uint32_t record_buf[8];
c6ec2b30
OJ
12587
12588 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12589 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12590
12591 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12592 {
12593      /* Handle the long multiply and long multiply accumulate insns:
12594         SMLAL(S), SMULL(S), UMLAL(S), UMULL(S).  */
12595 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12596 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12597 record_buf[2] = ARM_PS_REGNUM;
12598 thumb2_insn_r->reg_rec_count = 3;
12599 }
12600  else if (1 == opcode1 || 3 == opcode1)
12601 {
12602 /* Handle SDIV and UDIV. */
12603 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12604 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12605 record_buf[2] = ARM_PS_REGNUM;
12606 thumb2_insn_r->reg_rec_count = 3;
12607 }
12608 else
12609 return ARM_RECORD_FAILURE;
12610
12611 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12612 record_buf);
12613 return ARM_RECORD_SUCCESS;
12614}
12615
60cc5e93
OJ
12616/* Record handler for thumb32 coprocessor instructions. */
12617
12618static int
12619thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12620{
12621 if (bit (thumb2_insn_r->arm_insn, 25))
12622 return arm_record_coproc_data_proc (thumb2_insn_r);
12623 else
12624 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12625}
12626
1e1b6563
OJ
12627/* Record handler for Advanced SIMD structure load/store instructions.  */
12628
12629static int
12630thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12631{
12632 struct regcache *reg_cache = thumb2_insn_r->regcache;
12633 uint32_t l_bit, a_bit, b_bits;
12634 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12635 uint32_t reg_rn, reg_vd, address, f_elem;
1e1b6563
OJ
12636 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12637 uint8_t f_ebytes;
12638
12639 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12640 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12641 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12642 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12643 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12644 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12645 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
1e1b6563
OJ
12646 f_elem = 8 / f_ebytes;
12647
12648 if (!l_bit)
12649 {
12650 ULONGEST u_regval = 0;
12651 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12652 address = u_regval;
12653
12654 if (!a_bit)
12655 {
12656 /* Handle VST1. */
12657 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12658 {
12659 if (b_bits == 0x07)
12660 bf_regs = 1;
12661 else if (b_bits == 0x0a)
12662 bf_regs = 2;
12663 else if (b_bits == 0x06)
12664 bf_regs = 3;
12665 else if (b_bits == 0x02)
12666 bf_regs = 4;
12667 else
12668 bf_regs = 0;
12669
12670 for (index_r = 0; index_r < bf_regs; index_r++)
12671 {
12672 for (index_e = 0; index_e < f_elem; index_e++)
12673 {
12674 record_buf_mem[index_m++] = f_ebytes;
12675 record_buf_mem[index_m++] = address;
12676 address = address + f_ebytes;
12677 thumb2_insn_r->mem_rec_count += 1;
12678 }
12679 }
12680 }
12681 /* Handle VST2. */
12682 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12683 {
12684 if (b_bits == 0x09 || b_bits == 0x08)
12685 bf_regs = 1;
12686 else if (b_bits == 0x03)
12687 bf_regs = 2;
12688 else
12689 bf_regs = 0;
12690
12691 for (index_r = 0; index_r < bf_regs; index_r++)
12692 for (index_e = 0; index_e < f_elem; index_e++)
12693 {
12694 for (loop_t = 0; loop_t < 2; loop_t++)
12695 {
12696 record_buf_mem[index_m++] = f_ebytes;
12697 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12698 thumb2_insn_r->mem_rec_count += 1;
12699 }
12700 address = address + (2 * f_ebytes);
12701 }
12702 }
12703 /* Handle VST3. */
12704 else if ((b_bits & 0x0e) == 0x04)
12705 {
12706 for (index_e = 0; index_e < f_elem; index_e++)
12707 {
12708 for (loop_t = 0; loop_t < 3; loop_t++)
12709 {
12710 record_buf_mem[index_m++] = f_ebytes;
12711 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12712 thumb2_insn_r->mem_rec_count += 1;
12713 }
12714 address = address + (3 * f_ebytes);
12715 }
12716 }
12717 /* Handle VST4. */
12718 else if (!(b_bits & 0x0e))
12719 {
12720 for (index_e = 0; index_e < f_elem; index_e++)
12721 {
12722 for (loop_t = 0; loop_t < 4; loop_t++)
12723 {
12724 record_buf_mem[index_m++] = f_ebytes;
12725 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12726 thumb2_insn_r->mem_rec_count += 1;
12727 }
12728 address = address + (4 * f_ebytes);
12729 }
12730 }
12731 }
12732 else
12733 {
12734 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12735
12736 if (bft_size == 0x00)
12737 f_ebytes = 1;
12738 else if (bft_size == 0x01)
12739 f_ebytes = 2;
12740 else if (bft_size == 0x02)
12741 f_ebytes = 4;
12742 else
12743 f_ebytes = 0;
12744
12745 /* Handle VST1. */
12746 if (!(b_bits & 0x0b) || b_bits == 0x08)
12747 thumb2_insn_r->mem_rec_count = 1;
12748 /* Handle VST2. */
12749 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12750 thumb2_insn_r->mem_rec_count = 2;
12751 /* Handle VST3. */
12752 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12753 thumb2_insn_r->mem_rec_count = 3;
12754 /* Handle VST4. */
12755 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12756 thumb2_insn_r->mem_rec_count = 4;
12757
12758 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12759 {
12760	      record_buf_mem[index_m * 2] = f_ebytes;
12761	      record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12762 }
12763 }
12764 }
12765 else
12766 {
12767 if (!a_bit)
12768 {
12769 /* Handle VLD1. */
12770 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12771 thumb2_insn_r->reg_rec_count = 1;
12772 /* Handle VLD2. */
12773 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12774 thumb2_insn_r->reg_rec_count = 2;
12775 /* Handle VLD3. */
12776 else if ((b_bits & 0x0e) == 0x04)
12777 thumb2_insn_r->reg_rec_count = 3;
12778 /* Handle VLD4. */
12779 else if (!(b_bits & 0x0e))
12780 thumb2_insn_r->reg_rec_count = 4;
12781 }
12782 else
12783 {
12784 /* Handle VLD1. */
12785 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12786 thumb2_insn_r->reg_rec_count = 1;
12787 /* Handle VLD2. */
12788 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12789 thumb2_insn_r->reg_rec_count = 2;
12790 /* Handle VLD3. */
12791 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12792 thumb2_insn_r->reg_rec_count = 3;
12793 /* Handle VLD4. */
12794 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12795 thumb2_insn_r->reg_rec_count = 4;
12796
12797 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12798 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12799 }
12800 }
12801
12802 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12803 {
12804 record_buf[index_r] = reg_rn;
12805 thumb2_insn_r->reg_rec_count += 1;
12806 }
12807
12808 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12809 record_buf);
12810 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12811 record_buf_mem);
12812 return 0;
12813}
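
/* Worked example for the multiple-element VST1 case above (the values are
   hypothetical): if bits 6-7 of the insn are 0b10, then f_ebytes = 4 and
   f_elem = 2; with b_bits = 0x02 four registers are stored (bf_regs = 4),
   so eight consecutive 4-byte memory records are taken starting at the
   address read from Rn.  */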
12814
c6ec2b30
OJ
12815/* Decodes thumb2 instruction type and invokes its record handler. */
12816
12817static unsigned int
12818thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12819{
12820 uint32_t op, op1, op2;
12821
12822 op = bit (thumb2_insn_r->arm_insn, 15);
12823 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12824 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12825
12826 if (op1 == 0x01)
12827 {
12828 if (!(op2 & 0x64 ))
12829 {
12830 /* Load/store multiple instruction. */
12831 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12832 }
12833 else if (!((op2 & 0x64) ^ 0x04))
12834 {
12835 /* Load/store (dual/exclusive) and table branch instruction. */
12836 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12837 }
12838 else if (!((op2 & 0x20) ^ 0x20))
12839 {
12840 /* Data-processing (shifted register). */
12841 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12842 }
12843 else if (op2 & 0x40)
12844 {
12845 /* Co-processor instructions. */
60cc5e93 12846 return thumb2_record_coproc_insn (thumb2_insn_r);
c6ec2b30
OJ
12847 }
12848 }
12849 else if (op1 == 0x02)
12850 {
12851 if (op)
12852 {
12853 /* Branches and miscellaneous control instructions. */
12854 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12855 }
12856 else if (op2 & 0x20)
12857 {
12858 /* Data-processing (plain binary immediate) instruction. */
12859 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12860 }
12861 else
12862 {
12863 /* Data-processing (modified immediate). */
12864 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12865 }
12866 }
12867 else if (op1 == 0x03)
12868 {
12869 if (!(op2 & 0x71 ))
12870 {
12871 /* Store single data item. */
12872 return thumb2_record_str_single_data (thumb2_insn_r);
12873 }
12874 else if (!((op2 & 0x71) ^ 0x10))
12875 {
12876 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12877 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
c6ec2b30
OJ
12878 }
12879 else if (!((op2 & 0x67) ^ 0x01))
12880 {
12881 /* Load byte, memory hints instruction. */
12882 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12883 }
12884 else if (!((op2 & 0x67) ^ 0x03))
12885 {
12886 /* Load halfword, memory hints instruction. */
12887 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12888 }
12889 else if (!((op2 & 0x67) ^ 0x05))
12890 {
12891 /* Load word instruction. */
12892 return thumb2_record_ld_word (thumb2_insn_r);
12893 }
12894 else if (!((op2 & 0x70) ^ 0x20))
12895 {
12896 /* Data-processing (register) instruction. */
12897 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12898 }
12899 else if (!((op2 & 0x78) ^ 0x30))
12900 {
12901 /* Multiply, multiply accumulate, abs diff instruction. */
12902 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12903 }
12904 else if (!((op2 & 0x78) ^ 0x38))
12905 {
12906 /* Long multiply, long multiply accumulate, and divide. */
12907 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12908 }
12909 else if (op2 & 0x40)
12910 {
12911 /* Co-processor instructions. */
60cc5e93 12912 return thumb2_record_coproc_insn (thumb2_insn_r);
c6ec2b30
OJ
12913 }
12914 }
12915
12916 return -1;
12917}
72508ac0
PO
12918
12919/* Extract an arm/thumb/thumb2 insn depending on the size, and return 0 on
12920   success and a positive value on failure.  */
12921
12922static int
12923extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12924{
12925 gdb_byte buf[insn_size];
12926
12927 memset (&buf[0], 0, insn_size);
12928
12929 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12930 return 1;
12931 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12932 insn_size,
2959fed9 12933 gdbarch_byte_order_for_code (insn_record->gdbarch));
72508ac0
PO
12934 return 0;
12935}
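
/* Usage note for the helper above, as inferred from its callers below:
   arm_process_record first reads 2 bytes to classify the insn, then
   decode_insn re-reads it with insn_size equal to ARM_INSN_SIZE_BYTES,
   THUMB_INSN_SIZE_BYTES or THUMB2_INSN_SIZE_BYTES (4, 2 and 4 bytes
   respectively, per the usual ARM instruction sizes), so only the low
   insn_size * 8 bits of arm_insn are meaningful.  The bytes are
   interpreted in the code byte order (gdbarch_byte_order_for_code), not
   the data byte order.  */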
12936
12937typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12938
12939/* Decode an arm/thumb insn depending on its condition codes and opcodes, and
12940   dispatch it.  */
12941
12942static int
12943decode_insn (insn_decode_record *arm_record, record_type_t record_type,
01e57735 12944 uint32_t insn_size)
72508ac0
PO
12945{
12946
01e57735
YQ
12947  /* (Starting from numerical 0), bits 25, 26, 27 decode the type of arm
12948     instruction.  */
0fa9c223 12949 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
72508ac0
PO
12950 {
12951 arm_record_data_proc_misc_ld_str, /* 000. */
12952 arm_record_data_proc_imm, /* 001. */
12953 arm_record_ld_st_imm_offset, /* 010. */
12954 arm_record_ld_st_reg_offset, /* 011. */
12955 arm_record_ld_st_multiple, /* 100. */
12956 arm_record_b_bl, /* 101. */
60cc5e93 12957 arm_record_asimd_vfp_coproc, /* 110. */
72508ac0
PO
12958 arm_record_coproc_data_proc /* 111. */
12959 };
12960
01e57735
YQ
12961  /* (Starting from numerical 0), bits 13, 14, 15 decode the type of thumb
12962     instruction.  */
0fa9c223 12963 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
72508ac0
PO
12964  {
12965 thumb_record_shift_add_sub, /* 000. */
12966 thumb_record_add_sub_cmp_mov, /* 001. */
12967 thumb_record_ld_st_reg_offset, /* 010. */
12968 thumb_record_ld_st_imm_offset, /* 011. */
12969 thumb_record_ld_st_stack, /* 100. */
12970 thumb_record_misc, /* 101. */
12971 thumb_record_ldm_stm_swi, /* 110. */
12972 thumb_record_branch /* 111. */
12973 };
12974
12975 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12976 uint32_t insn_id = 0;
12977
12978 if (extract_arm_insn (arm_record, insn_size))
12979 {
12980 if (record_debug)
01e57735
YQ
12981 {
12982 printf_unfiltered (_("Process record: error reading memory at "
12983 "addr %s len = %d.\n"),
12984 paddress (arm_record->gdbarch,
12985 arm_record->this_addr), insn_size);
12986 }
72508ac0
PO
12987 return -1;
12988 }
12989 else if (ARM_RECORD == record_type)
12990 {
12991 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12992 insn_id = bits (arm_record->arm_insn, 25, 27);
ca92db2d
YQ
12993
12994 if (arm_record->cond == 0xf)
12995 ret = arm_record_extension_space (arm_record);
12996 else
01e57735 12997 {
ca92db2d
YQ
12998	      /* The insn has not fallen into the extension space, so
12999	         decode it by its opcode bits.  */
01e57735
YQ
13000 ret = arm_handle_insn[insn_id] (arm_record);
13001 }
ca92db2d
YQ
13002 if (ret != ARM_RECORD_SUCCESS)
13003 {
13004 arm_record_unsupported_insn (arm_record);
13005 ret = -1;
13006 }
72508ac0
PO
13007 }
13008 else if (THUMB_RECORD == record_type)
13009 {
13010 /* As thumb does not have condition codes, we set negative. */
13011 arm_record->cond = -1;
13012 insn_id = bits (arm_record->arm_insn, 13, 15);
13013 ret = thumb_handle_insn[insn_id] (arm_record);
ca92db2d
YQ
13014 if (ret != ARM_RECORD_SUCCESS)
13015 {
13016 arm_record_unsupported_insn (arm_record);
13017 ret = -1;
13018 }
72508ac0
PO
13019 }
13020 else if (THUMB2_RECORD == record_type)
13021 {
c6ec2b30
OJ
13022 /* As thumb does not have condition codes, we set negative. */
13023 arm_record->cond = -1;
13024
13025 /* Swap first half of 32bit thumb instruction with second half. */
13026 arm_record->arm_insn
01e57735 13027 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 13028
ca92db2d 13029 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 13030
ca92db2d 13031 if (ret != ARM_RECORD_SUCCESS)
01e57735
YQ
13032 {
13033 arm_record_unsupported_insn (arm_record);
13034 ret = -1;
13035 }
72508ac0
PO
13036 }
13037 else
13038 {
13039 /* Throw assertion. */
13040 gdb_assert_not_reached ("not a valid instruction, could not decode");
13041 }
13042
13043 return ret;
13044}
13045
13046
13047/* Cleans up local record registers and memory allocations. */
13048
13049static void
13050deallocate_reg_mem (insn_decode_record *record)
13051{
13052 xfree (record->arm_regs);
13053 xfree (record->arm_mems);
13054}
13055
13056
01e57735 13057/* Parse the current instruction and record the values of the registers and
72508ac0
PO
13058   memory that will be changed by the current instruction to record_arch_list.
13059 Return -1 if something is wrong. */
13060
13061int
01e57735
YQ
13062arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13063 CORE_ADDR insn_addr)
72508ac0
PO
13064{
13065
72508ac0
PO
13066 uint32_t no_of_rec = 0;
13067 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13068 ULONGEST t_bit = 0, insn_id = 0;
13069
13070 ULONGEST u_regval = 0;
13071
13072 insn_decode_record arm_record;
13073
13074 memset (&arm_record, 0, sizeof (insn_decode_record));
13075 arm_record.regcache = regcache;
13076 arm_record.this_addr = insn_addr;
13077 arm_record.gdbarch = gdbarch;
13078
13079
13080 if (record_debug > 1)
13081 {
13082 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 13083 "addr = %s\n",
72508ac0
PO
13084 paddress (gdbarch, arm_record.this_addr));
13085 }
13086
13087 if (extract_arm_insn (&arm_record, 2))
13088 {
13089 if (record_debug)
01e57735
YQ
13090 {
13091 printf_unfiltered (_("Process record: error reading memory at "
13092 "addr %s len = %d.\n"),
13093 paddress (arm_record.gdbarch,
13094 arm_record.this_addr), 2);
13095 }
72508ac0
PO
13096 return -1;
13097 }
13098
13099  /* Check whether the insn is a Thumb or ARM one.  */
13100
13101 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13102 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13103
13104
13105 if (!(u_regval & t_bit))
13106 {
13107 /* We are decoding arm insn. */
13108 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13109 }
13110 else
13111 {
13112 insn_id = bits (arm_record.arm_insn, 11, 15);
13113      /* Is it a Thumb2 insn?  */
13114 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
01e57735
YQ
13115 {
13116 ret = decode_insn (&arm_record, THUMB2_RECORD,
13117 THUMB2_INSN_SIZE_BYTES);
13118 }
72508ac0 13119 else
01e57735
YQ
13120 {
13121 /* We are decoding thumb insn. */
13122 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13123 }
72508ac0
PO
13124 }
13125
13126 if (0 == ret)
13127 {
13128 /* Record registers. */
25ea693b 13129 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13130 if (arm_record.arm_regs)
01e57735
YQ
13131 {
13132 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13133 {
13134 if (record_full_arch_list_add_reg
25ea693b 13135 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
01e57735
YQ
13136 ret = -1;
13137 }
13138 }
72508ac0
PO
13139 /* Record memories. */
13140 if (arm_record.arm_mems)
01e57735
YQ
13141 {
13142 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13143 {
13144 if (record_full_arch_list_add_mem
13145 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13146 arm_record.arm_mems[no_of_rec].len))
01e57735
YQ
13147 ret = -1;
13148 }
13149 }
72508ac0 13150
25ea693b 13151 if (record_full_arch_list_add_end ())
01e57735 13152 ret = -1;
72508ac0
PO
13153 }
13154
13155
13156 deallocate_reg_mem (&arm_record);
13157
13158 return ret;
13159}
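
/* A closing note on the dispatch above: the ARM/Thumb decision comes from
   the T bit of the CPSR, and a 16-bit halfword whose bits 11-15 are 0x1d,
   0x1e or 0x1f (for example 0xf000 gives 0x1e) is the first half of a
   32-bit Thumb2 insn, so THUMB2_RECORD is used for it.  arm_process_record
   itself is installed elsewhere in this file as the gdbarch process_record
   hook (presumably via set_gdbarch_process_record in the gdbarch
   initialization), which is how reverse debugging reaches these record
   handlers.  */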