/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2016 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"

#include <ctype.h>		/* XXX for isupper ().  */

#include "frame.h"
#include "inferior.h"
#include "infrun.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "dis-asm.h"		/* For register styles.  */
#include "regcache.h"
#include "reggroups.h"
#include "doublest.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2-frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "remote.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observer.h"

#include "arch/arm.h"
#include "arch/arm-get-next-pcs.h"
#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "vec.h"

#include "record.h"
#include "record-full.h"
#include <algorithm>

#include "features/arm/arm-with-m.c"
#include "features/arm/arm-with-m-fpa-layout.c"
#include "features/arm/arm-with-m-vfp-d16.c"
#include "features/arm/arm-with-iwmmxt.c"
#include "features/arm/arm-with-vfpv2.c"
#include "features/arm/arm-with-vfpv3.c"
#include "features/arm/arm-with-neon.c"

static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)	\
  MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
  {
    "auto",
    "arm",
    "thumb",
    NULL
  };

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (char *, int,
					 struct cmd_list_element *);
static void set_disassembly_style (void);

static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
  arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);

/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

int
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if the processor is currently executing in Thumb mode.  */

int
arm_is_thumb (struct regcache *regcache)
{
  ULONGEST cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Callback for VEC_lower_bound.  */

static inline int
arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)
{
  return lhs->value < rhs->value;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  return 0;
}

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure *dsc
    = get_displaced_step_closure_by_addr (memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}

/* Determine if the address specified equals any of these magic return
   values, called EXC_RETURN, defined by the ARM v6-M and v7-M
   architectures.

   From ARMv6-M Reference Manual B1.5.8
   Table B1-5 Exception return behavior

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   From ARMv7-M Reference Manual B1.5.8
   Table B1-8 EXC_RETURN definition of exception return behavior, no FP

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   Table B1-9 EXC_RETURN definition of exception return behavior, with
   FP

   EXC_RETURN    Return To        Return Stack    Frame Type
   0xFFFFFFE1    Handler mode     Main            Extended
   0xFFFFFFE9    Thread mode      Main            Extended
   0xFFFFFFED    Thread mode      Process         Extended
   0xFFFFFFF1    Handler mode     Main            Basic
   0xFFFFFFF9    Thread mode      Main            Basic
   0xFFFFFFFD    Thread mode      Process         Basic

   For more details see "B1.5.8 Exception return behavior"
   in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */

static int
arm_m_addr_is_magic (CORE_ADDR addr)
{
  switch (addr)
    {
      /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
	 the exception return behavior.  */
      case 0xffffffe1:
      case 0xffffffe9:
      case 0xffffffed:
      case 0xfffffff1:
      case 0xfffffff9:
      case 0xfffffffd:
	/* Address is magic.  */
	return 1;

      default:
	/* Address is not magic.  */
	return 0;
    }
}

/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}
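
/* For example, with arm_apcs_32 set, a Thumb return address such as
   0x8001235 is reduced to 0x8001234 above, while on an M-profile target
   the magic EXC_RETURN value 0xfffffffd is returned unchanged.  */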

/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}

/* Extract the immediate from a movw/movt instruction of encoding T.  INSN1
   is the first 16 bits of the instruction, and INSN2 is the second 16
   bits.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from a movw/movt instruction of encoding A.  INSN
   is the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))

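/* As a worked example, the 16-bit immediate 0x1234 of a movw is split by
   encoding T into imm4 = 0x1 (INSN1 bits 0-3), i = 0 (INSN1 bit 10),
   imm3 = 0x2 (INSN2 bits 12-14) and imm8 = 0x34 (INSN2 bits 0-7);
   EXTRACT_MOVW_MOVT_IMM_T simply reassembles imm4:i:imm3:imm8 back into
   0x1234.  */
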
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	       | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}
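
/* For example, thumb_expand_immediate (0x155) has COUNT == 2, so the
   switch above yields 0x55 | (0x55 << 16) == 0x00550055, while
   thumb_expand_immediate (0x400) has COUNT == 8 and takes the rotate
   path, yielding 0x80 << 24 == 0x80000000.  */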

/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		/* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	/* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
}
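
/* For example, 0xb008 (add sp, #32) and 0xbd10 (pop {r4, pc}) both match
   the patterns above, while 0xb5f0 (push {r4-r7, lr}) does not.  */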
632
29d73ae4
DJ
633/* Analyze a Thumb prologue, looking for a recognizable stack frame
634 and frame pointer. Scan until we encounter a store that could
0d39a070
DJ
635 clobber the stack frame unexpectedly, or an unknown instruction.
636 Return the last address which is definitely safe to skip for an
637 initial breakpoint. */
c906108c
SS
638
639static CORE_ADDR
29d73ae4
DJ
640thumb_analyze_prologue (struct gdbarch *gdbarch,
641 CORE_ADDR start, CORE_ADDR limit,
642 struct arm_prologue_cache *cache)
c906108c 643{
0d39a070 644 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 645 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
29d73ae4
DJ
646 int i;
647 pv_t regs[16];
648 struct pv_area *stack;
649 struct cleanup *back_to;
650 CORE_ADDR offset;
ec3d575a 651 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 652
29d73ae4
DJ
653 for (i = 0; i < 16; i++)
654 regs[i] = pv_register (i, 0);
55f960e1 655 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
29d73ae4
DJ
656 back_to = make_cleanup_free_pv_area (stack);
657
29d73ae4 658 while (start < limit)
c906108c 659 {
29d73ae4
DJ
660 unsigned short insn;
661
e17a4113 662 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 663
94c30b78 664 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 665 {
29d73ae4
DJ
666 int regno;
667 int mask;
4be43953
DJ
668
669 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
670 break;
29d73ae4
DJ
671
672 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
673 whether to save LR (R14). */
674 mask = (insn & 0xff) | ((insn & 0x100) << 6);
675
676 /* Calculate offsets of saved R0-R7 and LR. */
677 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
678 if (mask & (1 << regno))
679 {
29d73ae4
DJ
680 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
681 -4);
682 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
683 }
da59e081 684 }
1db01f22 685 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
da59e081 686 {
29d73ae4 687 offset = (insn & 0x7f) << 2; /* get scaled offset */
1db01f22
YQ
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
689 -offset);
da59e081 690 }
808f7ab1
YQ
691 else if (thumb_instruction_restores_sp (insn))
692 {
693 /* Don't scan past the epilogue. */
694 break;
695 }
0d39a070
DJ
696 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
697 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
698 (insn & 0xff) << 2);
699 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
700 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
701 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
702 bits (insn, 6, 8));
703 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
704 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
706 bits (insn, 0, 7));
707 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
708 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
709 && pv_is_constant (regs[bits (insn, 3, 5)]))
710 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
711 regs[bits (insn, 6, 8)]);
712 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
713 && pv_is_constant (regs[bits (insn, 3, 6)]))
714 {
715 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
716 int rm = bits (insn, 3, 6);
717 regs[rd] = pv_add (regs[rd], regs[rm]);
718 }
29d73ae4 719 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 720 {
29d73ae4
DJ
721 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
722 int src_reg = (insn & 0x78) >> 3;
723 regs[dst_reg] = regs[src_reg];
da59e081 724 }
29d73ae4 725 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 726 {
29d73ae4
DJ
727 /* Handle stores to the stack. Normally pushes are used,
728 but with GCC -mtpcs-frame, there may be other stores
729 in the prologue to create the frame. */
730 int regno = (insn >> 8) & 0x7;
731 pv_t addr;
732
733 offset = (insn & 0xff) << 2;
734 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
735
736 if (pv_area_store_would_trash (stack, addr))
737 break;
738
739 pv_area_store (stack, addr, 4, regs[regno]);
da59e081 740 }
0d39a070
DJ
741 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
742 {
743 int rd = bits (insn, 0, 2);
744 int rn = bits (insn, 3, 5);
745 pv_t addr;
746
747 offset = bits (insn, 6, 10) << 2;
748 addr = pv_add_constant (regs[rn], offset);
749
750 if (pv_area_store_would_trash (stack, addr))
751 break;
752
753 pv_area_store (stack, addr, 4, regs[rd]);
754 }
755 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
756 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
757 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
758 /* Ignore stores of argument registers to the stack. */
759 ;
760 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
761 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
762 /* Ignore block loads from the stack, potentially copying
763 parameters from memory. */
764 ;
765 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
766 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
768 /* Similarly ignore single loads from the stack. */
769 ;
770 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
771 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
772 /* Skip register copies, i.e. saves to another register
773 instead of the stack. */
774 ;
775 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
776 /* Recognize constant loads; even with small stacks these are necessary
777 on Thumb. */
778 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
779 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
780 {
781 /* Constant pool loads, for the same reason. */
782 unsigned int constant;
783 CORE_ADDR loc;
784
785 loc = start + 4 + bits (insn, 0, 7) * 4;
786 constant = read_memory_unsigned_integer (loc, 4, byte_order);
787 regs[bits (insn, 8, 10)] = pv_constant (constant);
788 }
db24da6d 789 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 790 {
0d39a070
DJ
791 unsigned short inst2;
792
793 inst2 = read_memory_unsigned_integer (start + 2, 2,
794 byte_order_for_code);
795
796 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
797 {
798 /* BL, BLX. Allow some special function calls when
799 skipping the prologue; GCC generates these before
800 storing arguments to the stack. */
801 CORE_ADDR nextpc;
802 int j1, j2, imm1, imm2;
803
804 imm1 = sbits (insn, 0, 10);
805 imm2 = bits (inst2, 0, 10);
806 j1 = bit (inst2, 13);
807 j2 = bit (inst2, 11);
808
809 offset = ((imm1 << 12) + (imm2 << 1));
810 offset ^= ((!j2) << 22) | ((!j1) << 23);
811
812 nextpc = start + 4 + offset;
813 /* For BLX make sure to clear the low bits. */
814 if (bit (inst2, 12) == 0)
815 nextpc = nextpc & 0xfffffffc;
816
e0634ccf
UW
817 if (!skip_prologue_function (gdbarch, nextpc,
818 bit (inst2, 12) != 0))
0d39a070
DJ
819 break;
820 }
ec3d575a 821
0963b4bd
MS
822 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
823 { registers } */
ec3d575a
UW
824 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
825 {
826 pv_t addr = regs[bits (insn, 0, 3)];
827 int regno;
828
829 if (pv_area_store_would_trash (stack, addr))
830 break;
831
832 /* Calculate offsets of saved registers. */
833 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
834 if (inst2 & (1 << regno))
835 {
836 addr = pv_add_constant (addr, -4);
837 pv_area_store (stack, addr, 4, regs[regno]);
838 }
839
840 if (insn & 0x0020)
841 regs[bits (insn, 0, 3)] = addr;
842 }
843
0963b4bd
MS
844 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
845 [Rn, #+/-imm]{!} */
ec3d575a
UW
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
847 {
848 int regno1 = bits (inst2, 12, 15);
849 int regno2 = bits (inst2, 8, 11);
850 pv_t addr = regs[bits (insn, 0, 3)];
851
852 offset = inst2 & 0xff;
853 if (insn & 0x0080)
854 addr = pv_add_constant (addr, offset);
855 else
856 addr = pv_add_constant (addr, -offset);
857
858 if (pv_area_store_would_trash (stack, addr))
859 break;
860
861 pv_area_store (stack, addr, 4, regs[regno1]);
862 pv_area_store (stack, pv_add_constant (addr, 4),
863 4, regs[regno2]);
864
865 if (insn & 0x0020)
866 regs[bits (insn, 0, 3)] = addr;
867 }
868
869 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
870 && (inst2 & 0x0c00) == 0x0c00
871 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
872 {
873 int regno = bits (inst2, 12, 15);
874 pv_t addr = regs[bits (insn, 0, 3)];
875
876 offset = inst2 & 0xff;
877 if (inst2 & 0x0200)
878 addr = pv_add_constant (addr, offset);
879 else
880 addr = pv_add_constant (addr, -offset);
881
882 if (pv_area_store_would_trash (stack, addr))
883 break;
884
885 pv_area_store (stack, addr, 4, regs[regno]);
886
887 if (inst2 & 0x0100)
888 regs[bits (insn, 0, 3)] = addr;
889 }
890
891 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
893 {
894 int regno = bits (inst2, 12, 15);
895 pv_t addr;
896
897 offset = inst2 & 0xfff;
898 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
899
900 if (pv_area_store_would_trash (stack, addr))
901 break;
902
903 pv_area_store (stack, addr, 4, regs[regno]);
904 }
905
906 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 907 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 908 /* Ignore stores of argument registers to the stack. */
0d39a070 909 ;
ec3d575a
UW
910
911 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
912 && (inst2 & 0x0d00) == 0x0c00
0d39a070 913 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 914 /* Ignore stores of argument registers to the stack. */
0d39a070 915 ;
ec3d575a 916
0963b4bd
MS
917 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
918 { registers } */
ec3d575a
UW
919 && (inst2 & 0x8000) == 0x0000
920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
921 /* Ignore block loads from the stack, potentially copying
922 parameters from memory. */
0d39a070 923 ;
ec3d575a 924
0963b4bd
MS
925 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
926 [Rn, #+/-imm] */
0d39a070 927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 928 /* Similarly ignore dual loads from the stack. */
0d39a070 929 ;
ec3d575a
UW
930
931 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
932 && (inst2 & 0x0d00) == 0x0c00
0d39a070 933 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 934 /* Similarly ignore single loads from the stack. */
0d39a070 935 ;
ec3d575a
UW
936
937 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 939 /* Similarly ignore single loads from the stack. */
0d39a070 940 ;
ec3d575a
UW
941
942 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
943 && (inst2 & 0x8000) == 0x0000)
944 {
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
948
949 regs[bits (inst2, 8, 11)]
950 = pv_add_constant (regs[bits (insn, 0, 3)],
951 thumb_expand_immediate (imm));
952 }
953
954 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
955 && (inst2 & 0x8000) == 0x0000)
0d39a070 956 {
ec3d575a
UW
957 unsigned int imm = ((bits (insn, 10, 10) << 11)
958 | (bits (inst2, 12, 14) << 8)
959 | bits (inst2, 0, 7));
960
961 regs[bits (inst2, 8, 11)]
962 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
963 }
964
965 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
967 {
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 - (CORE_ADDR) thumb_expand_immediate (imm));
975 }
976
977 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
979 {
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
983
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
986 }
987
988 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
989 {
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
993
994 regs[bits (inst2, 8, 11)]
995 = pv_constant (thumb_expand_immediate (imm));
996 }
997
998 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
999 {
621c6d5b
YQ
1000 unsigned int imm
1001 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
ec3d575a
UW
1002
1003 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1004 }
1005
1006 else if (insn == 0xea5f /* mov.w Rd,Rm */
1007 && (inst2 & 0xf0f0) == 0)
1008 {
1009 int dst_reg = (inst2 & 0x0f00) >> 8;
1010 int src_reg = inst2 & 0xf;
1011 regs[dst_reg] = regs[src_reg];
1012 }
1013
1014 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1015 {
1016 /* Constant pool loads. */
1017 unsigned int constant;
1018 CORE_ADDR loc;
1019
cac395ea 1020 offset = bits (inst2, 0, 11);
ec3d575a
UW
1021 if (insn & 0x0080)
1022 loc = start + 4 + offset;
1023 else
1024 loc = start + 4 - offset;
1025
1026 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1027 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1028 }
1029
1030 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1031 {
1032 /* Constant pool loads. */
1033 unsigned int constant;
1034 CORE_ADDR loc;
1035
cac395ea 1036 offset = bits (inst2, 0, 7) << 2;
ec3d575a
UW
1037 if (insn & 0x0080)
1038 loc = start + 4 + offset;
1039 else
1040 loc = start + 4 - offset;
1041
1042 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1043 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1044
1045 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1046 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1047 }
1048
1049 else if (thumb2_instruction_changes_pc (insn, inst2))
1050 {
1051 /* Don't scan past anything that might change control flow. */
0d39a070
DJ
1052 break;
1053 }
ec3d575a
UW
1054 else
1055 {
1056 /* The optimizer might shove anything into the prologue,
1057 so we just skip what we don't recognize. */
1058 unrecognized_pc = start;
1059 }
0d39a070
DJ
1060
1061 start += 2;
1062 }
ec3d575a 1063 else if (thumb_instruction_changes_pc (insn))
3d74b771 1064 {
ec3d575a 1065 /* Don't scan past anything that might change control flow. */
da3c6d4a 1066 break;
3d74b771 1067 }
ec3d575a
UW
1068 else
1069 {
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1073 }
29d73ae4
DJ
1074
1075 start += 2;
c906108c
SS
1076 }
1077
0d39a070
DJ
1078 if (arm_debug)
1079 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1080 paddress (gdbarch, start));
1081
ec3d575a
UW
1082 if (unrecognized_pc == 0)
1083 unrecognized_pc = start;
1084
29d73ae4
DJ
1085 if (cache == NULL)
1086 {
1087 do_cleanups (back_to);
ec3d575a 1088 return unrecognized_pc;
29d73ae4
DJ
1089 }
1090
29d73ae4
DJ
1091 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1092 {
1093 /* Frame pointer is fp. Frame size is constant. */
1094 cache->framereg = ARM_FP_REGNUM;
1095 cache->framesize = -regs[ARM_FP_REGNUM].k;
1096 }
1097 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1098 {
1099 /* Frame pointer is r7. Frame size is constant. */
1100 cache->framereg = THUMB_FP_REGNUM;
1101 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1102 }
72a2e3dc 1103 else
29d73ae4
DJ
1104 {
1105 /* Try the stack pointer... this is a bit desperate. */
1106 cache->framereg = ARM_SP_REGNUM;
1107 cache->framesize = -regs[ARM_SP_REGNUM].k;
1108 }
29d73ae4
DJ
1109
1110 for (i = 0; i < 16; i++)
1111 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1112 cache->saved_regs[i].addr = offset;
1113
1114 do_cleanups (back_to);
ec3d575a 1115 return unrecognized_pc;
c906108c
SS
1116}
1117
621c6d5b
YQ
1118
1119/* Try to analyze the instructions starting from PC, which load symbol
1120 __stack_chk_guard. Return the address of instruction after loading this
1121 symbol, set the dest register number to *BASEREG, and set the size of
1122 instructions for loading symbol in OFFSET. Return 0 if instructions are
1123 not recognized. */
1124
1125static CORE_ADDR
1126arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1127 unsigned int *destreg, int *offset)
1128{
1129 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1130 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1131 unsigned int low, high, address;
1132
1133 address = 0;
1134 if (is_thumb)
1135 {
1136 unsigned short insn1
1137 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1138
1139 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1140 {
1141 *destreg = bits (insn1, 8, 10);
1142 *offset = 2;
6ae274b7
YQ
1143 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1144 address = read_memory_unsigned_integer (address, 4,
1145 byte_order_for_code);
621c6d5b
YQ
1146 }
1147 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1148 {
1149 unsigned short insn2
1150 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1151
1152 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1153
1154 insn1
1155 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1156 insn2
1157 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1158
1159 /* movt Rd, #const */
1160 if ((insn1 & 0xfbc0) == 0xf2c0)
1161 {
1162 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1163 *destreg = bits (insn2, 8, 11);
1164 *offset = 8;
1165 address = (high << 16 | low);
1166 }
1167 }
1168 }
1169 else
1170 {
2e9e421f
UW
1171 unsigned int insn
1172 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1173
6ae274b7 1174 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
2e9e421f 1175 {
6ae274b7
YQ
1176 address = bits (insn, 0, 11) + pc + 8;
1177 address = read_memory_unsigned_integer (address, 4,
1178 byte_order_for_code);
1179
2e9e421f
UW
1180 *destreg = bits (insn, 12, 15);
1181 *offset = 4;
1182 }
1183 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1184 {
1185 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1186
1187 insn
1188 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1189
1190 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1191 {
1192 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1193 *destreg = bits (insn, 12, 15);
1194 *offset = 8;
1195 address = (high << 16 | low);
1196 }
1197 }
621c6d5b
YQ
1198 }
1199
1200 return address;
1201}
1202
1203/* Try to skip a sequence of instructions used for stack protector. If PC
0963b4bd
MS
1204 points to the first instruction of this sequence, return the address of
1205 first instruction after this sequence, otherwise, return original PC.
621c6d5b
YQ
1206
1207 On arm, this sequence of instructions is composed of mainly three steps,
1208 Step 1: load symbol __stack_chk_guard,
1209 Step 2: load from address of __stack_chk_guard,
1210 Step 3: store it to somewhere else.
1211
1212 Usually, instructions on step 2 and step 3 are the same on various ARM
1213 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1214 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1215 instructions in step 1 vary from different ARM architectures. On ARMv7,
1216 they are,
1217
1218 movw Rn, #:lower16:__stack_chk_guard
1219 movt Rn, #:upper16:__stack_chk_guard
1220
1221 On ARMv5t, it is,
1222
1223 ldr Rn, .Label
1224 ....
1225 .Lable:
1226 .word __stack_chk_guard
1227
1228 Since ldr/str is a very popular instruction, we can't use them as
1229 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1230 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1231 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1232
1233static CORE_ADDR
1234arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1235{
1236 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
22e048c9 1237 unsigned int basereg;
7cbd4a93 1238 struct bound_minimal_symbol stack_chk_guard;
621c6d5b
YQ
1239 int offset;
1240 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1241 CORE_ADDR addr;
1242
1243 /* Try to parse the instructions in Step 1. */
1244 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1245 &basereg, &offset);
1246 if (!addr)
1247 return pc;
1248
1249 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
6041179a
JB
1250 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1251 Otherwise, this sequence cannot be for stack protector. */
1252 if (stack_chk_guard.minsym == NULL
61012eef 1253 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
621c6d5b
YQ
1254 return pc;
1255
1256 if (is_thumb)
1257 {
1258 unsigned int destreg;
1259 unsigned short insn
1260 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1261
1262 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1263 if ((insn & 0xf800) != 0x6800)
1264 return pc;
1265 if (bits (insn, 3, 5) != basereg)
1266 return pc;
1267 destreg = bits (insn, 0, 2);
1268
1269 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1270 byte_order_for_code);
1271 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1272 if ((insn & 0xf800) != 0x6000)
1273 return pc;
1274 if (destreg != bits (insn, 0, 2))
1275 return pc;
1276 }
1277 else
1278 {
1279 unsigned int destreg;
1280 unsigned int insn
1281 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1282
1283 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1284 if ((insn & 0x0e500000) != 0x04100000)
1285 return pc;
1286 if (bits (insn, 16, 19) != basereg)
1287 return pc;
1288 destreg = bits (insn, 12, 15);
1289 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1290 insn = read_memory_unsigned_integer (pc + offset + 4,
1291 4, byte_order_for_code);
1292 if ((insn & 0x0e500000) != 0x04000000)
1293 return pc;
1294 if (bits (insn, 12, 15) != destreg)
1295 return pc;
1296 }
1297 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1298 on arm. */
1299 if (is_thumb)
1300 return pc + offset + 4;
1301 else
1302 return pc + offset + 8;
1303}
1304
da3c6d4a
MS
1305/* Advance the PC across any function entry prologue instructions to
1306 reach some "real" code.
34e8f22d
RE
1307
1308 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1309 prologue:
c906108c 1310
c5aa993b
JM
1311 mov ip, sp
1312 [stmfd sp!, {a1,a2,a3,a4}]
1313 stmfd sp!, {...,fp,ip,lr,pc}
ed9a39eb
JM
1314 [stfe f7, [sp, #-12]!]
1315 [stfe f6, [sp, #-12]!]
1316 [stfe f5, [sp, #-12]!]
1317 [stfe f4, [sp, #-12]!]
0963b4bd 1318 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1319
34e8f22d 1320static CORE_ADDR
6093d2eb 1321arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1322{
a89fea3c 1323 CORE_ADDR func_addr, limit_pc;
c906108c 1324
a89fea3c
JL
1325 /* See if we can determine the end of the prologue via the symbol table.
1326 If so, then return either PC, or the PC after the prologue, whichever
1327 is greater. */
1328 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1329 {
d80b854b
UW
1330 CORE_ADDR post_prologue_pc
1331 = skip_prologue_using_sal (gdbarch, func_addr);
43f3e411 1332 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
0d39a070 1333
621c6d5b
YQ
1334 if (post_prologue_pc)
1335 post_prologue_pc
1336 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1337
1338
0d39a070
DJ
1339 /* GCC always emits a line note before the prologue and another
1340 one after, even if the two are at the same address or on the
1341 same line. Take advantage of this so that we do not need to
1342 know every instruction that might appear in the prologue. We
1343 will have producer information for most binaries; if it is
1344 missing (e.g. for -gstabs), assuming the GNU tools. */
1345 if (post_prologue_pc
43f3e411
DE
1346 && (cust == NULL
1347 || COMPUNIT_PRODUCER (cust) == NULL
61012eef
GB
1348 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1349 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
0d39a070
DJ
1350 return post_prologue_pc;
1351
a89fea3c 1352 if (post_prologue_pc != 0)
0d39a070
DJ
1353 {
1354 CORE_ADDR analyzed_limit;
1355
1356 /* For non-GCC compilers, make sure the entire line is an
1357 acceptable prologue; GDB will round this function's
1358 return value up to the end of the following line so we
1359 can not skip just part of a line (and we do not want to).
1360
1361 RealView does not treat the prologue specially, but does
1362 associate prologue code with the opening brace; so this
1363 lets us skip the first line if we think it is the opening
1364 brace. */
9779414d 1365 if (arm_pc_is_thumb (gdbarch, func_addr))
0d39a070
DJ
1366 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1367 post_prologue_pc, NULL);
1368 else
1369 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1370 post_prologue_pc, NULL);
1371
1372 if (analyzed_limit != post_prologue_pc)
1373 return func_addr;
1374
1375 return post_prologue_pc;
1376 }
c906108c
SS
1377 }
1378
a89fea3c
JL
1379 /* Can't determine prologue from the symbol table, need to examine
1380 instructions. */
c906108c 1381
a89fea3c
JL
1382 /* Find an upper limit on the function prologue using the debug
1383 information. If the debug information could not be used to provide
1384 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1385 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1386 limit_pc = skip_prologue_using_sal (gdbarch, pc);
a89fea3c
JL
1387 if (limit_pc == 0)
1388 limit_pc = pc + 64; /* Magic. */
1389
c906108c 1390
29d73ae4 1391 /* Check if this is Thumb code. */
9779414d 1392 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1393 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
21daaaaf
YQ
1394 else
1395 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
c906108c 1396}
94c30b78 1397
c5aa993b 1398/* *INDENT-OFF* */
c906108c
SS
1399/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1400 This function decodes a Thumb function prologue to determine:
1401 1) the size of the stack frame
1402 2) which registers are saved on it
1403 3) the offsets of saved regs
1404 4) the offset from the stack pointer to the frame pointer
c906108c 1405
da59e081
JM
1406 A typical Thumb function prologue would create this stack frame
1407 (offsets relative to FP)
c906108c
SS
1408 old SP -> 24 stack parameters
1409 20 LR
1410 16 R7
1411 R7 -> 0 local variables (16 bytes)
1412 SP -> -12 additional stack space (12 bytes)
1413 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1414 12 bytes. The frame register is R7.
da59e081 1415
da3c6d4a
MS
1416 The comments for thumb_skip_prolog() describe the algorithm we use
1417 to detect the end of the prolog. */
c5aa993b
JM
1418/* *INDENT-ON* */
1419
c906108c 1420static void
be8626e0 1421thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1422 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1423{
1424 CORE_ADDR prologue_start;
1425 CORE_ADDR prologue_end;
c906108c 1426
b39cc962
DJ
1427 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1428 &prologue_end))
c906108c 1429 {
ec3d575a
UW
1430 /* See comment in arm_scan_prologue for an explanation of
1431 this heuristics. */
1432 if (prologue_end > prologue_start + 64)
1433 {
1434 prologue_end = prologue_start + 64;
1435 }
c906108c
SS
1436 }
1437 else
f7060f85
DJ
1438 /* We're in the boondocks: we have no idea where the start of the
1439 function is. */
1440 return;
c906108c 1441
325fac50 1442 prologue_end = std::min (prologue_end, prev_pc);
c906108c 1443
be8626e0 1444 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1445}
1446
f303bc3e
YQ
1447/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1448 otherwise. */
1449
1450static int
1451arm_instruction_restores_sp (unsigned int insn)
1452{
1453 if (bits (insn, 28, 31) != INST_NV)
1454 {
1455 if ((insn & 0x0df0f000) == 0x0080d000
1456 /* ADD SP (register or immediate). */
1457 || (insn & 0x0df0f000) == 0x0040d000
1458 /* SUB SP (register or immediate). */
1459 || (insn & 0x0ffffff0) == 0x01a0d000
1460 /* MOV SP. */
1461 || (insn & 0x0fff0000) == 0x08bd0000
1462 /* POP (LDMIA). */
1463 || (insn & 0x0fff0000) == 0x049d0000)
1464 /* POP of a single register. */
1465 return 1;
1466 }
1467
1468 return 0;
1469}
1470
0d39a070
DJ
1471/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1472 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1473 fill it in. Return the first address not recognized as a prologue
1474 instruction.
eb5492fa 1475
0d39a070
DJ
1476 We recognize all the instructions typically found in ARM prologues,
1477 plus harmless instructions which can be skipped (either for analysis
1478 purposes, or a more restrictive set that can be skipped when finding
1479 the end of the prologue). */
1480
1481static CORE_ADDR
1482arm_analyze_prologue (struct gdbarch *gdbarch,
1483 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1484 struct arm_prologue_cache *cache)
1485{
0d39a070
DJ
1486 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1487 int regno;
1488 CORE_ADDR offset, current_pc;
1489 pv_t regs[ARM_FPS_REGNUM];
1490 struct pv_area *stack;
1491 struct cleanup *back_to;
0d39a070
DJ
1492 CORE_ADDR unrecognized_pc = 0;
1493
1494 /* Search the prologue looking for instructions that set up the
96baa820 1495 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1496
96baa820
JM
1497 Be careful, however, and if it doesn't look like a prologue,
1498 don't try to scan it. If, for instance, a frameless function
1499 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1500 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1501 and other operations that rely on a knowledge of the stack
0d39a070 1502 traceback. */
d4473757 1503
4be43953
DJ
1504 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1505 regs[regno] = pv_register (regno, 0);
55f960e1 1506 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1507 back_to = make_cleanup_free_pv_area (stack);
1508
94c30b78
MS
1509 for (current_pc = prologue_start;
1510 current_pc < prologue_end;
f43845b3 1511 current_pc += 4)
96baa820 1512 {
e17a4113
UW
1513 unsigned int insn
1514 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1515
94c30b78 1516 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1517 {
4be43953 1518 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1519 continue;
1520 }
0d39a070
DJ
1521 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1522 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1523 {
1524 unsigned imm = insn & 0xff; /* immediate value */
1525 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1526 int rd = bits (insn, 12, 15);
28cd8767 1527 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1528 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1529 continue;
1530 }
0d39a070
DJ
1531 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1532 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1533 {
1534 unsigned imm = insn & 0xff; /* immediate value */
1535 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1536 int rd = bits (insn, 12, 15);
28cd8767 1537 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1538 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1539 continue;
1540 }
0963b4bd
MS
1541 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1542 [sp, #-4]! */
f43845b3 1543 {
4be43953
DJ
1544 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1545 break;
1546 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1547 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1548 regs[bits (insn, 12, 15)]);
f43845b3
MS
1549 continue;
1550 }
1551 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1552 /* stmfd sp!, {..., fp, ip, lr, pc}
1553 or
1554 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1555 {
d4473757 1556 int mask = insn & 0xffff;
ed9a39eb 1557
4be43953
DJ
1558 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1559 break;
1560
94c30b78 1561 /* Calculate offsets of saved registers. */
34e8f22d 1562 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1563 if (mask & (1 << regno))
1564 {
0963b4bd
MS
1565 regs[ARM_SP_REGNUM]
1566 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1567 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1568 }
1569 }
0d39a070
DJ
1570 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1571 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1572 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1573 {
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1575 continue;
1576 }
0d39a070
DJ
1577 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1578 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1579 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1580 {
1581 /* No need to add this to saved_regs -- it's just an arg reg. */
1582 continue;
1583 }
0963b4bd
MS
1584 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1585 { registers } */
0d39a070
DJ
1586 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1587 {
1588 /* No need to add this to saved_regs -- it's just arg regs. */
1589 continue;
1590 }
d4473757
KB
1591 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1592 {
94c30b78
MS
1593 unsigned imm = insn & 0xff; /* immediate value */
1594 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1595 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1596 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1597 }
1598 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1599 {
94c30b78
MS
1600 unsigned imm = insn & 0xff; /* immediate value */
1601 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1602 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1603 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1604 }
0963b4bd
MS
1605 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1606 [sp, -#c]! */
2af46ca0 1607 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1608 {
4be43953
DJ
1609 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1610 break;
1611
1612 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1613 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1614 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1615 }
0963b4bd
MS
1616 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1617 [sp!] */
2af46ca0 1618 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1619 {
1620 int n_saved_fp_regs;
1621 unsigned int fp_start_reg, fp_bound_reg;
1622
4be43953
DJ
1623 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1624 break;
1625
94c30b78 1626 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1627 {
d4473757
KB
1628 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs = 3;
1630 else
1631 n_saved_fp_regs = 1;
96baa820 1632 }
d4473757 1633 else
96baa820 1634 {
d4473757
KB
1635 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1636 n_saved_fp_regs = 2;
1637 else
1638 n_saved_fp_regs = 4;
96baa820 1639 }
d4473757 1640
34e8f22d 1641 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1642 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1643 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1644 {
4be43953
DJ
1645 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1646 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1647 regs[fp_start_reg++]);
96baa820 1648 }
c906108c 1649 }
0d39a070
DJ
1650 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1651 {
1652 /* Allow some special function calls when skipping the
1653 prologue; GCC generates these before storing arguments to
1654 the stack. */
1655 CORE_ADDR dest = BranchDest (current_pc, insn);
1656
e0634ccf 1657 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1658 continue;
1659 else
1660 break;
1661 }
d4473757 1662 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1663 break; /* Condition not true, exit early. */
0d39a070
DJ
1664 else if (arm_instruction_changes_pc (insn))
1665 /* Don't scan past anything that might change control flow. */
1666 break;
f303bc3e
YQ
1667 else if (arm_instruction_restores_sp (insn))
1668 {
1669 /* Don't scan past the epilogue. */
1670 break;
1671 }
d19f7eee
UW
1672 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1673 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1674 /* Ignore block loads from the stack, potentially copying
1675 parameters from memory. */
1676 continue;
1677 else if ((insn & 0xfc500000) == 0xe4100000
1678 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1679 /* Similarly ignore single loads from the stack. */
1680 continue;
0d39a070
DJ
1681 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1682 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1683 register instead of the stack. */
d4473757 1684 continue;
0d39a070
DJ
1685 else
1686 {
21daaaaf
YQ
1687	      /* The optimizer might shove anything into the prologue.  When we
1688		 are building up the cache (cache != NULL) from scanning the
1689		 prologue, we just skip what we don't recognize and scan further,
1690		 to make the cache as complete as possible.  However, if we are
1691		 just skipping the prologue, we stop immediately at the first
1692		 unrecognized instruction.  */
0d39a070 1693 unrecognized_pc = current_pc;
21daaaaf
YQ
1694 if (cache != NULL)
1695 continue;
1696 else
1697 break;
0d39a070 1698 }
c906108c
SS
1699 }
1700
0d39a070
DJ
1701 if (unrecognized_pc == 0)
1702 unrecognized_pc = current_pc;
1703
0d39a070
DJ
1704 if (cache)
1705 {
4072f920
YQ
1706 int framereg, framesize;
1707
1708 /* The frame size is just the distance from the frame register
1709 to the original stack pointer. */
1710 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1711 {
1712 /* Frame pointer is fp. */
1713 framereg = ARM_FP_REGNUM;
1714 framesize = -regs[ARM_FP_REGNUM].k;
1715 }
1716 else
1717 {
1718 /* Try the stack pointer... this is a bit desperate. */
1719 framereg = ARM_SP_REGNUM;
1720 framesize = -regs[ARM_SP_REGNUM].k;
1721 }
1722
0d39a070
DJ
1723 cache->framereg = framereg;
1724 cache->framesize = framesize;
1725
1726 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1727 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1728 cache->saved_regs[regno].addr = offset;
1729 }
1730
1731 if (arm_debug)
1732 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1733 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1734
1735 do_cleanups (back_to);
0d39a070
DJ
1736 return unrecognized_pc;
1737}
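
/* [Illustrative sketch -- not part of arm-tdep.c.]  The "sub fp, ip, #n"
   and "sub sp, sp, #n" cases in arm_analyze_prologue above decode an ARM
   "modified immediate" operand: an 8-bit value rotated right by twice the
   4-bit rotate field.  The helper below (hypothetical name) shows the same
   decoding in isolation; for example, insn 0xe24dd010 (sub sp, sp, #16)
   gives imm = 0x10, rot = 0, and so the constant 16.  */

static unsigned int
example_decode_arm_modified_immediate (unsigned int insn)
{
  unsigned int imm = insn & 0xff;		/* 8-bit immediate value.  */
  unsigned int rot = (insn & 0xf00) >> 7;	/* Rotate field, times two.  */

  if (rot == 0)
    return imm;
  return (imm >> rot) | (imm << (32 - rot));
}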
1738
1739static void
1740arm_scan_prologue (struct frame_info *this_frame,
1741 struct arm_prologue_cache *cache)
1742{
1743 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1744 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1745 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1746 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1747 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1748
1749 /* Assume there is no frame until proven otherwise. */
1750 cache->framereg = ARM_SP_REGNUM;
1751 cache->framesize = 0;
1752
1753 /* Check for Thumb prologue. */
1754 if (arm_frame_is_thumb (this_frame))
1755 {
1756 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1757 return;
1758 }
1759
1760 /* Find the function prologue. If we can't find the function in
1761 the symbol table, peek in the stack frame to find the PC. */
1762 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1763 &prologue_end))
1764 {
1765 /* One way to find the end of the prologue (which works well
1766 for unoptimized code) is to do the following:
1767
1768 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1769
1770 if (sal.line == 0)
1771 prologue_end = prev_pc;
1772 else if (sal.end < prologue_end)
1773 prologue_end = sal.end;
1774
1775 This mechanism is very accurate so long as the optimizer
1776 doesn't move any instructions from the function body into the
1777 prologue. If this happens, sal.end will be the last
1778 instruction in the first hunk of prologue code just before
1779 the first instruction that the scheduler has moved from
1780 the body to the prologue.
1781
1782 In order to make sure that we scan all of the prologue
1783 instructions, we use a slightly less accurate mechanism which
1784 may scan more than necessary. To help compensate for this
1785 lack of accuracy, the prologue scanning loop below contains
1786 several clauses which'll cause the loop to terminate early if
1787 an implausible prologue instruction is encountered.
1788
1789 The expression
1790
1791 prologue_start + 64
1792
1793 is a suitable endpoint since it accounts for the largest
1794 possible prologue plus up to five instructions inserted by
1795 the scheduler. */
1796
1797 if (prologue_end > prologue_start + 64)
1798 {
1799 prologue_end = prologue_start + 64; /* See above. */
1800 }
1801 }
1802 else
1803 {
1804 /* We have no symbol information. Our only option is to assume this
1805 function has a standard stack frame and the normal frame register.
1806 Then, we can find the value of our frame pointer on entrance to
1807 the callee (or at the present moment if this is the innermost frame).
1808 The value stored there should be the address of the stmfd + 8. */
1809 CORE_ADDR frame_loc;
1810 LONGEST return_value;
1811
1812 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1813 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1814 return;
1815 else
1816 {
1817 prologue_start = gdbarch_addr_bits_remove
1818 (gdbarch, return_value) - 8;
1819 prologue_end = prologue_start + 64; /* See above. */
1820 }
1821 }
1822
1823 if (prev_pc < prologue_end)
1824 prologue_end = prev_pc;
1825
1826 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1827}
1828
eb5492fa 1829static struct arm_prologue_cache *
a262aec2 1830arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1831{
eb5492fa
DJ
1832 int reg;
1833 struct arm_prologue_cache *cache;
1834 CORE_ADDR unwound_fp;
c5aa993b 1835
35d5d4ee 1836 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1837 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1838
a262aec2 1839 arm_scan_prologue (this_frame, cache);
848cfffb 1840
a262aec2 1841 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1842 if (unwound_fp == 0)
1843 return cache;
c906108c 1844
4be43953 1845 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1846
eb5492fa
DJ
1847 /* Calculate actual addresses of saved registers using offsets
1848 determined by arm_scan_prologue. */
a262aec2 1849 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1850 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1851 cache->saved_regs[reg].addr += cache->prev_sp;
1852
1853 return cache;
c906108c
SS
1854}
1855
c1ee9414
LM
1856/* Implementation of the stop_reason hook for arm_prologue frames. */
1857
1858static enum unwind_stop_reason
1859arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1860 void **this_cache)
1861{
1862 struct arm_prologue_cache *cache;
1863 CORE_ADDR pc;
1864
1865 if (*this_cache == NULL)
1866 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1867 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1868
1869 /* This is meant to halt the backtrace at "_start". */
1870 pc = get_frame_pc (this_frame);
1871 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1872 return UNWIND_OUTERMOST;
1873
1874 /* If we've hit a wall, stop. */
1875 if (cache->prev_sp == 0)
1876 return UNWIND_OUTERMOST;
1877
1878 return UNWIND_NO_REASON;
1879}
1880
eb5492fa
DJ
1881/* Our frame ID for a normal frame is the current function's starting PC
1882 and the caller's SP when we were called. */
c906108c 1883
148754e5 1884static void
a262aec2 1885arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1886 void **this_cache,
1887 struct frame_id *this_id)
c906108c 1888{
eb5492fa
DJ
1889 struct arm_prologue_cache *cache;
1890 struct frame_id id;
2c404490 1891 CORE_ADDR pc, func;
f079148d 1892
eb5492fa 1893 if (*this_cache == NULL)
a262aec2 1894 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1895 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1896
0e9e9abd
UW
1897 /* Use function start address as part of the frame ID. If we cannot
1898 identify the start address (due to missing symbol information),
1899 fall back to just using the current PC. */
c1ee9414 1900 pc = get_frame_pc (this_frame);
2c404490 1901 func = get_frame_func (this_frame);
0e9e9abd
UW
1902 if (!func)
1903 func = pc;
1904
eb5492fa 1905 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1906 *this_id = id;
c906108c
SS
1907}
1908
a262aec2
DJ
1909static struct value *
1910arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1911 void **this_cache,
a262aec2 1912 int prev_regnum)
24de872b 1913{
24568a2c 1914 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1915 struct arm_prologue_cache *cache;
1916
eb5492fa 1917 if (*this_cache == NULL)
a262aec2 1918 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1919 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1920
eb5492fa 1921 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1922 instead. The prologue may save PC, but it will point into this
1923 frame's prologue, not the next frame's resume location. Also
1924 strip the saved T bit. A valid LR may have the low bit set, but
1925 a valid PC never does. */
eb5492fa 1926 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1927 {
1928 CORE_ADDR lr;
1929
1930 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1931 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1932 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1933 }
24de872b 1934
eb5492fa 1935 /* SP is generally not saved to the stack, but this frame is
a262aec2 1936 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1937 The value was already reconstructed into PREV_SP. */
1938 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1939 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1940
b39cc962
DJ
1941 /* The CPSR may have been changed by the call instruction and by the
1942 called function. The only bit we can reconstruct is the T bit,
1943 by checking the low bit of LR as of the call. This is a reliable
1944 indicator of Thumb-ness except for some ARM v4T pre-interworking
1945 Thumb code, which could get away with a clear low bit as long as
1946 the called function did not use bx. Guess that all other
1947 bits are unchanged; the condition flags are presumably lost,
1948 but the processor status is likely valid. */
1949 if (prev_regnum == ARM_PS_REGNUM)
1950 {
1951 CORE_ADDR lr, cpsr;
9779414d 1952 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1953
1954 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1955 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1956 if (IS_THUMB_ADDR (lr))
9779414d 1957 cpsr |= t_bit;
b39cc962 1958 else
9779414d 1959 cpsr &= ~t_bit;
b39cc962
DJ
1960 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1961 }
1962
a262aec2
DJ
1963 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1964 prev_regnum);
eb5492fa
DJ
1965}
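
/* [Illustrative sketch -- not part of arm-tdep.c.]  arm_prologue_prev_register
   above reconstructs both the return PC and the CPSR T bit from the saved LR:
   the low bit of an interworking return address encodes Thumb state and is
   never part of the PC itself.  The hypothetical helper below shows the split
   that the IS_THUMB_ADDR / UNMAKE_THUMB_ADDR macros perform.  */

static void
example_split_return_address (CORE_ADDR lr, CORE_ADDR *pc, int *is_thumb)
{
  *is_thumb = (lr & 1) != 0;		/* What IS_THUMB_ADDR tests.  */
  *pc = lr & ~(CORE_ADDR) 1;		/* What UNMAKE_THUMB_ADDR strips.  */
}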
1966
1967struct frame_unwind arm_prologue_unwind = {
1968 NORMAL_FRAME,
c1ee9414 1969 arm_prologue_unwind_stop_reason,
eb5492fa 1970 arm_prologue_this_id,
a262aec2
DJ
1971 arm_prologue_prev_register,
1972 NULL,
1973 default_frame_sniffer
eb5492fa
DJ
1974};
1975
0e9e9abd
UW
1976/* Maintain a list of ARM exception table entries per objfile, similar to the
1977 list of mapping symbols. We only cache entries for standard ARM-defined
1978 personality routines; the cache will contain only the frame unwinding
1979 instructions associated with the entry (not the descriptors). */
1980
1981static const struct objfile_data *arm_exidx_data_key;
1982
1983struct arm_exidx_entry
1984{
1985 bfd_vma addr;
1986 gdb_byte *entry;
1987};
1988typedef struct arm_exidx_entry arm_exidx_entry_s;
1989DEF_VEC_O(arm_exidx_entry_s);
1990
1991struct arm_exidx_data
1992{
1993 VEC(arm_exidx_entry_s) **section_maps;
1994};
1995
1996static void
1997arm_exidx_data_free (struct objfile *objfile, void *arg)
1998{
9a3c8263 1999 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
2000 unsigned int i;
2001
2002 for (i = 0; i < objfile->obfd->section_count; i++)
2003 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2004}
2005
2006static inline int
2007arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2008 const struct arm_exidx_entry *rhs)
2009{
2010 return lhs->addr < rhs->addr;
2011}
2012
2013static struct obj_section *
2014arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2015{
2016 struct obj_section *osect;
2017
2018 ALL_OBJFILE_OSECTIONS (objfile, osect)
2019 if (bfd_get_section_flags (objfile->obfd,
2020 osect->the_bfd_section) & SEC_ALLOC)
2021 {
2022 bfd_vma start, size;
2023 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2024 size = bfd_get_section_size (osect->the_bfd_section);
2025
2026 if (start <= vma && vma < start + size)
2027 return osect;
2028 }
2029
2030 return NULL;
2031}
2032
2033/* Parse contents of exception table and exception index sections
2034 of OBJFILE, and fill in the exception table entry cache.
2035
2036 For each entry that refers to a standard ARM-defined personality
2037 routine, extract the frame unwinding instructions (from either
2038 the index or the table section). The unwinding instructions
2039 are normalized by:
2040 - extracting them from the rest of the table data
2041 - converting to host endianness
2042 - appending the implicit 0xb0 ("Finish") code
2043
2044 The extracted and normalized instructions are stored for later
2045 retrieval by the arm_find_exidx_entry routine. */
2046
2047static void
2048arm_exidx_new_objfile (struct objfile *objfile)
2049{
3bb47e8b 2050 struct cleanup *cleanups;
0e9e9abd
UW
2051 struct arm_exidx_data *data;
2052 asection *exidx, *extab;
2053 bfd_vma exidx_vma = 0, extab_vma = 0;
2054 bfd_size_type exidx_size = 0, extab_size = 0;
2055 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2056 LONGEST i;
2057
2058 /* If we've already touched this file, do nothing. */
2059 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2060 return;
3bb47e8b 2061 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2062
2063 /* Read contents of exception table and index. */
a5eda10c 2064 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2065 if (exidx)
2066 {
2067 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2068 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2069 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2070 make_cleanup (xfree, exidx_data);
2071
2072 if (!bfd_get_section_contents (objfile->obfd, exidx,
2073 exidx_data, 0, exidx_size))
2074 {
2075 do_cleanups (cleanups);
2076 return;
2077 }
2078 }
2079
2080 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2081 if (extab)
2082 {
2083 extab_vma = bfd_section_vma (objfile->obfd, extab);
2084 extab_size = bfd_get_section_size (extab);
224c3ddb 2085 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2086 make_cleanup (xfree, extab_data);
2087
2088 if (!bfd_get_section_contents (objfile->obfd, extab,
2089 extab_data, 0, extab_size))
2090 {
2091 do_cleanups (cleanups);
2092 return;
2093 }
2094 }
2095
2096 /* Allocate exception table data structure. */
2097 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2098 set_objfile_data (objfile, arm_exidx_data_key, data);
2099 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2100 objfile->obfd->section_count,
2101 VEC(arm_exidx_entry_s) *);
2102
2103 /* Fill in exception table. */
2104 for (i = 0; i < exidx_size / 8; i++)
2105 {
2106 struct arm_exidx_entry new_exidx_entry;
2107 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2108 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2109 bfd_vma addr = 0, word = 0;
2110 int n_bytes = 0, n_words = 0;
2111 struct obj_section *sec;
2112 gdb_byte *entry = NULL;
2113
2114 /* Extract address of start of function. */
2115 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2116 idx += exidx_vma + i * 8;
2117
2118 /* Find section containing function and compute section offset. */
2119 sec = arm_obj_section_from_vma (objfile, idx);
2120 if (sec == NULL)
2121 continue;
2122 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2123
2124 /* Determine address of exception table entry. */
2125 if (val == 1)
2126 {
2127 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2128 }
2129 else if ((val & 0xff000000) == 0x80000000)
2130 {
2131 /* Exception table entry embedded in .ARM.exidx
2132 -- must be short form. */
2133 word = val;
2134 n_bytes = 3;
2135 }
2136 else if (!(val & 0x80000000))
2137 {
2138 /* Exception table entry in .ARM.extab. */
2139 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2140 addr += exidx_vma + i * 8 + 4;
2141
2142 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2143 {
2144 word = bfd_h_get_32 (objfile->obfd,
2145 extab_data + addr - extab_vma);
2146 addr += 4;
2147
2148 if ((word & 0xff000000) == 0x80000000)
2149 {
2150 /* Short form. */
2151 n_bytes = 3;
2152 }
2153 else if ((word & 0xff000000) == 0x81000000
2154 || (word & 0xff000000) == 0x82000000)
2155 {
2156 /* Long form. */
2157 n_bytes = 2;
2158 n_words = ((word >> 16) & 0xff);
2159 }
2160 else if (!(word & 0x80000000))
2161 {
2162 bfd_vma pers;
2163 struct obj_section *pers_sec;
2164 int gnu_personality = 0;
2165
2166 /* Custom personality routine. */
2167 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2168 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2169
2170 /* Check whether we've got one of the variants of the
2171 GNU personality routines. */
2172 pers_sec = arm_obj_section_from_vma (objfile, pers);
2173 if (pers_sec)
2174 {
2175 static const char *personality[] =
2176 {
2177 "__gcc_personality_v0",
2178 "__gxx_personality_v0",
2179 "__gcj_personality_v0",
2180 "__gnu_objc_personality_v0",
2181 NULL
2182 };
2183
2184 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2185 int k;
2186
2187 for (k = 0; personality[k]; k++)
2188 if (lookup_minimal_symbol_by_pc_name
2189 (pc, personality[k], objfile))
2190 {
2191 gnu_personality = 1;
2192 break;
2193 }
2194 }
2195
2196 /* If so, the next word contains a word count in the high
2197 byte, followed by the same unwind instructions as the
2198 pre-defined forms. */
2199 if (gnu_personality
2200 && addr + 4 <= extab_vma + extab_size)
2201 {
2202 word = bfd_h_get_32 (objfile->obfd,
2203 extab_data + addr - extab_vma);
2204 addr += 4;
2205 n_bytes = 3;
2206 n_words = ((word >> 24) & 0xff);
2207 }
2208 }
2209 }
2210 }
2211
2212 /* Sanity check address. */
2213 if (n_words)
2214 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2215 n_words = n_bytes = 0;
2216
2217 /* The unwind instructions reside in WORD (only the N_BYTES least
2218 significant bytes are valid), followed by N_WORDS words in the
2219 extab section starting at ADDR. */
2220 if (n_bytes || n_words)
2221 {
224c3ddb
SM
2222 gdb_byte *p = entry
2223 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2224 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2225
2226 while (n_bytes--)
2227 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2228
2229 while (n_words--)
2230 {
2231 word = bfd_h_get_32 (objfile->obfd,
2232 extab_data + addr - extab_vma);
2233 addr += 4;
2234
2235 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2236 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2237 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2238 *p++ = (gdb_byte) (word & 0xff);
2239 }
2240
2241 /* Implied "Finish" to terminate the list. */
2242 *p++ = 0xb0;
2243 }
2244
2245 /* Push entry onto vector. They are guaranteed to always
2246 appear in order of increasing addresses. */
2247 new_exidx_entry.addr = idx;
2248 new_exidx_entry.entry = entry;
2249 VEC_safe_push (arm_exidx_entry_s,
2250 data->section_maps[sec->the_bfd_section->index],
2251 &new_exidx_entry);
2252 }
2253
2254 do_cleanups (cleanups);
2255}
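
/* [Illustrative sketch -- not part of arm-tdep.c.]  The .ARM.exidx and
   .ARM.extab offsets decoded in arm_exidx_new_objfile above are "prel31"
   values: 31-bit signed offsets relative to the word that holds them.  The
   expression ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000 sign-extends
   bit 30; the hypothetical helper below shows the whole computation.  */

static bfd_vma
example_decode_prel31 (bfd_vma word, bfd_vma place)
{
  /* Sign-extend the low 31 bits, then make the result place-relative.  */
  bfd_vma offset = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;

  return place + offset;
}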
2256
2257/* Search for the exception table entry covering MEMADDR. If one is found,
2258 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2259 set *START to the start of the region covered by this entry. */
2260
2261static gdb_byte *
2262arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2263{
2264 struct obj_section *sec;
2265
2266 sec = find_pc_section (memaddr);
2267 if (sec != NULL)
2268 {
2269 struct arm_exidx_data *data;
2270 VEC(arm_exidx_entry_s) *map;
2271 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2272 unsigned int idx;
2273
9a3c8263
SM
2274 data = ((struct arm_exidx_data *)
2275 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2276 if (data != NULL)
2277 {
2278 map = data->section_maps[sec->the_bfd_section->index];
2279 if (!VEC_empty (arm_exidx_entry_s, map))
2280 {
2281 struct arm_exidx_entry *map_sym;
2282
2283 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2284 arm_compare_exidx_entries);
2285
2286 /* VEC_lower_bound finds the earliest ordered insertion
2287 point. If the following symbol starts at this exact
2288 address, we use that; otherwise, the preceding
2289 exception table entry covers this address. */
2290 if (idx < VEC_length (arm_exidx_entry_s, map))
2291 {
2292 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2293 if (map_sym->addr == map_key.addr)
2294 {
2295 if (start)
2296 *start = map_sym->addr + obj_section_addr (sec);
2297 return map_sym->entry;
2298 }
2299 }
2300
2301 if (idx > 0)
2302 {
2303 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2304 if (start)
2305 *start = map_sym->addr + obj_section_addr (sec);
2306 return map_sym->entry;
2307 }
2308 }
2309 }
2310 }
2311
2312 return NULL;
2313}
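
/* [Illustrative sketch -- not part of arm-tdep.c.]  Typical use of
   arm_find_exidx_entry above: look up the unwind entry covering a PC and
   note the start of the region it covers.  A NULL result means "no
   exception table information for this address".  */

static int
example_have_exidx_entry (CORE_ADDR pc)
{
  CORE_ADDR region_start;
  gdb_byte *entry = arm_find_exidx_entry (pc, &region_start);

  return entry != NULL;
}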
2314
2315/* Given the current frame THIS_FRAME, and its associated frame unwinding
2316 instruction list from the ARM exception table entry ENTRY, allocate and
2317 return a prologue cache structure describing how to unwind this frame.
2318
2319 Return NULL if the unwinding instruction list contains a "spare",
2320 "reserved" or "refuse to unwind" instruction as defined in section
2321 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2322 for the ARM Architecture" document. */
2323
2324static struct arm_prologue_cache *
2325arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2326{
2327 CORE_ADDR vsp = 0;
2328 int vsp_valid = 0;
2329
2330 struct arm_prologue_cache *cache;
2331 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2332 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2333
2334 for (;;)
2335 {
2336 gdb_byte insn;
2337
2338 /* Whenever we reload SP, we actually have to retrieve its
2339 actual value in the current frame. */
2340 if (!vsp_valid)
2341 {
2342 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2343 {
2344 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2345 vsp = get_frame_register_unsigned (this_frame, reg);
2346 }
2347 else
2348 {
2349 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2350 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2351 }
2352
2353 vsp_valid = 1;
2354 }
2355
2356 /* Decode next unwind instruction. */
2357 insn = *entry++;
2358
2359 if ((insn & 0xc0) == 0)
2360 {
2361 int offset = insn & 0x3f;
2362 vsp += (offset << 2) + 4;
2363 }
2364 else if ((insn & 0xc0) == 0x40)
2365 {
2366 int offset = insn & 0x3f;
2367 vsp -= (offset << 2) + 4;
2368 }
2369 else if ((insn & 0xf0) == 0x80)
2370 {
2371 int mask = ((insn & 0xf) << 8) | *entry++;
2372 int i;
2373
2374 /* The special case of an all-zero mask identifies
2375 "Refuse to unwind". We return NULL to fall back
2376 to the prologue analyzer. */
2377 if (mask == 0)
2378 return NULL;
2379
2380 /* Pop registers r4..r15 under mask. */
2381 for (i = 0; i < 12; i++)
2382 if (mask & (1 << i))
2383 {
2384 cache->saved_regs[4 + i].addr = vsp;
2385 vsp += 4;
2386 }
2387
2388 /* Special-case popping SP -- we need to reload vsp. */
2389 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2390 vsp_valid = 0;
2391 }
2392 else if ((insn & 0xf0) == 0x90)
2393 {
2394 int reg = insn & 0xf;
2395
2396 /* Reserved cases. */
2397 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2398 return NULL;
2399
2400 /* Set SP from another register and mark VSP for reload. */
2401 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2402 vsp_valid = 0;
2403 }
2404 else if ((insn & 0xf0) == 0xa0)
2405 {
2406 int count = insn & 0x7;
2407 int pop_lr = (insn & 0x8) != 0;
2408 int i;
2409
2410 /* Pop r4..r[4+count]. */
2411 for (i = 0; i <= count; i++)
2412 {
2413 cache->saved_regs[4 + i].addr = vsp;
2414 vsp += 4;
2415 }
2416
2417 /* If indicated by flag, pop LR as well. */
2418 if (pop_lr)
2419 {
2420 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2421 vsp += 4;
2422 }
2423 }
2424 else if (insn == 0xb0)
2425 {
2426 /* We could only have updated PC by popping into it; if so, it
2427 will show up as address. Otherwise, copy LR into PC. */
2428 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2429 cache->saved_regs[ARM_PC_REGNUM]
2430 = cache->saved_regs[ARM_LR_REGNUM];
2431
2432 /* We're done. */
2433 break;
2434 }
2435 else if (insn == 0xb1)
2436 {
2437 int mask = *entry++;
2438 int i;
2439
2440 /* All-zero mask and mask >= 16 is "spare". */
2441 if (mask == 0 || mask >= 16)
2442 return NULL;
2443
2444 /* Pop r0..r3 under mask. */
2445 for (i = 0; i < 4; i++)
2446 if (mask & (1 << i))
2447 {
2448 cache->saved_regs[i].addr = vsp;
2449 vsp += 4;
2450 }
2451 }
2452 else if (insn == 0xb2)
2453 {
2454 ULONGEST offset = 0;
2455 unsigned shift = 0;
2456
2457 do
2458 {
2459 offset |= (*entry & 0x7f) << shift;
2460 shift += 7;
2461 }
2462 while (*entry++ & 0x80);
2463
2464 vsp += 0x204 + (offset << 2);
2465 }
2466 else if (insn == 0xb3)
2467 {
2468 int start = *entry >> 4;
2469 int count = (*entry++) & 0xf;
2470 int i;
2471
2472 /* Only registers D0..D15 are valid here. */
2473 if (start + count >= 16)
2474 return NULL;
2475
2476 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2477 for (i = 0; i <= count; i++)
2478 {
2479 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2480 vsp += 8;
2481 }
2482
2483 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2484 vsp += 4;
2485 }
2486 else if ((insn & 0xf8) == 0xb8)
2487 {
2488 int count = insn & 0x7;
2489 int i;
2490
2491 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2492 for (i = 0; i <= count; i++)
2493 {
2494 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2495 vsp += 8;
2496 }
2497
2498 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2499 vsp += 4;
2500 }
2501 else if (insn == 0xc6)
2502 {
2503 int start = *entry >> 4;
2504 int count = (*entry++) & 0xf;
2505 int i;
2506
2507 /* Only registers WR0..WR15 are valid. */
2508 if (start + count >= 16)
2509 return NULL;
2510
2511 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2512 for (i = 0; i <= count; i++)
2513 {
2514 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2515 vsp += 8;
2516 }
2517 }
2518 else if (insn == 0xc7)
2519 {
2520 int mask = *entry++;
2521 int i;
2522
2523 /* All-zero mask and mask >= 16 is "spare". */
2524 if (mask == 0 || mask >= 16)
2525 return NULL;
2526
2527 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2528 for (i = 0; i < 4; i++)
2529 if (mask & (1 << i))
2530 {
2531 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2532 vsp += 4;
2533 }
2534 }
2535 else if ((insn & 0xf8) == 0xc0)
2536 {
2537 int count = insn & 0x7;
2538 int i;
2539
2540 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2541 for (i = 0; i <= count; i++)
2542 {
2543 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2544 vsp += 8;
2545 }
2546 }
2547 else if (insn == 0xc8)
2548 {
2549 int start = *entry >> 4;
2550 int count = (*entry++) & 0xf;
2551 int i;
2552
2553 /* Only registers D0..D31 are valid. */
2554 if (start + count >= 16)
2555 return NULL;
2556
2557 /* Pop VFP double-precision registers
2558 D[16+start]..D[16+start+count]. */
2559 for (i = 0; i <= count; i++)
2560 {
2561 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2562 vsp += 8;
2563 }
2564 }
2565 else if (insn == 0xc9)
2566 {
2567 int start = *entry >> 4;
2568 int count = (*entry++) & 0xf;
2569 int i;
2570
2571 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2572 for (i = 0; i <= count; i++)
2573 {
2574 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2575 vsp += 8;
2576 }
2577 }
2578 else if ((insn & 0xf8) == 0xd0)
2579 {
2580 int count = insn & 0x7;
2581 int i;
2582
2583 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2584 for (i = 0; i <= count; i++)
2585 {
2586 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2587 vsp += 8;
2588 }
2589 }
2590 else
2591 {
2592 /* Everything else is "spare". */
2593 return NULL;
2594 }
2595 }
2596
2597 /* If we restore SP from a register, assume this was the frame register.
2598 Otherwise just fall back to SP as frame register. */
2599 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2600 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2601 else
2602 cache->framereg = ARM_SP_REGNUM;
2603
2604 /* Determine offset to previous frame. */
2605 cache->framesize
2606 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2607
2608 /* We already got the previous SP. */
2609 cache->prev_sp = vsp;
2610
2611 return cache;
2612}
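
/* [Illustrative sketch -- not part of arm-tdep.c.]  The 0xb2 unwind opcode
   handled in arm_exidx_fill_cache above ("vsp = vsp + 0x204 + (uleb128 << 2)")
   carries its operand as a ULEB128 value in the bytes that follow the opcode.
   The hypothetical helper below shows just that decoding step, advancing the
   cursor past the operand.  */

static CORE_ADDR
example_exidx_large_stack_adjust (const gdb_byte **entry, CORE_ADDR vsp)
{
  ULONGEST offset = 0;
  unsigned int shift = 0;
  const gdb_byte *p = *entry;

  do
    {
      offset |= (ULONGEST) (*p & 0x7f) << shift;
      shift += 7;
    }
  while (*p++ & 0x80);

  *entry = p;
  return vsp + 0x204 + (offset << 2);
}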
2613
2614/* Unwinding via ARM exception table entries. Note that the sniffer
2615 already computes a filled-in prologue cache, which is then used
2616 with the same arm_prologue_this_id and arm_prologue_prev_register
2617 routines also used for prologue-parsing based unwinding. */
2618
2619static int
2620arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2621 struct frame_info *this_frame,
2622 void **this_prologue_cache)
2623{
2624 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2625 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2626 CORE_ADDR addr_in_block, exidx_region, func_start;
2627 struct arm_prologue_cache *cache;
2628 gdb_byte *entry;
2629
2630 /* See if we have an ARM exception table entry covering this address. */
2631 addr_in_block = get_frame_address_in_block (this_frame);
2632 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2633 if (!entry)
2634 return 0;
2635
2636 /* The ARM exception table does not describe unwind information
2637 for arbitrary PC values, but is guaranteed to be correct only
2638 at call sites. We have to decide here whether we want to use
2639 ARM exception table information for this frame, or fall back
2640 to using prologue parsing. (Note that if we have DWARF CFI,
2641 this sniffer isn't even called -- CFI is always preferred.)
2642
2643 Before we make this decision, however, we check whether we
2644 actually have *symbol* information for the current frame.
2645 If not, prologue parsing would not work anyway, so we might
2646 as well use the exception table and hope for the best. */
2647 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2648 {
2649 int exc_valid = 0;
2650
2651 /* If the next frame is "normal", we are at a call site in this
2652 frame, so exception information is guaranteed to be valid. */
2653 if (get_next_frame (this_frame)
2654 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2655 exc_valid = 1;
2656
2657 /* We also assume exception information is valid if we're currently
2658 blocked in a system call. The system library is supposed to
d9311bfa
AT
2659 ensure this, so that e.g. pthread cancellation works. */
2660 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2661 {
d9311bfa 2662 LONGEST insn;
416dc9c6 2663
d9311bfa
AT
2664 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2665 byte_order_for_code, &insn)
2666 && (insn & 0xff00) == 0xdf00 /* svc */)
2667 exc_valid = 1;
0e9e9abd 2668 }
d9311bfa
AT
2669 else
2670 {
2671 LONGEST insn;
416dc9c6 2672
d9311bfa
AT
2673 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2674 byte_order_for_code, &insn)
2675 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2676 exc_valid = 1;
2677 }
2678
0e9e9abd
UW
2679 /* Bail out if we don't know that exception information is valid. */
2680 if (!exc_valid)
2681 return 0;
2682
2683 /* The ARM exception index does not mark the *end* of the region
2684 covered by the entry, and some functions will not have any entry.
2685 To correctly recognize the end of the covered region, the linker
2686 should have inserted dummy records with a CANTUNWIND marker.
2687
2688 Unfortunately, current versions of GNU ld do not reliably do
2689 this, and thus we may have found an incorrect entry above.
2690 As a (temporary) sanity check, we only use the entry if it
2691 lies *within* the bounds of the function. Note that this check
2692 might reject perfectly valid entries that just happen to cover
2693 multiple functions; therefore this check ought to be removed
2694 once the linker is fixed. */
2695 if (func_start > exidx_region)
2696 return 0;
2697 }
2698
2699 /* Decode the list of unwinding instructions into a prologue cache.
2700 Note that this may fail due to e.g. a "refuse to unwind" code. */
2701 cache = arm_exidx_fill_cache (this_frame, entry);
2702 if (!cache)
2703 return 0;
2704
2705 *this_prologue_cache = cache;
2706 return 1;
2707}
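
/* [Illustrative sketch -- not part of arm-tdep.c.]  The two "svc" tests in
   arm_exidx_unwind_sniffer above, pulled out for clarity: a Thumb svc is
   encoded as 0xdfXX (the low byte is the immediate), while an ARM svc has
   1111 in bits 24-27 of an otherwise conditional instruction.  The helper
   name is hypothetical.  */

static int
example_insn_is_svc (unsigned int insn, int is_thumb)
{
  if (is_thumb)
    return (insn & 0xff00) == 0xdf00;
  return (insn & 0x0f000000) == 0x0f000000;
}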
2708
2709struct frame_unwind arm_exidx_unwind = {
2710 NORMAL_FRAME,
8fbca658 2711 default_frame_unwind_stop_reason,
0e9e9abd
UW
2712 arm_prologue_this_id,
2713 arm_prologue_prev_register,
2714 NULL,
2715 arm_exidx_unwind_sniffer
2716};
2717
779aa56f
YQ
2718static struct arm_prologue_cache *
2719arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2720{
2721 struct arm_prologue_cache *cache;
779aa56f
YQ
2722 int reg;
2723
2724 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2725 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2726
2727  /* Still rely on the offsets calculated by the prologue scan.  */
2728 arm_scan_prologue (this_frame, cache);
2729
2730 /* Since we are in epilogue, the SP has been restored. */
2731 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2732
2733 /* Calculate actual addresses of saved registers using offsets
2734 determined by arm_scan_prologue. */
2735 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2736 if (trad_frame_addr_p (cache->saved_regs, reg))
2737 cache->saved_regs[reg].addr += cache->prev_sp;
2738
2739 return cache;
2740}
2741
2742/* Implementation of function hook 'this_id' in
2743   'struct frame_unwind' for the epilogue unwinder.  */
2744
2745static void
2746arm_epilogue_frame_this_id (struct frame_info *this_frame,
2747 void **this_cache,
2748 struct frame_id *this_id)
2749{
2750 struct arm_prologue_cache *cache;
2751 CORE_ADDR pc, func;
2752
2753 if (*this_cache == NULL)
2754 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2755 cache = (struct arm_prologue_cache *) *this_cache;
2756
2757 /* Use function start address as part of the frame ID. If we cannot
2758 identify the start address (due to missing symbol information),
2759 fall back to just using the current PC. */
2760 pc = get_frame_pc (this_frame);
2761 func = get_frame_func (this_frame);
fb3f3d25 2762 if (func == 0)
779aa56f
YQ
2763 func = pc;
2764
2765 (*this_id) = frame_id_build (cache->prev_sp, pc);
2766}
2767
2768/* Implementation of function hook 'prev_register' in
2769   'struct frame_unwind' for the epilogue unwinder.  */
2770
2771static struct value *
2772arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2773 void **this_cache, int regnum)
2774{
779aa56f
YQ
2775 if (*this_cache == NULL)
2776 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2777
2778 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2779}
2780
2781static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2782 CORE_ADDR pc);
2783static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2784 CORE_ADDR pc);
2785
2786/* Implementation of function hook 'sniffer' in
2787   'struct frame_unwind' for the epilogue unwinder.  */
2788
2789static int
2790arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2791 struct frame_info *this_frame,
2792 void **this_prologue_cache)
2793{
2794 if (frame_relative_level (this_frame) == 0)
2795 {
2796 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2797 CORE_ADDR pc = get_frame_pc (this_frame);
2798
2799 if (arm_frame_is_thumb (this_frame))
2800 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2801 else
2802 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2803 }
2804 else
2805 return 0;
2806}
2807
2808/* Frame unwinder from epilogue. */
2809
2810static const struct frame_unwind arm_epilogue_frame_unwind =
2811{
2812 NORMAL_FRAME,
2813 default_frame_unwind_stop_reason,
2814 arm_epilogue_frame_this_id,
2815 arm_epilogue_frame_prev_register,
2816 NULL,
2817 arm_epilogue_frame_sniffer,
2818};
2819
80d8d390
YQ
2820/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2821 trampoline, return the target PC. Otherwise return 0.
2822
2823 void call0a (char c, short s, int i, long l) {}
2824
2825 int main (void)
2826 {
2827 (*pointer_to_call0a) (c, s, i, l);
2828 }
2829
2830 Instead of calling a stub library function _call_via_xx (xx is
2831 the register name), GCC may inline the trampoline in the object
2832 file as below (register r2 has the address of call0a).
2833
2834 .global main
2835 .type main, %function
2836 ...
2837 bl .L1
2838 ...
2839 .size main, .-main
2840
2841 .L1:
2842 bx r2
2843
2844 The trampoline 'bx r2' doesn't belong to main. */
2845
2846static CORE_ADDR
2847arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2848{
2849 /* The heuristics of recognizing such trampoline is that FRAME is
2850 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2851 if (arm_frame_is_thumb (frame))
2852 {
2853 gdb_byte buf[2];
2854
2855 if (target_read_memory (pc, buf, 2) == 0)
2856 {
2857 struct gdbarch *gdbarch = get_frame_arch (frame);
2858 enum bfd_endian byte_order_for_code
2859 = gdbarch_byte_order_for_code (gdbarch);
2860 uint16_t insn
2861 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2862
2863 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2864 {
2865 CORE_ADDR dest
2866 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2867
2868 /* Clear the LSB so that gdb core sets step-resume
2869 breakpoint at the right address. */
2870 return UNMAKE_THUMB_ADDR (dest);
2871 }
2872 }
2873 }
2874
2875 return 0;
2876}
2877
909cf6ea 2878static struct arm_prologue_cache *
a262aec2 2879arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2880{
909cf6ea 2881 struct arm_prologue_cache *cache;
909cf6ea 2882
35d5d4ee 2883 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2884 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2885
a262aec2 2886 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2887
2888 return cache;
2889}
2890
2891/* Our frame ID for a stub frame is the current SP and LR. */
2892
2893static void
a262aec2 2894arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2895 void **this_cache,
2896 struct frame_id *this_id)
2897{
2898 struct arm_prologue_cache *cache;
2899
2900 if (*this_cache == NULL)
a262aec2 2901 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2902 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2903
a262aec2 2904 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2905}
2906
a262aec2
DJ
2907static int
2908arm_stub_unwind_sniffer (const struct frame_unwind *self,
2909 struct frame_info *this_frame,
2910 void **this_prologue_cache)
909cf6ea 2911{
93d42b30 2912 CORE_ADDR addr_in_block;
948f8e3d 2913 gdb_byte dummy[4];
18d18ac8
YQ
2914 CORE_ADDR pc, start_addr;
2915 const char *name;
909cf6ea 2916
a262aec2 2917 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2918 pc = get_frame_pc (this_frame);
3e5d3a5a 2919 if (in_plt_section (addr_in_block)
fc36e839
DE
2920      /* We also use the stub unwinder if the target memory is unreadable
2921	 to avoid having the prologue unwinder try to read it.  */
18d18ac8
YQ
2922 || target_read_memory (pc, dummy, 4) != 0)
2923 return 1;
2924
2925 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2926 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2927 return 1;
909cf6ea 2928
a262aec2 2929 return 0;
909cf6ea
DJ
2930}
2931
a262aec2
DJ
2932struct frame_unwind arm_stub_unwind = {
2933 NORMAL_FRAME,
8fbca658 2934 default_frame_unwind_stop_reason,
a262aec2
DJ
2935 arm_stub_this_id,
2936 arm_prologue_prev_register,
2937 NULL,
2938 arm_stub_unwind_sniffer
2939};
2940
2ae28aa9
YQ
2941/* Store, into CACHE->saved_regs, the addresses of the registers
2942   saved by the frame described by THIS_FRAME.  CACHE is
2943   returned.  */
2944
2945static struct arm_prologue_cache *
2946arm_m_exception_cache (struct frame_info *this_frame)
2947{
2948 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2949 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2950 struct arm_prologue_cache *cache;
2951 CORE_ADDR unwound_sp;
2952 LONGEST xpsr;
2953
2954 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2955 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2956
2957 unwound_sp = get_frame_register_unsigned (this_frame,
2958 ARM_SP_REGNUM);
2959
2960 /* The hardware saves eight 32-bit words, comprising xPSR,
2961 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2962 "B1.5.6 Exception entry behavior" in
2963 "ARMv7-M Architecture Reference Manual". */
2964 cache->saved_regs[0].addr = unwound_sp;
2965 cache->saved_regs[1].addr = unwound_sp + 4;
2966 cache->saved_regs[2].addr = unwound_sp + 8;
2967 cache->saved_regs[3].addr = unwound_sp + 12;
2968 cache->saved_regs[12].addr = unwound_sp + 16;
2969 cache->saved_regs[14].addr = unwound_sp + 20;
2970 cache->saved_regs[15].addr = unwound_sp + 24;
2971 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2972
2973 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2974 aligner between the top of the 32-byte stack frame and the
2975 previous context's stack pointer. */
2976 cache->prev_sp = unwound_sp + 32;
2977 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2978 && (xpsr & (1 << 9)) != 0)
2979 cache->prev_sp += 4;
2980
2981 return cache;
2982}
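
/* [Illustrative sketch -- not part of arm-tdep.c.]  arm_m_exception_cache
   above relies on the fixed Cortex-M exception entry layout: hardware pushes
   eight words (R0-R3, R12, LR, ReturnAddress, xPSR), and bit 9 of the stacked
   xPSR records whether a further 4-byte aligner was inserted to keep the
   frame 8-byte aligned.  The hypothetical helper below computes the previous
   SP from those two facts.  */

static CORE_ADDR
example_m_profile_prev_sp (CORE_ADDR sp_at_exception, ULONGEST stacked_xpsr)
{
  CORE_ADDR prev_sp = sp_at_exception + 8 * 4;	/* Eight stacked words.  */

  if ((stacked_xpsr & (1 << 9)) != 0)
    prev_sp += 4;				/* Alignment padding.  */

  return prev_sp;
}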
2983
2984/* Implementation of function hook 'this_id' in
2985   'struct frame_unwind'.  */
2986
2987static void
2988arm_m_exception_this_id (struct frame_info *this_frame,
2989 void **this_cache,
2990 struct frame_id *this_id)
2991{
2992 struct arm_prologue_cache *cache;
2993
2994 if (*this_cache == NULL)
2995 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2996 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2997
2998 /* Our frame ID for a stub frame is the current SP and LR. */
2999 *this_id = frame_id_build (cache->prev_sp,
3000 get_frame_pc (this_frame));
3001}
3002
3003/* Implementation of function hook 'prev_register' in
3004   'struct frame_unwind'.  */
3005
3006static struct value *
3007arm_m_exception_prev_register (struct frame_info *this_frame,
3008 void **this_cache,
3009 int prev_regnum)
3010{
2ae28aa9
YQ
3011 struct arm_prologue_cache *cache;
3012
3013 if (*this_cache == NULL)
3014 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3015 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3016
3017 /* The value was already reconstructed into PREV_SP. */
3018 if (prev_regnum == ARM_SP_REGNUM)
3019 return frame_unwind_got_constant (this_frame, prev_regnum,
3020 cache->prev_sp);
3021
3022 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3023 prev_regnum);
3024}
3025
3026/* Implementation of function hook 'sniffer' in
3027   'struct frame_unwind'.  */
3028
3029static int
3030arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3031 struct frame_info *this_frame,
3032 void **this_prologue_cache)
3033{
3034 CORE_ADDR this_pc = get_frame_pc (this_frame);
3035
3036 /* No need to check is_m; this sniffer is only registered for
3037 M-profile architectures. */
3038
ca90e760
FH
3039 /* Check if exception frame returns to a magic PC value. */
3040 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3041}
3042
3043/* Frame unwinder for M-profile exceptions. */
3044
3045struct frame_unwind arm_m_exception_unwind =
3046{
3047 SIGTRAMP_FRAME,
3048 default_frame_unwind_stop_reason,
3049 arm_m_exception_this_id,
3050 arm_m_exception_prev_register,
3051 NULL,
3052 arm_m_exception_unwind_sniffer
3053};
3054
24de872b 3055static CORE_ADDR
a262aec2 3056arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3057{
3058 struct arm_prologue_cache *cache;
3059
eb5492fa 3060 if (*this_cache == NULL)
a262aec2 3061 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3062 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3063
4be43953 3064 return cache->prev_sp - cache->framesize;
24de872b
DJ
3065}
3066
eb5492fa
DJ
3067struct frame_base arm_normal_base = {
3068 &arm_prologue_unwind,
3069 arm_normal_frame_base,
3070 arm_normal_frame_base,
3071 arm_normal_frame_base
3072};
3073
a262aec2 3074/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3075 dummy frame. The frame ID's base needs to match the TOS value
3076 saved by save_dummy_frame_tos() and returned from
3077 arm_push_dummy_call, and the PC needs to match the dummy frame's
3078 breakpoint. */
c906108c 3079
eb5492fa 3080static struct frame_id
a262aec2 3081arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3082{
0963b4bd
MS
3083 return frame_id_build (get_frame_register_unsigned (this_frame,
3084 ARM_SP_REGNUM),
a262aec2 3085 get_frame_pc (this_frame));
eb5492fa 3086}
c3b4394c 3087
eb5492fa
DJ
3088/* Given THIS_FRAME, find the previous frame's resume PC (which will
3089 be used to construct the previous frame's ID, after looking up the
3090 containing function). */
c3b4394c 3091
eb5492fa
DJ
3092static CORE_ADDR
3093arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3094{
3095 CORE_ADDR pc;
3096 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3097 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3098}
3099
3100static CORE_ADDR
3101arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3102{
3103 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3104}
3105
b39cc962
DJ
3106static struct value *
3107arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3108 int regnum)
3109{
24568a2c 3110 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3111 CORE_ADDR lr, cpsr;
9779414d 3112 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3113
3114 switch (regnum)
3115 {
3116 case ARM_PC_REGNUM:
3117 /* The PC is normally copied from the return column, which
3118 describes saves of LR. However, that version may have an
3119 extra bit set to indicate Thumb state. The bit is not
3120 part of the PC. */
3121 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3122 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3123 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3124
3125 case ARM_PS_REGNUM:
3126 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3127 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3128 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3129 if (IS_THUMB_ADDR (lr))
9779414d 3130 cpsr |= t_bit;
b39cc962 3131 else
9779414d 3132 cpsr &= ~t_bit;
ca38c58e 3133 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3134
3135 default:
3136 internal_error (__FILE__, __LINE__,
3137 _("Unexpected register %d"), regnum);
3138 }
3139}
3140
3141static void
3142arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3143 struct dwarf2_frame_state_reg *reg,
3144 struct frame_info *this_frame)
3145{
3146 switch (regnum)
3147 {
3148 case ARM_PC_REGNUM:
3149 case ARM_PS_REGNUM:
3150 reg->how = DWARF2_FRAME_REG_FN;
3151 reg->loc.fn = arm_dwarf2_prev_register;
3152 break;
3153 case ARM_SP_REGNUM:
3154 reg->how = DWARF2_FRAME_REG_CFA;
3155 break;
3156 }
3157}
3158
c9cf6e20 3159/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3160
3161static int
c9cf6e20 3162thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3163{
3164 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3165 unsigned int insn, insn2;
3166 int found_return = 0, found_stack_adjust = 0;
3167 CORE_ADDR func_start, func_end;
3168 CORE_ADDR scan_pc;
3169 gdb_byte buf[4];
3170
3171 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3172 return 0;
3173
3174 /* The epilogue is a sequence of instructions along the following lines:
3175
3176 - add stack frame size to SP or FP
3177 - [if frame pointer used] restore SP from FP
3178 - restore registers from SP [may include PC]
3179 - a return-type instruction [if PC wasn't already restored]
3180
3181 In a first pass, we scan forward from the current PC and verify the
3182 instructions we find as compatible with this sequence, ending in a
3183 return instruction.
3184
3185 However, this is not sufficient to distinguish indirect function calls
3186 within a function from indirect tail calls in the epilogue in some cases.
3187 Therefore, if we didn't already find any SP-changing instruction during
3188 forward scan, we add a backward scanning heuristic to ensure we actually
3189 are in the epilogue. */
3190
3191 scan_pc = pc;
3192 while (scan_pc < func_end && !found_return)
3193 {
3194 if (target_read_memory (scan_pc, buf, 2))
3195 break;
3196
3197 scan_pc += 2;
3198 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3199
3200 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3201 found_return = 1;
3202 else if (insn == 0x46f7) /* mov pc, lr */
3203 found_return = 1;
540314bd 3204 else if (thumb_instruction_restores_sp (insn))
4024ca99 3205 {
b7576e5c 3206 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3207 found_return = 1;
3208 }
db24da6d 3209 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3210 {
3211 if (target_read_memory (scan_pc, buf, 2))
3212 break;
3213
3214 scan_pc += 2;
3215 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3216
3217 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3218 {
4024ca99
UW
3219 if (insn2 & 0x8000) /* <registers> include PC. */
3220 found_return = 1;
3221 }
3222 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3223 && (insn2 & 0x0fff) == 0x0b04)
3224 {
4024ca99
UW
3225 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3226 found_return = 1;
3227 }
3228 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3229 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3230 ;
4024ca99
UW
3231 else
3232 break;
3233 }
3234 else
3235 break;
3236 }
3237
3238 if (!found_return)
3239 return 0;
3240
3241 /* Since any instruction in the epilogue sequence, with the possible
3242 exception of return itself, updates the stack pointer, we need to
3243 scan backwards for at most one instruction. Try either a 16-bit or
3244 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3245 too much about false positives. */
4024ca99 3246
6b65d1b6
YQ
3247 if (pc - 4 < func_start)
3248 return 0;
3249 if (target_read_memory (pc - 4, buf, 4))
3250 return 0;
4024ca99 3251
6b65d1b6
YQ
3252 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3253 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3254
3255 if (thumb_instruction_restores_sp (insn2))
3256 found_stack_adjust = 1;
3257 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3258 found_stack_adjust = 1;
3259 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3260 && (insn2 & 0x0fff) == 0x0b04)
3261 found_stack_adjust = 1;
3262 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3263 && (insn2 & 0x0e00) == 0x0a00)
3264 found_stack_adjust = 1;
4024ca99
UW
3265
3266 return found_stack_adjust;
3267}
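
/* [Illustrative sketch -- not part of arm-tdep.c.]  The epilogue scan above
   needs to know whether a halfword starts a 32-bit Thumb-2 instruction: a
   first halfword whose bits 15:11 are 0b11101, 0b11110 or 0b11111 does, and
   anything else is a 16-bit instruction.  That is the standard Thumb-2
   length rule behind the thumb_insn_size check; the helper below is a
   hypothetical restatement of it.  */

static int
example_thumb_insn_size (unsigned int first_halfword)
{
  if ((first_halfword & 0xe000) == 0xe000 && (first_halfword & 0x1800) != 0)
    return 4;
  return 2;
}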
3268
4024ca99 3269static int
c58b006a 3270arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3271{
3272 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3273 unsigned int insn;
f303bc3e 3274 int found_return;
4024ca99
UW
3275 CORE_ADDR func_start, func_end;
3276
4024ca99
UW
3277 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3278 return 0;
3279
3280 /* We are in the epilogue if the previous instruction was a stack
3281 adjustment and the next instruction is a possible return (bx, mov
3282 pc, or pop). We could have to scan backwards to find the stack
3283 adjustment, or forwards to find the return, but this is a decent
3284 approximation. First scan forwards. */
3285
3286 found_return = 0;
3287 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3288 if (bits (insn, 28, 31) != INST_NV)
3289 {
3290 if ((insn & 0x0ffffff0) == 0x012fff10)
3291 /* BX. */
3292 found_return = 1;
3293 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3294 /* MOV PC. */
3295 found_return = 1;
3296 else if ((insn & 0x0fff0000) == 0x08bd0000
3297 && (insn & 0x0000c000) != 0)
3298 /* POP (LDMIA), including PC or LR. */
3299 found_return = 1;
3300 }
3301
3302 if (!found_return)
3303 return 0;
3304
3305 /* Scan backwards. This is just a heuristic, so do not worry about
3306 false positives from mode changes. */
3307
3308 if (pc < func_start + 4)
3309 return 0;
3310
3311 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3312 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3313 return 1;
3314
3315 return 0;
3316}
3317
c58b006a
YQ
3318/* Implement the stack_frame_destroyed_p gdbarch method. */
3319
3320static int
3321arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3322{
3323 if (arm_pc_is_thumb (gdbarch, pc))
3324 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3325 else
3326 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3327}
4024ca99 3328
2dd604e7
RE
3329/* When arguments must be pushed onto the stack, they go on in reverse
3330 order. The code below implements a FILO (stack) to do this. */
3331
3332struct stack_item
3333{
3334 int len;
3335 struct stack_item *prev;
7c543f7b 3336 gdb_byte *data;
2dd604e7
RE
3337};
3338
3339static struct stack_item *
df3b6708 3340push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3341{
3342 struct stack_item *si;
8d749320 3343 si = XNEW (struct stack_item);
7c543f7b 3344 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3345 si->len = len;
3346 si->prev = prev;
3347 memcpy (si->data, contents, len);
3348 return si;
3349}
3350
3351static struct stack_item *
3352pop_stack_item (struct stack_item *si)
3353{
3354 struct stack_item *dead = si;
3355 si = si->prev;
3356 xfree (dead->data);
3357 xfree (dead);
3358 return si;
3359}
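
/* [Illustrative sketch -- not part of arm-tdep.c.]  Typical use of the
   stack_item FILO above when collecting argument bytes that must later be
   copied to the inferior stack in the reverse of the order they were
   pushed.  */

static void
example_stack_item_usage (void)
{
  gdb_byte first[4] = { 1, 2, 3, 4 };
  gdb_byte second[8] = { 0 };
  struct stack_item *si = NULL;

  si = push_stack_item (si, first, sizeof (first));
  si = push_stack_item (si, second, sizeof (second));

  /* Items come back newest-first: SECOND, then FIRST.  */
  while (si != NULL)
    si = pop_stack_item (si);
}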
3360
2af48f68
PB
3361
3362/* Return the alignment (in bytes) of the given type. */
3363
3364static int
3365arm_type_align (struct type *t)
3366{
3367 int n;
3368 int align;
3369 int falign;
3370
3371 t = check_typedef (t);
3372 switch (TYPE_CODE (t))
3373 {
3374 default:
3375 /* Should never happen. */
3376 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3377 return 4;
3378
3379 case TYPE_CODE_PTR:
3380 case TYPE_CODE_ENUM:
3381 case TYPE_CODE_INT:
3382 case TYPE_CODE_FLT:
3383 case TYPE_CODE_SET:
3384 case TYPE_CODE_RANGE:
2af48f68
PB
3385 case TYPE_CODE_REF:
3386 case TYPE_CODE_CHAR:
3387 case TYPE_CODE_BOOL:
3388 return TYPE_LENGTH (t);
3389
3390 case TYPE_CODE_ARRAY:
c4312b19
YQ
3391 if (TYPE_VECTOR (t))
3392 {
3393 /* Use the natural alignment for vector types (the same for
3394 scalar type), but the maximum alignment is 64-bit. */
3395 if (TYPE_LENGTH (t) > 8)
3396 return 8;
3397 else
3398 return TYPE_LENGTH (t);
3399 }
3400 else
3401 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3402 case TYPE_CODE_COMPLEX:
2af48f68
PB
3403 return arm_type_align (TYPE_TARGET_TYPE (t));
3404
3405 case TYPE_CODE_STRUCT:
3406 case TYPE_CODE_UNION:
3407 align = 1;
3408 for (n = 0; n < TYPE_NFIELDS (t); n++)
3409 {
3410 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3411 if (falign > align)
3412 align = falign;
3413 }
3414 return align;
3415 }
3416}
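/* Illustration (added note): for struct { char c; double d; } the double
   member gives the struct an alignment of 8 here; the ABI-specific clamping
   (word alignment for APCS, at most doubleword for AAPCS) is applied by the
   caller in arm_push_dummy_call.  */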
3417
90445bd3
DJ
3418/* Possible base types for a candidate for passing and returning in
3419 VFP registers. */
3420
3421enum arm_vfp_cprc_base_type
3422{
3423 VFP_CPRC_UNKNOWN,
3424 VFP_CPRC_SINGLE,
3425 VFP_CPRC_DOUBLE,
3426 VFP_CPRC_VEC64,
3427 VFP_CPRC_VEC128
3428};
3429
3430/* The length of one element of base type B. */
3431
3432static unsigned
3433arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3434{
3435 switch (b)
3436 {
3437 case VFP_CPRC_SINGLE:
3438 return 4;
3439 case VFP_CPRC_DOUBLE:
3440 return 8;
3441 case VFP_CPRC_VEC64:
3442 return 8;
3443 case VFP_CPRC_VEC128:
3444 return 16;
3445 default:
3446 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3447 (int) b);
3448 }
3449}
3450
3451/* The character ('s', 'd' or 'q') for the type of VFP register used
3452 for passing base type B. */
3453
3454static int
3455arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3456{
3457 switch (b)
3458 {
3459 case VFP_CPRC_SINGLE:
3460 return 's';
3461 case VFP_CPRC_DOUBLE:
3462 return 'd';
3463 case VFP_CPRC_VEC64:
3464 return 'd';
3465 case VFP_CPRC_VEC128:
3466 return 'q';
3467 default:
3468 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3469 (int) b);
3470 }
3471}
3472
3473/* Determine whether T may be part of a candidate for passing and
3474 returning in VFP registers, ignoring the limit on the total number
3475 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3476 classification of the first valid component found; if it is not
3477 VFP_CPRC_UNKNOWN, all components must have the same classification
3478 as *BASE_TYPE. If it is found that T contains a type not permitted
3479 for passing and returning in VFP registers, a type differently
3480 classified from *BASE_TYPE, or two types differently classified
3481 from each other, return -1, otherwise return the total number of
3482 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3483 array). Vector types are not currently supported, matching the
3484 generic AAPCS support. */
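/* For example, struct { float x; float y; float z; } yields base type
   VFP_CPRC_SINGLE with three elements, while struct { float f; double d; }
   is rejected (-1) because its members classify differently.  */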
90445bd3
DJ
3485
3486static int
3487arm_vfp_cprc_sub_candidate (struct type *t,
3488 enum arm_vfp_cprc_base_type *base_type)
3489{
3490 t = check_typedef (t);
3491 switch (TYPE_CODE (t))
3492 {
3493 case TYPE_CODE_FLT:
3494 switch (TYPE_LENGTH (t))
3495 {
3496 case 4:
3497 if (*base_type == VFP_CPRC_UNKNOWN)
3498 *base_type = VFP_CPRC_SINGLE;
3499 else if (*base_type != VFP_CPRC_SINGLE)
3500 return -1;
3501 return 1;
3502
3503 case 8:
3504 if (*base_type == VFP_CPRC_UNKNOWN)
3505 *base_type = VFP_CPRC_DOUBLE;
3506 else if (*base_type != VFP_CPRC_DOUBLE)
3507 return -1;
3508 return 1;
3509
3510 default:
3511 return -1;
3512 }
3513 break;
3514
817e0957
YQ
3515 case TYPE_CODE_COMPLEX:
3516 /* Arguments of complex T where T is one of the types float or
3517 double get treated as if they are implemented as:
3518
3519 struct complexT
3520 {
3521 T real;
3522 T imag;
5f52445b
YQ
3523 };
3524
3525 */
817e0957
YQ
3526 switch (TYPE_LENGTH (t))
3527 {
3528 case 8:
3529 if (*base_type == VFP_CPRC_UNKNOWN)
3530 *base_type = VFP_CPRC_SINGLE;
3531 else if (*base_type != VFP_CPRC_SINGLE)
3532 return -1;
3533 return 2;
3534
3535 case 16:
3536 if (*base_type == VFP_CPRC_UNKNOWN)
3537 *base_type = VFP_CPRC_DOUBLE;
3538 else if (*base_type != VFP_CPRC_DOUBLE)
3539 return -1;
3540 return 2;
3541
3542 default:
3543 return -1;
3544 }
3545 break;
3546
90445bd3
DJ
3547 case TYPE_CODE_ARRAY:
3548 {
c4312b19 3549 if (TYPE_VECTOR (t))
90445bd3 3550 {
c4312b19
YQ
3551 /* A 64-bit or 128-bit containerized vector type are VFP
3552 CPRCs. */
3553 switch (TYPE_LENGTH (t))
3554 {
3555 case 8:
3556 if (*base_type == VFP_CPRC_UNKNOWN)
3557 *base_type = VFP_CPRC_VEC64;
3558 return 1;
3559 case 16:
3560 if (*base_type == VFP_CPRC_UNKNOWN)
3561 *base_type = VFP_CPRC_VEC128;
3562 return 1;
3563 default:
3564 return -1;
3565 }
3566 }
3567 else
3568 {
3569 int count;
3570 unsigned unitlen;
3571
3572 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3573 base_type);
3574 if (count == -1)
3575 return -1;
3576 if (TYPE_LENGTH (t) == 0)
3577 {
3578 gdb_assert (count == 0);
3579 return 0;
3580 }
3581 else if (count == 0)
3582 return -1;
3583 unitlen = arm_vfp_cprc_unit_length (*base_type);
3584 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3585 return TYPE_LENGTH (t) / unitlen;
90445bd3 3586 }
90445bd3
DJ
3587 }
3588 break;
3589
3590 case TYPE_CODE_STRUCT:
3591 {
3592 int count = 0;
3593 unsigned unitlen;
3594 int i;
3595 for (i = 0; i < TYPE_NFIELDS (t); i++)
3596 {
1040b979
YQ
3597 int sub_count = 0;
3598
3599 if (!field_is_static (&TYPE_FIELD (t, i)))
3600 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3601 base_type);
90445bd3
DJ
3602 if (sub_count == -1)
3603 return -1;
3604 count += sub_count;
3605 }
3606 if (TYPE_LENGTH (t) == 0)
3607 {
3608 gdb_assert (count == 0);
3609 return 0;
3610 }
3611 else if (count == 0)
3612 return -1;
3613 unitlen = arm_vfp_cprc_unit_length (*base_type);
3614 if (TYPE_LENGTH (t) != unitlen * count)
3615 return -1;
3616 return count;
3617 }
3618
3619 case TYPE_CODE_UNION:
3620 {
3621 int count = 0;
3622 unsigned unitlen;
3623 int i;
3624 for (i = 0; i < TYPE_NFIELDS (t); i++)
3625 {
3626 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3627 base_type);
3628 if (sub_count == -1)
3629 return -1;
3630 count = (count > sub_count ? count : sub_count);
3631 }
3632 if (TYPE_LENGTH (t) == 0)
3633 {
3634 gdb_assert (count == 0);
3635 return 0;
3636 }
3637 else if (count == 0)
3638 return -1;
3639 unitlen = arm_vfp_cprc_unit_length (*base_type);
3640 if (TYPE_LENGTH (t) != unitlen * count)
3641 return -1;
3642 return count;
3643 }
3644
3645 default:
3646 break;
3647 }
3648
3649 return -1;
3650}
3651
3652/* Determine whether T is a VFP co-processor register candidate (CPRC)
3653 if passed to or returned from a non-variadic function with the VFP
3654 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3655 *BASE_TYPE to the base type for T and *COUNT to the number of
3656 elements of that base type before returning. */
3657
3658static int
3659arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3660 int *count)
3661{
3662 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3663 int c = arm_vfp_cprc_sub_candidate (t, &b);
3664 if (c <= 0 || c > 4)
3665 return 0;
3666 *base_type = b;
3667 *count = c;
3668 return 1;
3669}
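/* Illustration (added note): a structure of four doubles is a CPRC with base
   type VFP_CPRC_DOUBLE and count 4, and under the VFP ABI would normally be
   passed in d0-d3; five doubles exceed the four-element limit above and fall
   back to the base (core register / stack) ABI.  */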
3670
3671/* Return 1 if the VFP ABI should be used for passing arguments to and
3672 returning values from a function of type FUNC_TYPE, 0
3673 otherwise. */
3674
3675static int
3676arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3677{
3678 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3679 /* Variadic functions always use the base ABI. Assume that functions
3680 without debug info are not variadic. */
3681 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3682 return 0;
3683 /* The VFP ABI is only supported as a variant of AAPCS. */
3684 if (tdep->arm_abi != ARM_ABI_AAPCS)
3685 return 0;
3686 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3687}
3688
3689/* We currently only support passing parameters in integer registers, which
3690 conforms with GCC's default model, and VFP argument passing following
3691 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3692 we should probably support some of them based on the selected ABI. */
3693
3694static CORE_ADDR
7d9b040b 3695arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3696 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3697 struct value **args, CORE_ADDR sp, int struct_return,
3698 CORE_ADDR struct_addr)
2dd604e7 3699{
e17a4113 3700 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3701 int argnum;
3702 int argreg;
3703 int nstack;
3704 struct stack_item *si = NULL;
90445bd3
DJ
3705 int use_vfp_abi;
3706 struct type *ftype;
3707 unsigned vfp_regs_free = (1 << 16) - 1;
3708
3709 /* Determine the type of this function and whether the VFP ABI
3710 applies. */
3711 ftype = check_typedef (value_type (function));
3712 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3713 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3714 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3715
6a65450a
AC
3716 /* Set the return address. For the ARM, the return breakpoint is
3717 always at BP_ADDR. */
9779414d 3718 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3719 bp_addr |= 1;
6a65450a 3720 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3721
3722 /* Walk through the list of args and determine how large a temporary
3723 stack is required. Need to take care here as structs may be
7a9dd1b2 3724 passed on the stack, and we have to push them. */
2dd604e7
RE
3725 nstack = 0;
3726
3727 argreg = ARM_A1_REGNUM;
3728 nstack = 0;
3729
2dd604e7
RE
3730 /* The struct_return pointer occupies the first parameter
3731 passing register. */
3732 if (struct_return)
3733 {
3734 if (arm_debug)
5af949e3 3735 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3736 gdbarch_register_name (gdbarch, argreg),
5af949e3 3737 paddress (gdbarch, struct_addr));
2dd604e7
RE
3738 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3739 argreg++;
3740 }
3741
3742 for (argnum = 0; argnum < nargs; argnum++)
3743 {
3744 int len;
3745 struct type *arg_type;
3746 struct type *target_type;
3747 enum type_code typecode;
8c6363cf 3748 const bfd_byte *val;
2af48f68 3749 int align;
90445bd3
DJ
3750 enum arm_vfp_cprc_base_type vfp_base_type;
3751 int vfp_base_count;
3752 int may_use_core_reg = 1;
2dd604e7 3753
df407dfe 3754 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3755 len = TYPE_LENGTH (arg_type);
3756 target_type = TYPE_TARGET_TYPE (arg_type);
3757 typecode = TYPE_CODE (arg_type);
8c6363cf 3758 val = value_contents (args[argnum]);
2dd604e7 3759
2af48f68
PB
3760 align = arm_type_align (arg_type);
3761 /* Round alignment up to a whole number of words. */
3762 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3763 /* Different ABIs have different maximum alignments. */
3764 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3765 {
3766 /* The APCS ABI only requires word alignment. */
3767 align = INT_REGISTER_SIZE;
3768 }
3769 else
3770 {
3771 /* The AAPCS requires at most doubleword alignment. */
3772 if (align > INT_REGISTER_SIZE * 2)
3773 align = INT_REGISTER_SIZE * 2;
3774 }
3775
90445bd3
DJ
3776 if (use_vfp_abi
3777 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3778 &vfp_base_count))
3779 {
3780 int regno;
3781 int unit_length;
3782 int shift;
3783 unsigned mask;
3784
3785 /* Because this is a CPRC it cannot go in a core register or
3786 cause a core register to be skipped for alignment.
3787 Either it goes in VFP registers and the rest of this loop
3788 iteration is skipped for this argument, or it goes on the
3789 stack (and the stack alignment code is correct for this
3790 case). */
3791 may_use_core_reg = 0;
3792
3793 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3794 shift = unit_length / 4;
3795 mask = (1 << (shift * vfp_base_count)) - 1;
3796 for (regno = 0; regno < 16; regno += shift)
3797 if (((vfp_regs_free >> regno) & mask) == mask)
3798 break;
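 /* (Added note: VFP_REGS_FREE tracks availability in single-precision
    units, so e.g. a CPRC of two doubles needs four consecutive free
    single-precision slots starting on a d-register boundary, i.e. two
    consecutive d-registers.)  */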
3799
3800 if (regno < 16)
3801 {
3802 int reg_char;
3803 int reg_scaled;
3804 int i;
3805
3806 vfp_regs_free &= ~(mask << regno);
3807 reg_scaled = regno / shift;
3808 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3809 for (i = 0; i < vfp_base_count; i++)
3810 {
3811 char name_buf[4];
3812 int regnum;
58d6951d
DJ
3813 if (reg_char == 'q')
3814 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3815 val + i * unit_length);
58d6951d
DJ
3816 else
3817 {
8c042590
PM
3818 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3819 reg_char, reg_scaled + i);
58d6951d
DJ
3820 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3821 strlen (name_buf));
3822 regcache_cooked_write (regcache, regnum,
3823 val + i * unit_length);
3824 }
90445bd3
DJ
3825 }
3826 continue;
3827 }
3828 else
3829 {
3830 /* This CPRC could not go in VFP registers, so all VFP
3831 registers are now marked as used. */
3832 vfp_regs_free = 0;
3833 }
3834 }
3835
2af48f68
PB
3836 /* Push stack padding for doubleword alignment. */
3837 if (nstack & (align - 1))
3838 {
3839 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3840 nstack += INT_REGISTER_SIZE;
3841 }
3842
3843 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3844 if (may_use_core_reg
3845 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3846 && align > INT_REGISTER_SIZE
3847 && argreg & 1)
3848 argreg++;
3849
2dd604e7
RE
3850 /* If the argument is a pointer to a function, and it is a
3851 Thumb function, create a LOCAL copy of the value and set
3852 the THUMB bit in it. */
3853 if (TYPE_CODE_PTR == typecode
3854 && target_type != NULL
f96b8fa0 3855 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3856 {
e17a4113 3857 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3858 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3859 {
224c3ddb 3860 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3861 store_unsigned_integer (copy, len, byte_order,
e17a4113 3862 MAKE_THUMB_ADDR (regval));
8c6363cf 3863 val = copy;
2dd604e7
RE
3864 }
3865 }
3866
3867 /* Copy the argument to general registers or the stack in
3868 register-sized pieces. Large arguments are split between
3869 registers and stack. */
3870 while (len > 0)
3871 {
f0c9063c 3872 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3873 CORE_ADDR regval
3874 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3875
90445bd3 3876 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3877 {
3878 /* The argument is being passed in a general purpose
3879 register. */
e17a4113 3880 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3881 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3882 if (arm_debug)
3883 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3884 argnum,
3885 gdbarch_register_name
2af46ca0 3886 (gdbarch, argreg),
f0c9063c 3887 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3888 regcache_cooked_write_unsigned (regcache, argreg, regval);
3889 argreg++;
3890 }
3891 else
3892 {
ef9bd0b8
YQ
3893 gdb_byte buf[INT_REGISTER_SIZE];
3894
3895 memset (buf, 0, sizeof (buf));
3896 store_unsigned_integer (buf, partial_len, byte_order, regval);
3897
2dd604e7
RE
3898 /* Push the arguments onto the stack. */
3899 if (arm_debug)
3900 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3901 argnum, nstack);
ef9bd0b8 3902 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3903 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3904 }
3905
3906 len -= partial_len;
3907 val += partial_len;
3908 }
3909 }
3910 /* If we have an odd number of words to push, then decrement the stack
3911 by one word now, so first stack argument will be dword aligned. */
3912 if (nstack & 4)
3913 sp -= 4;
3914
3915 while (si)
3916 {
3917 sp -= si->len;
3918 write_memory (sp, si->data, si->len);
3919 si = pop_stack_item (si);
3920 }
3921
3922 /* Finally, update the SP register. */
3923 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3924
3925 return sp;
3926}
3927
f53f0d0b
PB
3928
3929/* Always align the frame to an 8-byte boundary. This is required on
3930 some platforms and harmless on the rest. */
3931
3932static CORE_ADDR
3933arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3934{
3935 /* Align the stack to eight bytes. */
3936 return sp & ~ (CORE_ADDR) 7;
3937}
3938
c906108c 3939static void
12b27276 3940print_fpu_flags (struct ui_file *file, int flags)
c906108c 3941{
c5aa993b 3942 if (flags & (1 << 0))
12b27276 3943 fputs_filtered ("IVO ", file);
c5aa993b 3944 if (flags & (1 << 1))
12b27276 3945 fputs_filtered ("DVZ ", file);
c5aa993b 3946 if (flags & (1 << 2))
12b27276 3947 fputs_filtered ("OFL ", file);
c5aa993b 3948 if (flags & (1 << 3))
12b27276 3949 fputs_filtered ("UFL ", file);
c5aa993b 3950 if (flags & (1 << 4))
12b27276
WN
3951 fputs_filtered ("INX ", file);
3952 fputc_filtered ('\n', file);
c906108c
SS
3953}
3954
5e74b15c
RE
3955/* Print interesting information about the floating point processor
3956 (if present) or emulator. */
34e8f22d 3957static void
d855c300 3958arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3959 struct frame_info *frame, const char *args)
c906108c 3960{
9c9acae0 3961 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3962 int type;
3963
3964 type = (status >> 24) & 127;
edefbb7c 3965 if (status & (1 << 31))
12b27276 3966 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3967 else
12b27276 3968 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3969 /* i18n: [floating point unit] mask */
12b27276
WN
3970 fputs_filtered (_("mask: "), file);
3971 print_fpu_flags (file, status >> 16);
edefbb7c 3972 /* i18n: [floating point unit] flags */
12b27276
WN
3973 fputs_filtered (_("flags: "), file);
3974 print_fpu_flags (file, status);
c906108c
SS
3975}
3976
27067745
UW
3977/* Construct the ARM extended floating point type. */
3978static struct type *
3979arm_ext_type (struct gdbarch *gdbarch)
3980{
3981 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3982
3983 if (!tdep->arm_ext_type)
3984 tdep->arm_ext_type
e9bb382b 3985 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3986 floatformats_arm_ext);
3987
3988 return tdep->arm_ext_type;
3989}
3990
58d6951d
DJ
3991static struct type *
3992arm_neon_double_type (struct gdbarch *gdbarch)
3993{
3994 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3995
3996 if (tdep->neon_double_type == NULL)
3997 {
3998 struct type *t, *elem;
3999
4000 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4001 TYPE_CODE_UNION);
4002 elem = builtin_type (gdbarch)->builtin_uint8;
4003 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4004 elem = builtin_type (gdbarch)->builtin_uint16;
4005 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4006 elem = builtin_type (gdbarch)->builtin_uint32;
4007 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4008 elem = builtin_type (gdbarch)->builtin_uint64;
4009 append_composite_type_field (t, "u64", elem);
4010 elem = builtin_type (gdbarch)->builtin_float;
4011 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4012 elem = builtin_type (gdbarch)->builtin_double;
4013 append_composite_type_field (t, "f64", elem);
4014
4015 TYPE_VECTOR (t) = 1;
4016 TYPE_NAME (t) = "neon_d";
4017 tdep->neon_double_type = t;
4018 }
4019
4020 return tdep->neon_double_type;
4021}
4022
4023/* FIXME: The vector types are not correctly ordered on big-endian
4024 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4025 bits of d0 - regardless of what unit size is being held in d0. So
4026 the offset of the first uint8 in d0 is 7, but the offset of the
4027 first float is 4. This code works as-is for little-endian
4028 targets. */
4029
4030static struct type *
4031arm_neon_quad_type (struct gdbarch *gdbarch)
4032{
4033 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4034
4035 if (tdep->neon_quad_type == NULL)
4036 {
4037 struct type *t, *elem;
4038
4039 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4040 TYPE_CODE_UNION);
4041 elem = builtin_type (gdbarch)->builtin_uint8;
4042 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4043 elem = builtin_type (gdbarch)->builtin_uint16;
4044 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4045 elem = builtin_type (gdbarch)->builtin_uint32;
4046 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4047 elem = builtin_type (gdbarch)->builtin_uint64;
4048 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4049 elem = builtin_type (gdbarch)->builtin_float;
4050 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4051 elem = builtin_type (gdbarch)->builtin_double;
4052 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4053
4054 TYPE_VECTOR (t) = 1;
4055 TYPE_NAME (t) = "neon_q";
4056 tdep->neon_quad_type = t;
4057 }
4058
4059 return tdep->neon_quad_type;
4060}
4061
34e8f22d
RE
4062/* Return the GDB type object for the "standard" data type of data in
4063 register N. */
4064
4065static struct type *
7a5ea0d4 4066arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4067{
58d6951d
DJ
4068 int num_regs = gdbarch_num_regs (gdbarch);
4069
4070 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4071 && regnum >= num_regs && regnum < num_regs + 32)
4072 return builtin_type (gdbarch)->builtin_float;
4073
4074 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4075 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4076 return arm_neon_quad_type (gdbarch);
4077
4078 /* If the target description has register information, we are only
4079 in this function so that we can override the types of
4080 double-precision registers for NEON. */
4081 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4082 {
4083 struct type *t = tdesc_register_type (gdbarch, regnum);
4084
4085 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4086 && TYPE_CODE (t) == TYPE_CODE_FLT
4087 && gdbarch_tdep (gdbarch)->have_neon)
4088 return arm_neon_double_type (gdbarch);
4089 else
4090 return t;
4091 }
4092
34e8f22d 4093 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4094 {
4095 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4096 return builtin_type (gdbarch)->builtin_void;
4097
4098 return arm_ext_type (gdbarch);
4099 }
e4c16157 4100 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4101 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4102 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4103 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4104 else if (regnum >= ARRAY_SIZE (arm_register_names))
4105 /* These registers are only supported on targets which supply
4106 an XML description. */
df4df182 4107 return builtin_type (gdbarch)->builtin_int0;
032758dc 4108 else
df4df182 4109 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4110}
4111
ff6f572f
DJ
4112/* Map a DWARF register REGNUM onto the appropriate GDB register
4113 number. */
4114
4115static int
d3f73121 4116arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4117{
4118 /* Core integer regs. */
4119 if (reg >= 0 && reg <= 15)
4120 return reg;
4121
4122 /* Legacy FPA encoding. These were once used in a way which
4123 overlapped with VFP register numbering, so their use is
4124 discouraged, but GDB doesn't support the ARM toolchain
4125 which used them for VFP. */
4126 if (reg >= 16 && reg <= 23)
4127 return ARM_F0_REGNUM + reg - 16;
4128
4129 /* New assignments for the FPA registers. */
4130 if (reg >= 96 && reg <= 103)
4131 return ARM_F0_REGNUM + reg - 96;
4132
4133 /* WMMX register assignments. */
4134 if (reg >= 104 && reg <= 111)
4135 return ARM_WCGR0_REGNUM + reg - 104;
4136
4137 if (reg >= 112 && reg <= 127)
4138 return ARM_WR0_REGNUM + reg - 112;
4139
4140 if (reg >= 192 && reg <= 199)
4141 return ARM_WC0_REGNUM + reg - 192;
4142
58d6951d
DJ
4143 /* VFP v2 registers. A double precision value is actually
4144 in d1 rather than s2, but the ABI only defines numbering
4145 for the single precision registers. This will "just work"
4146 in GDB for little endian targets (we'll read eight bytes,
4147 starting in s0 and then progressing to s1), but will be
4148 reversed on big endian targets with VFP. This won't
4149 be a problem for the new Neon quad registers; you're supposed
4150 to use DW_OP_piece for those. */
4151 if (reg >= 64 && reg <= 95)
4152 {
4153 char name_buf[4];
4154
8c042590 4155 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4156 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4157 strlen (name_buf));
4158 }
4159
4160 /* VFP v3 / Neon registers. This range is also used for VFP v2
4161 registers, except that it now describes d0 instead of s0. */
4162 if (reg >= 256 && reg <= 287)
4163 {
4164 char name_buf[4];
4165
8c042590 4166 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4167 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4168 strlen (name_buf));
4169 }
4170
ff6f572f
DJ
4171 return -1;
4172}
4173
26216b98
AC
4174/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4175static int
e7faf938 4176arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4177{
4178 int reg = regnum;
e7faf938 4179 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4180
ff6f572f
DJ
4181 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4182 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4183
4184 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4185 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4186
4187 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4188 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4189
26216b98
AC
4190 if (reg < NUM_GREGS)
4191 return SIM_ARM_R0_REGNUM + reg;
4192 reg -= NUM_GREGS;
4193
4194 if (reg < NUM_FREGS)
4195 return SIM_ARM_FP0_REGNUM + reg;
4196 reg -= NUM_FREGS;
4197
4198 if (reg < NUM_SREGS)
4199 return SIM_ARM_FPS_REGNUM + reg;
4200 reg -= NUM_SREGS;
4201
edefbb7c 4202 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4203}
34e8f22d 4204
a37b3cc0
AC
4205/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4206 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4207 It is thought that this is the floating-point register format on
4208 little-endian systems. */
c906108c 4209
ed9a39eb 4210static void
b508a996 4211convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4212 void *dbl, int endianess)
c906108c 4213{
a37b3cc0 4214 DOUBLEST d;
be8626e0
MD
4215
4216 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4217 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4218 else
4219 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4220 ptr, &d);
b508a996 4221 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4222}
4223
34e8f22d 4224static void
be8626e0
MD
4225convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4226 int endianess)
c906108c 4227{
a37b3cc0 4228 DOUBLEST d;
be8626e0 4229
b508a996 4230 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4231 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4232 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4233 else
4234 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4235 &d, dbl);
c906108c 4236}
ed9a39eb 4237
d9311bfa
AT
4238/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4239 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4240 NULL if an error occurs. BUF is freed. */
c906108c 4241
d9311bfa
AT
4242static gdb_byte *
4243extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4244 int old_len, int new_len)
4245{
4246 gdb_byte *new_buf;
4247 int bytes_to_read = new_len - old_len;
c906108c 4248
d9311bfa
AT
4249 new_buf = (gdb_byte *) xmalloc (new_len);
4250 memcpy (new_buf + bytes_to_read, buf, old_len);
4251 xfree (buf);
4252 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4253 {
4254 xfree (new_buf);
4255 return NULL;
c906108c 4256 }
d9311bfa 4257 return new_buf;
c906108c
SS
4258}
4259
d9311bfa
AT
4260/* An IT block is at most the 2-byte IT instruction followed by
4261 four 4-byte instructions. The furthest back we must search to
4262 find an IT block that affects the current instruction is thus
4263 2 + 3 * 4 == 14 bytes. */
4264#define MAX_IT_BLOCK_PREFIX 14
177321bd 4265
d9311bfa
AT
4266/* Use a quick scan if there are more than this many bytes of
4267 code. */
4268#define IT_SCAN_THRESHOLD 32
177321bd 4269
d9311bfa
AT
4270/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4271 A breakpoint in an IT block may not be hit, depending on the
4272 condition flags. */
ad527d2e 4273static CORE_ADDR
d9311bfa 4274arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4275{
d9311bfa
AT
4276 gdb_byte *buf;
4277 char map_type;
4278 CORE_ADDR boundary, func_start;
4279 int buf_len;
4280 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4281 int i, any, last_it, last_it_count;
177321bd 4282
d9311bfa
AT
4283 /* If we are using BKPT breakpoints, none of this is necessary. */
4284 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4285 return bpaddr;
177321bd 4286
d9311bfa
AT
4287 /* ARM mode does not have this problem. */
4288 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4289 return bpaddr;
177321bd 4290
d9311bfa
AT
4291 /* We are setting a breakpoint in Thumb code that could potentially
4292 contain an IT block. The first step is to find how much Thumb
4293 code there is; we do not need to read outside of known Thumb
4294 sequences. */
4295 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4296 if (map_type == 0)
4297 /* Thumb-2 code must have mapping symbols to have a chance. */
4298 return bpaddr;
9dca5578 4299
d9311bfa 4300 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4301
d9311bfa
AT
4302 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4303 && func_start > boundary)
4304 boundary = func_start;
9dca5578 4305
d9311bfa
AT
4306 /* Search for a candidate IT instruction. We have to do some fancy
4307 footwork to distinguish a real IT instruction from the second
4308 half of a 32-bit instruction, but there is no need for that if
4309 there's no candidate. */
325fac50 4310 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4311 if (buf_len == 0)
4312 /* No room for an IT instruction. */
4313 return bpaddr;
c906108c 4314
d9311bfa
AT
4315 buf = (gdb_byte *) xmalloc (buf_len);
4316 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4317 return bpaddr;
4318 any = 0;
4319 for (i = 0; i < buf_len; i += 2)
c906108c 4320 {
d9311bfa
AT
4321 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4322 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4323 {
d9311bfa
AT
4324 any = 1;
4325 break;
25b41d01 4326 }
c906108c 4327 }
d9311bfa
AT
4328
4329 if (any == 0)
c906108c 4330 {
d9311bfa
AT
4331 xfree (buf);
4332 return bpaddr;
f9d67f43
DJ
4333 }
4334
4335 /* OK, the code bytes before this instruction contain at least one
4336 halfword which resembles an IT instruction. We know that it's
4337 Thumb code, but there are still two possibilities. Either the
4338 halfword really is an IT instruction, or it is the second half of
4339 a 32-bit Thumb instruction. The only way we can tell is to
4340 scan forwards from a known instruction boundary. */
4341 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4342 {
4343 int definite;
4344
4345 /* There's a lot of code before this instruction. Start with an
4346 optimistic search; it's easy to recognize halfwords that can
4347 not be the start of a 32-bit instruction, and use that to
4348 lock on to the instruction boundaries. */
4349 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4350 if (buf == NULL)
4351 return bpaddr;
4352 buf_len = IT_SCAN_THRESHOLD;
4353
4354 definite = 0;
4355 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4356 {
4357 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4358 if (thumb_insn_size (inst1) == 2)
4359 {
4360 definite = 1;
4361 break;
4362 }
4363 }
4364
4365 /* At this point, if DEFINITE, BUF[I] is the first place we
4366 are sure that we know the instruction boundaries, and it is far
4367 enough from BPADDR that we could not miss an IT instruction
4368 affecting BPADDR. If ! DEFINITE, give up - start from a
4369 known boundary. */
4370 if (! definite)
4371 {
0963b4bd
MS
4372 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4373 bpaddr - boundary);
f9d67f43
DJ
4374 if (buf == NULL)
4375 return bpaddr;
4376 buf_len = bpaddr - boundary;
4377 i = 0;
4378 }
4379 }
4380 else
4381 {
4382 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4383 if (buf == NULL)
4384 return bpaddr;
4385 buf_len = bpaddr - boundary;
4386 i = 0;
4387 }
4388
4389 /* Scan forwards. Find the last IT instruction before BPADDR. */
4390 last_it = -1;
4391 last_it_count = 0;
4392 while (i < buf_len)
4393 {
4394 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4395 last_it_count--;
4396 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4397 {
4398 last_it = i;
4399 if (inst1 & 0x0001)
4400 last_it_count = 4;
4401 else if (inst1 & 0x0002)
4402 last_it_count = 3;
4403 else if (inst1 & 0x0004)
4404 last_it_count = 2;
4405 else
4406 last_it_count = 1;
4407 }
4408 i += thumb_insn_size (inst1);
4409 }
4410
4411 xfree (buf);
4412
4413 if (last_it == -1)
4414 /* There wasn't really an IT instruction after all. */
4415 return bpaddr;
4416
4417 if (last_it_count < 1)
4418 /* It was too far away. */
4419 return bpaddr;
4420
4421 /* This really is a trouble spot. Move the breakpoint to the IT
4422 instruction. */
4423 return bpaddr - buf_len + last_it;
4424}
4425
cca44b1b 4426/* ARM displaced stepping support.
c906108c 4427
cca44b1b 4428 Generally ARM displaced stepping works as follows:
c906108c 4429
cca44b1b 4430 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4431 arm_process_displaced_insn. Depending on the type of instruction, it is
4432 then copied to a scratch location, possibly in a modified form. The
4433 copy_* set of functions performs such modification, as necessary. A
4434 breakpoint is placed after the modified instruction in the scratch space
4435 to return control to GDB. Note in particular that instructions which
4436 modify the PC will no longer do so after modification.
c5aa993b 4437
cca44b1b
JB
4438 2. The instruction is single-stepped, by setting the PC to the scratch
4439 location address, and resuming. Control returns to GDB when the
4440 breakpoint is hit.
c5aa993b 4441
cca44b1b
JB
4442 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4443 function used for the current instruction. This function's job is to
4444 put the CPU/memory state back to what it would have been if the
4445 instruction had been executed unmodified in its original location. */
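/* Rough illustration (added note, not an additional code path): a PC-relative
   load such as "ldr r0, [pc, #8]" cannot simply be run from the scratch pad,
   because the PC would read as a scratch-pad address there.  The corresponding
   copy_* routine materializes the original PC value in a temporary register
   and rewrites the instruction to use it; the matching cleanup_* routine then
   restores that register.  */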
c5aa993b 4446
cca44b1b
JB
4447/* NOP instruction (mov r0, r0). */
4448#define ARM_NOP 0xe1a00000
34518530 4449#define THUMB_NOP 0x4600
cca44b1b
JB
4450
4451/* Helper for register reads for displaced stepping. In particular, this
4452 returns the PC as it would be seen by the instruction at its original
4453 location. */
4454
4455ULONGEST
36073a92
YQ
4456displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4457 int regno)
cca44b1b
JB
4458{
4459 ULONGEST ret;
36073a92 4460 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4461
bf9f652a 4462 if (regno == ARM_PC_REGNUM)
cca44b1b 4463 {
4db71c0b
YQ
4464 /* Compute pipeline offset:
4465 - When executing an ARM instruction, PC reads as the address of the
4466 current instruction plus 8.
4467 - When executing a Thumb instruction, PC reads as the address of the
4468 current instruction plus 4. */
4469
36073a92 4470 if (!dsc->is_thumb)
4db71c0b
YQ
4471 from += 8;
4472 else
4473 from += 4;
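 /* (Example: an ARM-mode instruction originally at 0x8000 that reads the
    PC must observe 0x8008, even though it actually executes from the
    scratch pad.)  */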
4474
cca44b1b
JB
4475 if (debug_displaced)
4476 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4477 (unsigned long) from);
4478 return (ULONGEST) from;
cca44b1b 4479 }
c906108c 4480 else
cca44b1b
JB
4481 {
4482 regcache_cooked_read_unsigned (regs, regno, &ret);
4483 if (debug_displaced)
4484 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4485 regno, (unsigned long) ret);
4486 return ret;
4487 }
c906108c
SS
4488}
4489
cca44b1b
JB
4490static int
4491displaced_in_arm_mode (struct regcache *regs)
4492{
4493 ULONGEST ps;
9779414d 4494 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4495
cca44b1b 4496 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4497
9779414d 4498 return (ps & t_bit) == 0;
cca44b1b 4499}
66e810cd 4500
cca44b1b 4501/* Write to the PC as from a branch instruction. */
c906108c 4502
cca44b1b 4503static void
36073a92
YQ
4504branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4505 ULONGEST val)
c906108c 4506{
36073a92 4507 if (!dsc->is_thumb)
cca44b1b
JB
4508 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4509 architecture versions < 6. */
0963b4bd
MS
4510 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4511 val & ~(ULONGEST) 0x3);
cca44b1b 4512 else
0963b4bd
MS
4513 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4514 val & ~(ULONGEST) 0x1);
cca44b1b 4515}
66e810cd 4516
cca44b1b
JB
4517/* Write to the PC as from a branch-exchange instruction. */
4518
4519static void
4520bx_write_pc (struct regcache *regs, ULONGEST val)
4521{
4522 ULONGEST ps;
9779414d 4523 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4524
4525 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4526
4527 if ((val & 1) == 1)
c906108c 4528 {
9779414d 4529 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4530 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4531 }
4532 else if ((val & 2) == 0)
4533 {
9779414d 4534 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4535 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4536 }
4537 else
4538 {
cca44b1b
JB
4539 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4540 mode, align dest to 4 bytes). */
4541 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4542 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4543 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4544 }
4545}
ed9a39eb 4546
cca44b1b 4547/* Write to the PC as if from a load instruction. */
ed9a39eb 4548
34e8f22d 4549static void
36073a92
YQ
4550load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4551 ULONGEST val)
ed9a39eb 4552{
cca44b1b
JB
4553 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4554 bx_write_pc (regs, val);
4555 else
36073a92 4556 branch_write_pc (regs, dsc, val);
cca44b1b 4557}
be8626e0 4558
cca44b1b
JB
4559/* Write to the PC as if from an ALU instruction. */
4560
4561static void
36073a92
YQ
4562alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4563 ULONGEST val)
cca44b1b 4564{
36073a92 4565 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4566 bx_write_pc (regs, val);
4567 else
36073a92 4568 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4569}
4570
4571/* Helper for writing to registers for displaced stepping. Writing to the PC
4572 has varying effects depending on the instruction which does the write:
4573 this is controlled by the WRITE_PC argument. */
4574
4575void
4576displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4577 int regno, ULONGEST val, enum pc_write_style write_pc)
4578{
bf9f652a 4579 if (regno == ARM_PC_REGNUM)
08216dd7 4580 {
cca44b1b
JB
4581 if (debug_displaced)
4582 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4583 (unsigned long) val);
4584 switch (write_pc)
08216dd7 4585 {
cca44b1b 4586 case BRANCH_WRITE_PC:
36073a92 4587 branch_write_pc (regs, dsc, val);
08216dd7
RE
4588 break;
4589
cca44b1b
JB
4590 case BX_WRITE_PC:
4591 bx_write_pc (regs, val);
4592 break;
4593
4594 case LOAD_WRITE_PC:
36073a92 4595 load_write_pc (regs, dsc, val);
cca44b1b
JB
4596 break;
4597
4598 case ALU_WRITE_PC:
36073a92 4599 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4600 break;
4601
4602 case CANNOT_WRITE_PC:
4603 warning (_("Instruction wrote to PC in an unexpected way when "
4604 "single-stepping"));
08216dd7
RE
4605 break;
4606
4607 default:
97b9747c
JB
4608 internal_error (__FILE__, __LINE__,
4609 _("Invalid argument to displaced_write_reg"));
08216dd7 4610 }
b508a996 4611
cca44b1b 4612 dsc->wrote_to_pc = 1;
b508a996 4613 }
ed9a39eb 4614 else
b508a996 4615 {
cca44b1b
JB
4616 if (debug_displaced)
4617 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4618 regno, (unsigned long) val);
4619 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4620 }
34e8f22d
RE
4621}
4622
cca44b1b
JB
4623/* This function is used to concisely determine if an instruction INSN
4624 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4625 corresponding fields of BITMASK set to 0b1111. The function
4626 returns 1 if any of these fields in INSN reference the PC
4627 (also 0b1111, r15), else it returns 0. */
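/* For example, insn_references_pc (insn, 0x000f0000ul) tests whether the
   field in bits 16..19 (typically Rn) is 0b1111, i.e. whether the
   instruction names the PC there.  */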
67255d04
RE
4628
4629static int
cca44b1b 4630insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4631{
cca44b1b 4632 uint32_t lowbit = 1;
67255d04 4633
cca44b1b
JB
4634 while (bitmask != 0)
4635 {
4636 uint32_t mask;
44e1a9eb 4637
cca44b1b
JB
4638 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4639 ;
67255d04 4640
cca44b1b
JB
4641 if (!lowbit)
4642 break;
67255d04 4643
cca44b1b 4644 mask = lowbit * 0xf;
67255d04 4645
cca44b1b
JB
4646 if ((insn & mask) == mask)
4647 return 1;
4648
4649 bitmask &= ~mask;
67255d04
RE
4650 }
4651
cca44b1b
JB
4652 return 0;
4653}
2af48f68 4654
cca44b1b
JB
4655/* The simplest copy function. Many instructions have the same effect no
4656 matter what address they are executed at: in those cases, use this. */
67255d04 4657
cca44b1b 4658static int
7ff120b4
YQ
4659arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4660 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4661{
4662 if (debug_displaced)
4663 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4664 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4665 iname);
67255d04 4666
cca44b1b 4667 dsc->modinsn[0] = insn;
67255d04 4668
cca44b1b
JB
4669 return 0;
4670}
4671
34518530
YQ
4672static int
4673thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4674 uint16_t insn2, const char *iname,
4675 struct displaced_step_closure *dsc)
4676{
4677 if (debug_displaced)
4678 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4679 "opcode/class '%s' unmodified\n", insn1, insn2,
4680 iname);
4681
4682 dsc->modinsn[0] = insn1;
4683 dsc->modinsn[1] = insn2;
4684 dsc->numinsns = 2;
4685
4686 return 0;
4687}
4688
4689/* Copy a 16-bit Thumb instruction (plain Thumb or a 16-bit Thumb-2
4690 encoding) without any modification. */
4691static int
615234c1 4692thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4693 const char *iname,
4694 struct displaced_step_closure *dsc)
4695{
4696 if (debug_displaced)
4697 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4698 "opcode/class '%s' unmodified\n", insn,
4699 iname);
4700
4701 dsc->modinsn[0] = insn;
4702
4703 return 0;
4704}
4705
cca44b1b
JB
4706/* Preload instructions with immediate offset. */
4707
4708static void
6e39997a 4709cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4710 struct regcache *regs, struct displaced_step_closure *dsc)
4711{
4712 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4713 if (!dsc->u.preload.immed)
4714 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4715}
4716
7ff120b4
YQ
4717static void
4718install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4719 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4720{
cca44b1b 4721 ULONGEST rn_val;
cca44b1b
JB
4722 /* Preload instructions:
4723
4724 {pli/pld} [rn, #+/-imm]
4725 ->
4726 {pli/pld} [r0, #+/-imm]. */
4727
36073a92
YQ
4728 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4729 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4730 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4731 dsc->u.preload.immed = 1;
4732
cca44b1b 4733 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4734}
4735
cca44b1b 4736static int
7ff120b4 4737arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4738 struct displaced_step_closure *dsc)
4739{
4740 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4741
7ff120b4
YQ
4742 if (!insn_references_pc (insn, 0x000f0000ul))
4743 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4744
4745 if (debug_displaced)
4746 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4747 (unsigned long) insn);
4748
7ff120b4
YQ
4749 dsc->modinsn[0] = insn & 0xfff0ffff;
4750
4751 install_preload (gdbarch, regs, dsc, rn);
4752
4753 return 0;
4754}
4755
34518530
YQ
4756static int
4757thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4758 struct regcache *regs, struct displaced_step_closure *dsc)
4759{
4760 unsigned int rn = bits (insn1, 0, 3);
4761 unsigned int u_bit = bit (insn1, 7);
4762 int imm12 = bits (insn2, 0, 11);
4763 ULONGEST pc_val;
4764
4765 if (rn != ARM_PC_REGNUM)
4766 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4767
4768 /* The PC is only allowed to be used in PLI (immediate, literal) Encoding T3
4769 and PLD (literal) Encoding T1. */
4770 if (debug_displaced)
4771 fprintf_unfiltered (gdb_stdlog,
4772 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4773 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4774 imm12);
4775
4776 if (!u_bit)
4777 imm12 = -1 * imm12;
4778
4779 /* Rewrite instruction {pli/pld} PC imm12 into:
4780 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4781
4782 {pli/pld} [r0, r1]
4783
4784 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4785
4786 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4787 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4788
4789 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4790
4791 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4792 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4793 dsc->u.preload.immed = 0;
4794
4795 /* {pli/pld} [r0, r1] */
4796 dsc->modinsn[0] = insn1 & 0xfff0;
4797 dsc->modinsn[1] = 0xf001;
4798 dsc->numinsns = 2;
4799
4800 dsc->cleanup = &cleanup_preload;
4801 return 0;
4802}
4803
7ff120b4
YQ
4804/* Preload instructions with register offset. */
4805
4806static void
4807install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4808 struct displaced_step_closure *dsc, unsigned int rn,
4809 unsigned int rm)
4810{
4811 ULONGEST rn_val, rm_val;
4812
cca44b1b
JB
4813 /* Preload register-offset instructions:
4814
4815 {pli/pld} [rn, rm {, shift}]
4816 ->
4817 {pli/pld} [r0, r1 {, shift}]. */
4818
36073a92
YQ
4819 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4820 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4821 rn_val = displaced_read_reg (regs, dsc, rn);
4822 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4823 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4824 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4825 dsc->u.preload.immed = 0;
4826
cca44b1b 4827 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4828}
4829
4830static int
4831arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4832 struct regcache *regs,
4833 struct displaced_step_closure *dsc)
4834{
4835 unsigned int rn = bits (insn, 16, 19);
4836 unsigned int rm = bits (insn, 0, 3);
4837
4838
4839 if (!insn_references_pc (insn, 0x000f000ful))
4840 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4841
4842 if (debug_displaced)
4843 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4844 (unsigned long) insn);
4845
4846 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4847
7ff120b4 4848 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4849 return 0;
4850}
4851
4852/* Copy/cleanup coprocessor load and store instructions. */
4853
4854static void
6e39997a 4855cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4856 struct regcache *regs,
4857 struct displaced_step_closure *dsc)
4858{
36073a92 4859 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4860
4861 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4862
4863 if (dsc->u.ldst.writeback)
4864 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4865}
4866
7ff120b4
YQ
4867static void
4868install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4869 struct displaced_step_closure *dsc,
4870 int writeback, unsigned int rn)
cca44b1b 4871{
cca44b1b 4872 ULONGEST rn_val;
cca44b1b 4873
cca44b1b
JB
4874 /* Coprocessor load/store instructions:
4875
4876 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4877 ->
4878 {stc/stc2} [r0, #+/-imm].
4879
4880 ldc/ldc2 are handled identically. */
4881
36073a92
YQ
4882 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4883 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4884 /* PC should be 4-byte aligned. */
4885 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4886 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4887
7ff120b4 4888 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4889 dsc->u.ldst.rn = rn;
4890
7ff120b4
YQ
4891 dsc->cleanup = &cleanup_copro_load_store;
4892}
4893
4894static int
4895arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4896 struct regcache *regs,
4897 struct displaced_step_closure *dsc)
4898{
4899 unsigned int rn = bits (insn, 16, 19);
4900
4901 if (!insn_references_pc (insn, 0x000f0000ul))
4902 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4903
4904 if (debug_displaced)
4905 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4906 "load/store insn %.8lx\n", (unsigned long) insn);
4907
cca44b1b
JB
4908 dsc->modinsn[0] = insn & 0xfff0ffff;
4909
7ff120b4 4910 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4911
4912 return 0;
4913}
4914
34518530
YQ
4915static int
4916thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4917 uint16_t insn2, struct regcache *regs,
4918 struct displaced_step_closure *dsc)
4919{
4920 unsigned int rn = bits (insn1, 0, 3);
4921
4922 if (rn != ARM_PC_REGNUM)
4923 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4924 "copro load/store", dsc);
4925
4926 if (debug_displaced)
4927 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4928 "load/store insn %.4x%.4x\n", insn1, insn2);
4929
4930 dsc->modinsn[0] = insn1 & 0xfff0;
4931 dsc->modinsn[1] = insn2;
4932 dsc->numinsns = 2;
4933
4934 /* This function is called to copy an LDC/LDC2/VLDR instruction, which
4935 doesn't support writeback, so pass 0 for the writeback flag. */
4936 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4937
4938 return 0;
4939}
4940
cca44b1b
JB
4941/* Clean up branch instructions (actually perform the branch, by setting
4942 PC). */
4943
4944static void
6e39997a 4945cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4946 struct displaced_step_closure *dsc)
4947{
36073a92 4948 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4949 int branch_taken = condition_true (dsc->u.branch.cond, status);
4950 enum pc_write_style write_pc = dsc->u.branch.exchange
4951 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4952
4953 if (!branch_taken)
4954 return;
4955
4956 if (dsc->u.branch.link)
4957 {
8c8dba6d
YQ
4958 /* The value of LR should be the address of the insn following the current
4959 one. In order not to confuse the logic handling a later `bx lr' insn, if
4960 the current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
4961 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4962
4963 if (dsc->is_thumb)
4964 next_insn_addr |= 0x1;
4965
4966 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4967 CANNOT_WRITE_PC);
cca44b1b
JB
4968 }
4969
bf9f652a 4970 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4971}
4972
4973/* Copy B/BL/BLX instructions with immediate destinations. */
4974
7ff120b4
YQ
4975static void
4976install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4977 struct displaced_step_closure *dsc,
4978 unsigned int cond, int exchange, int link, long offset)
4979{
4980 /* Implement "BL<cond> <label>" as:
4981
4982 Preparation: cond <- instruction condition
4983 Insn: mov r0, r0 (nop)
4984 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4985
4986 B<cond> similar, but don't set r14 in cleanup. */
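 /* Added note: the branch itself is never executed from the scratch pad;
    a NOP is single-stepped instead and cleanup_branch performs the actual
    control transfer, so no scratch-pad address leaks into the PC or LR.  */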
4987
4988 dsc->u.branch.cond = cond;
4989 dsc->u.branch.link = link;
4990 dsc->u.branch.exchange = exchange;
4991
2b16b2e3
YQ
4992 dsc->u.branch.dest = dsc->insn_addr;
4993 if (link && exchange)
4994 /* For BLX, offset is computed from the Align (PC, 4). */
4995 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4996
7ff120b4 4997 if (dsc->is_thumb)
2b16b2e3 4998 dsc->u.branch.dest += 4 + offset;
7ff120b4 4999 else
2b16b2e3 5000 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
5001
5002 dsc->cleanup = &cleanup_branch;
5003}
cca44b1b 5004static int
7ff120b4
YQ
5005arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5006 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5007{
5008 unsigned int cond = bits (insn, 28, 31);
5009 int exchange = (cond == 0xf);
5010 int link = exchange || bit (insn, 24);
cca44b1b
JB
5011 long offset;
5012
5013 if (debug_displaced)
5014 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5015 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5016 (unsigned long) insn);
cca44b1b
JB
5017 if (exchange)
5018 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5019 then arrange the switch into Thumb mode. */
5020 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5021 else
5022 offset = bits (insn, 0, 23) << 2;
5023
5024 if (bit (offset, 25))
5025 offset = offset | ~0x3ffffff;
5026
cca44b1b
JB
5027 dsc->modinsn[0] = ARM_NOP;
5028
7ff120b4 5029 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5030 return 0;
5031}
5032
34518530
YQ
5033static int
5034thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5035 uint16_t insn2, struct regcache *regs,
5036 struct displaced_step_closure *dsc)
5037{
5038 int link = bit (insn2, 14);
5039 int exchange = link && !bit (insn2, 12);
5040 int cond = INST_AL;
5041 long offset = 0;
5042 int j1 = bit (insn2, 13);
5043 int j2 = bit (insn2, 11);
5044 int s = sbits (insn1, 10, 10);
5045 int i1 = !(j1 ^ bit (insn1, 10));
5046 int i2 = !(j2 ^ bit (insn1, 10));
5047
5048 if (!link && !exchange) /* B */
5049 {
5050 offset = (bits (insn2, 0, 10) << 1);
5051 if (bit (insn2, 12)) /* Encoding T4 */
5052 {
5053 offset |= (bits (insn1, 0, 9) << 12)
5054 | (i2 << 22)
5055 | (i1 << 23)
5056 | (s << 24);
5057 cond = INST_AL;
5058 }
5059 else /* Encoding T3 */
5060 {
5061 offset |= (bits (insn1, 0, 5) << 12)
5062 | (j1 << 18)
5063 | (j2 << 19)
5064 | (s << 20);
5065 cond = bits (insn1, 6, 9);
5066 }
5067 }
5068 else
5069 {
5070 offset = (bits (insn1, 0, 9) << 12);
5071 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5072 offset |= exchange ?
5073 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5074 }
5075
5076 if (debug_displaced)
5077 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5078 "%.4x %.4x with offset %.8lx\n",
5079 link ? (exchange) ? "blx" : "bl" : "b",
5080 insn1, insn2, offset);
5081
5082 dsc->modinsn[0] = THUMB_NOP;
5083
5084 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5085 return 0;
5086}
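
/* Illustrative sketch, not from the original source: the offset
   reconstruction above for a 32-bit Thumb BL/B.W (encoding T4),
   written as a hypothetical standalone helper.  I1/I2 are derived
   from J1/J2 and the sign bit S exactly as in the code above, and the
   destination is then the instruction address + 4 + the returned
   offset.  */

static int
example_thumb2_bl_offset (uint16_t insn1, uint16_t insn2)
{
  int s = (insn1 >> 10) & 1;
  int j1 = (insn2 >> 13) & 1;
  int j2 = (insn2 >> 11) & 1;
  int i1 = !(j1 ^ s);			/* I1 = NOT (J1 EOR S).  */
  int i2 = !(j2 ^ s);			/* I2 = NOT (J2 EOR S).  */
  int offset = ((insn1 & 0x3ff) << 12)	/* imm10.  */
	       | ((insn2 & 0x7ff) << 1)	/* imm11:'0'.  */
	       | (i2 << 22) | (i1 << 23);

  if (s)			/* S is bit 24 of the 25-bit offset...  */
    offset |= ~0x00ffffff;	/* ...so set it and sign-extend in one go.  */

  return offset;
}
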
5087
5088/* Copy Thumb B instructions. */
5089static int
615234c1 5090thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5091 struct displaced_step_closure *dsc)
5092{
5093 unsigned int cond = 0;
5094 int offset = 0;
5095 unsigned short bit_12_15 = bits (insn, 12, 15);
5096 CORE_ADDR from = dsc->insn_addr;
5097
5098 if (bit_12_15 == 0xd)
5099 {
5100 /* offset = SignExtend (imm8:0, 32) */
5101 offset = sbits ((insn << 1), 0, 8);
5102 cond = bits (insn, 8, 11);
5103 }
5104 else if (bit_12_15 == 0xe) /* Encoding T2 */
5105 {
5106 offset = sbits ((insn << 1), 0, 11);
5107 cond = INST_AL;
5108 }
5109
5110 if (debug_displaced)
5111 fprintf_unfiltered (gdb_stdlog,
5112 "displaced: copying b immediate insn %.4x "
5113 "with offset %d\n", insn, offset);
5114
5115 dsc->u.branch.cond = cond;
5116 dsc->u.branch.link = 0;
5117 dsc->u.branch.exchange = 0;
5118 dsc->u.branch.dest = from + 4 + offset;
5119
5120 dsc->modinsn[0] = THUMB_NOP;
5121
5122 dsc->cleanup = &cleanup_branch;
5123
5124 return 0;
5125}
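
/* Illustrative sketch, not from the original source: the encoding T1
   (conditional Thumb B) offset that sbits ((insn << 1), 0, 8) computes
   above, spelled out as a hypothetical standalone helper.  The branch
   destination is then the instruction address + 4 + offset, matching
   dsc->u.branch.dest above.  */

static int
example_thumb_b_t1_offset (uint16_t insn)
{
  int offset = (insn & 0xff) << 1;	/* imm8:'0' -> 9-bit byte offset.  */

  if (offset & 0x100)			/* Sign bit of the 9-bit field.  */
    offset |= ~0x1ff;			/* Sign-extend to int.  */

  return offset;
}
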
5126
cca44b1b
JB
5127/* Copy BX/BLX with register-specified destinations. */
5128
7ff120b4
YQ
5129static void
5130install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5131 struct displaced_step_closure *dsc, int link,
5132 unsigned int cond, unsigned int rm)
cca44b1b 5133{
cca44b1b
JB
5134 /* Implement "{BX,BLX}<cond> <reg>" as:
5135
5136 Preparation: cond <- instruction condition
5137 Insn: mov r0, r0 (nop)
5138 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5139
5140 Don't set r14 in cleanup for BX. */
5141
36073a92 5142 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5143
5144 dsc->u.branch.cond = cond;
5145 dsc->u.branch.link = link;
cca44b1b 5146
7ff120b4 5147 dsc->u.branch.exchange = 1;
cca44b1b
JB
5148
5149 dsc->cleanup = &cleanup_branch;
7ff120b4 5150}
cca44b1b 5151
7ff120b4
YQ
5152static int
5153arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5154 struct regcache *regs, struct displaced_step_closure *dsc)
5155{
5156 unsigned int cond = bits (insn, 28, 31);
5157 /* BX: x12xxx1x
5158 BLX: x12xxx3x. */
5159 int link = bit (insn, 5);
5160 unsigned int rm = bits (insn, 0, 3);
5161
5162 if (debug_displaced)
5163 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx\n",
5164 (unsigned long) insn);
5165
5166 dsc->modinsn[0] = ARM_NOP;
5167
5168 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5169 return 0;
5170}
5171
34518530
YQ
5172static int
5173thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5174 struct regcache *regs,
5175 struct displaced_step_closure *dsc)
5176{
5177 int link = bit (insn, 7);
5178 unsigned int rm = bits (insn, 3, 6);
5179
5180 if (debug_displaced)
5181 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x\n",
5182 (unsigned short) insn);
5183
5184 dsc->modinsn[0] = THUMB_NOP;
5185
5186 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5187
5188 return 0;
5189}
5190
5191
0963b4bd 5192/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5193
5194static void
6e39997a 5195cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5196 struct regcache *regs, struct displaced_step_closure *dsc)
5197{
36073a92 5198 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5199 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5200 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5201 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5202}
5203
5204static int
7ff120b4
YQ
5205arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5206 struct displaced_step_closure *dsc)
cca44b1b
JB
5207{
5208 unsigned int rn = bits (insn, 16, 19);
5209 unsigned int rd = bits (insn, 12, 15);
5210 unsigned int op = bits (insn, 21, 24);
5211 int is_mov = (op == 0xd);
5212 ULONGEST rd_val, rn_val;
cca44b1b
JB
5213
5214 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5215 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5216
5217 if (debug_displaced)
5218 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5219 "%.8lx\n", is_mov ? "move" : "ALU",
5220 (unsigned long) insn);
5221
5222 /* Instruction is of form:
5223
5224 <op><cond> rd, [rn,] #imm
5225
5226 Rewrite as:
5227
5228 Preparation: tmp1, tmp2 <- r0, r1;
5229 r0, r1 <- rd, rn
5230 Insn: <op><cond> r0, r1, #imm
5231 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5232 */
5233
36073a92
YQ
5234 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5235 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5236 rn_val = displaced_read_reg (regs, dsc, rn);
5237 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5238 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5239 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5240 dsc->rd = rd;
5241
5242 if (is_mov)
5243 dsc->modinsn[0] = insn & 0xfff00fff;
5244 else
5245 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5246
5247 dsc->cleanup = &cleanup_alu_imm;
5248
5249 return 0;
5250}
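
/* Illustrative sketch, not part of the original source: the field
   rewrite the routine above applies to the copied instruction, as a
   hypothetical helper.  For example "add r2, pc, #4" is copied as
   "add r0, r1, #4": r1 was preloaded with the original PC value and
   the cleanup moves r0 back into r2 afterwards.  */

static uint32_t
example_retarget_alu_imm (uint32_t insn, int is_mov)
{
  uint32_t copy = insn & 0xfff00fff;	/* Clear Rd (bits 12-15) and Rn (bits 16-19).  */

  if (!is_mov)
    copy |= 0x10000;	/* Rn <- r1; Rd is left as r0.  MOV has no Rn field.  */

  return copy;
}
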
5251
34518530
YQ
5252static int
5253thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5254 uint16_t insn2, struct regcache *regs,
5255 struct displaced_step_closure *dsc)
5256{
5257 unsigned int op = bits (insn1, 5, 8);
5258 unsigned int rn, rm, rd;
5259 ULONGEST rd_val, rn_val;
5260
5261 rn = bits (insn1, 0, 3); /* Rn */
5262 rm = bits (insn2, 0, 3); /* Rm */
5263 rd = bits (insn2, 8, 11); /* Rd */
5264
5265 /* This routine is only called for the MOV instruction. */
5266 gdb_assert (op == 0x2 && rn == 0xf);
5267
5268 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5269 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5270
5271 if (debug_displaced)
5272 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5273 "ALU", insn1, insn2);
5274
5275 /* Instruction is of form:
5276
5277 <op><cond> rd, [rn,] #imm
5278
5279 Rewrite as:
5280
5281 Preparation: tmp1, tmp2 <- r0, r1;
5282 r0, r1 <- rd, rn
5283 Insn: <op><cond> r0, r1, #imm
5284 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5285 */
5286
5287 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5288 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5289 rn_val = displaced_read_reg (regs, dsc, rn);
5290 rd_val = displaced_read_reg (regs, dsc, rd);
5291 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5292 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5293 dsc->rd = rd;
5294
5295 dsc->modinsn[0] = insn1;
5296 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5297 dsc->numinsns = 2;
5298
5299 dsc->cleanup = &cleanup_alu_imm;
5300
5301 return 0;
5302}
5303
cca44b1b
JB
5304/* Copy/cleanup arithmetic/logic insns with register RHS. */
5305
5306static void
6e39997a 5307cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5308 struct regcache *regs, struct displaced_step_closure *dsc)
5309{
5310 ULONGEST rd_val;
5311 int i;
5312
36073a92 5313 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5314
5315 for (i = 0; i < 3; i++)
5316 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5317
5318 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5319}
5320
7ff120b4
YQ
5321static void
5322install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5323 struct displaced_step_closure *dsc,
5324 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5325{
cca44b1b 5326 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5327
cca44b1b
JB
5328 /* Instruction is of form:
5329
5330 <op><cond> rd, [rn,] rm [, <shift>]
5331
5332 Rewrite as:
5333
5334 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5335 r0, r1, r2 <- rd, rn, rm
ef713951 5336 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5337 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5338 */
5339
36073a92
YQ
5340 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5341 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5342 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5343 rd_val = displaced_read_reg (regs, dsc, rd);
5344 rn_val = displaced_read_reg (regs, dsc, rn);
5345 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5346 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5347 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5348 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5349 dsc->rd = rd;
5350
7ff120b4
YQ
5351 dsc->cleanup = &cleanup_alu_reg;
5352}
5353
5354static int
5355arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5356 struct displaced_step_closure *dsc)
5357{
5358 unsigned int op = bits (insn, 21, 24);
5359 int is_mov = (op == 0xd);
5360
5361 if (!insn_references_pc (insn, 0x000ff00ful))
5362 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5363
5364 if (debug_displaced)
5365 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5366 is_mov ? "move" : "ALU", (unsigned long) insn);
5367
cca44b1b
JB
5368 if (is_mov)
5369 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5370 else
5371 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5372
7ff120b4
YQ
5373 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5374 bits (insn, 0, 3));
cca44b1b
JB
5375 return 0;
5376}
5377
34518530
YQ
5378static int
5379thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5380 struct regcache *regs,
5381 struct displaced_step_closure *dsc)
5382{
ef713951 5383 unsigned rm, rd;
34518530 5384
ef713951
YQ
5385 rm = bits (insn, 3, 6);
5386 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5387
ef713951 5388 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5389 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5390
5391 if (debug_displaced)
ef713951
YQ
5392 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5393 (unsigned short) insn);
34518530 5394
ef713951 5395 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5396
ef713951 5397 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5398
5399 return 0;
5400}
5401
cca44b1b
JB
5402/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5403
5404static void
6e39997a 5405cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5406 struct regcache *regs,
5407 struct displaced_step_closure *dsc)
5408{
36073a92 5409 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5410 int i;
5411
5412 for (i = 0; i < 4; i++)
5413 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5414
5415 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5416}
5417
7ff120b4
YQ
5418static void
5419install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5420 struct displaced_step_closure *dsc,
5421 unsigned int rd, unsigned int rn, unsigned int rm,
5422 unsigned rs)
cca44b1b 5423{
7ff120b4 5424 int i;
cca44b1b 5425 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5426
cca44b1b
JB
5427 /* Instruction is of form:
5428
5429 <op><cond> rd, [rn,] rm, <shift> rs
5430
5431 Rewrite as:
5432
5433 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5434 r0, r1, r2, r3 <- rd, rn, rm, rs
5435 Insn: <op><cond> r0, r1, r2, <shift> r3
5436 Cleanup: tmp5 <- r0
5437 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5438 rd <- tmp5
5439 */
5440
5441 for (i = 0; i < 4; i++)
36073a92 5442 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5443
36073a92
YQ
5444 rd_val = displaced_read_reg (regs, dsc, rd);
5445 rn_val = displaced_read_reg (regs, dsc, rn);
5446 rm_val = displaced_read_reg (regs, dsc, rm);
5447 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5448 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5449 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5450 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5451 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5452 dsc->rd = rd;
7ff120b4
YQ
5453 dsc->cleanup = &cleanup_alu_shifted_reg;
5454}
5455
5456static int
5457arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5458 struct regcache *regs,
5459 struct displaced_step_closure *dsc)
5460{
5461 unsigned int op = bits (insn, 21, 24);
5462 int is_mov = (op == 0xd);
5463 unsigned int rd, rn, rm, rs;
5464
5465 if (!insn_references_pc (insn, 0x000fff0ful))
5466 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5467
5468 if (debug_displaced)
5469 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5470 "%.8lx\n", is_mov ? "move" : "ALU",
5471 (unsigned long) insn);
5472
5473 rn = bits (insn, 16, 19);
5474 rm = bits (insn, 0, 3);
5475 rs = bits (insn, 8, 11);
5476 rd = bits (insn, 12, 15);
cca44b1b
JB
5477
5478 if (is_mov)
5479 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5480 else
5481 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5482
7ff120b4 5483 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5484
5485 return 0;
5486}
5487
5488/* Clean up load instructions. */
5489
5490static void
6e39997a 5491cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5492 struct displaced_step_closure *dsc)
5493{
5494 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5495
36073a92 5496 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5497 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5498 rt_val2 = displaced_read_reg (regs, dsc, 1);
5499 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5500
5501 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5502 if (dsc->u.ldst.xfersize > 4)
5503 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5504 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5505 if (!dsc->u.ldst.immed)
5506 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5507
5508 /* Handle register writeback. */
5509 if (dsc->u.ldst.writeback)
5510 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5511 /* Put result in right place. */
5512 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5513 if (dsc->u.ldst.xfersize == 8)
5514 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5515}
5516
5517/* Clean up store instructions. */
5518
5519static void
6e39997a 5520cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5521 struct displaced_step_closure *dsc)
5522{
36073a92 5523 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5524
5525 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5526 if (dsc->u.ldst.xfersize > 4)
5527 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5528 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5529 if (!dsc->u.ldst.immed)
5530 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5531 if (!dsc->u.ldst.restore_r4)
5532 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5533
5534 /* Writeback. */
5535 if (dsc->u.ldst.writeback)
5536 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5537}
5538
5539/* Copy "extra" load/store instructions. These are halfword/doubleword
5540 transfers, which have a different encoding to byte/word transfers. */
5541
5542static int
550dc4e2 5543arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5544 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5545{
5546 unsigned int op1 = bits (insn, 20, 24);
5547 unsigned int op2 = bits (insn, 5, 6);
5548 unsigned int rt = bits (insn, 12, 15);
5549 unsigned int rn = bits (insn, 16, 19);
5550 unsigned int rm = bits (insn, 0, 3);
5551 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5552 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5553 int immed = (op1 & 0x4) != 0;
5554 int opcode;
5555 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5556
5557 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5558 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5559
5560 if (debug_displaced)
5561 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5562 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5563 (unsigned long) insn);
5564
5565 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5566
5567 if (opcode < 0)
5568 internal_error (__FILE__, __LINE__,
5569 _("copy_extra_ld_st: instruction decode error"));
5570
36073a92
YQ
5571 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5572 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5573 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5574 if (!immed)
36073a92 5575 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5576
36073a92 5577 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5578 if (bytesize[opcode] == 8)
36073a92
YQ
5579 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5580 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5581 if (!immed)
36073a92 5582 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5583
5584 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5585 if (bytesize[opcode] == 8)
5586 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5587 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5588 if (!immed)
5589 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5590
5591 dsc->rd = rt;
5592 dsc->u.ldst.xfersize = bytesize[opcode];
5593 dsc->u.ldst.rn = rn;
5594 dsc->u.ldst.immed = immed;
5595 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5596 dsc->u.ldst.restore_r4 = 0;
5597
5598 if (immed)
5599 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5600 ->
5601 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5602 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5603 else
5604 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5605 ->
5606 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5607 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5608
5609 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5610
5611 return 0;
5612}
5613
0f6f04ba 5614/* Copy byte/half word/word loads and stores. */
cca44b1b 5615
7ff120b4 5616static void
0f6f04ba
YQ
5617install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5618 struct displaced_step_closure *dsc, int load,
5619 int immed, int writeback, int size, int usermode,
5620 int rt, int rm, int rn)
cca44b1b 5621{
cca44b1b 5622 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5623
36073a92
YQ
5624 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5625 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5626 if (!immed)
36073a92 5627 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5628 if (!load)
36073a92 5629 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5630
36073a92
YQ
5631 rt_val = displaced_read_reg (regs, dsc, rt);
5632 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5633 if (!immed)
36073a92 5634 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5635
5636 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5637 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5638 if (!immed)
5639 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5640 dsc->rd = rt;
0f6f04ba 5641 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5642 dsc->u.ldst.rn = rn;
5643 dsc->u.ldst.immed = immed;
7ff120b4 5644 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5645
5646 /* To write PC we can do:
5647
494e194e
YQ
5648 Before this sequence of instructions:
5649 r0 is the PC value read via displaced_read_reg, so r0 = from + 8;
5650 r2 is the Rn value read via displaced_read_reg.
5651
5652 Insn1: push {pc} Write address of STR instruction + offset on stack
5653 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5654 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5655 = addr(Insn1) + offset - addr(Insn3) - 8
5656 = offset - 16
5657 Insn4: add r4, r4, #8 r4 = offset - 8
5658 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5659 = from + offset
5660 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5661
5662 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5663 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5664 of this can be found in Section "Saving from r15" in
5665 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5666
7ff120b4
YQ
5667 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5668}
5669
34518530
YQ
5670
5671static int
5672thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5673 uint16_t insn2, struct regcache *regs,
5674 struct displaced_step_closure *dsc, int size)
5675{
5676 unsigned int u_bit = bit (insn1, 7);
5677 unsigned int rt = bits (insn2, 12, 15);
5678 int imm12 = bits (insn2, 0, 11);
5679 ULONGEST pc_val;
5680
5681 if (debug_displaced)
5682 fprintf_unfiltered (gdb_stdlog,
5683 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5684 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5685 imm12);
5686
5687 if (!u_bit)
5688 imm12 = -1 * imm12;
5689
5690 /* Rewrite instruction LDR Rt imm12 into:
5691
5692 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5693
5694 LDR R0, [R2, R3]
5695
5696 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5697
5698
5699 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5700 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5701 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5702
5703 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5704
5705 pc_val = pc_val & 0xfffffffc;
5706
5707 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5708 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5709
5710 dsc->rd = rt;
5711
5712 dsc->u.ldst.xfersize = size;
5713 dsc->u.ldst.immed = 0;
5714 dsc->u.ldst.writeback = 0;
5715 dsc->u.ldst.restore_r4 = 0;
5716
5717 /* LDR R0, [R2, R3] */
5718 dsc->modinsn[0] = 0xf852;
5719 dsc->modinsn[1] = 0x3;
5720 dsc->numinsns = 2;
5721
5722 dsc->cleanup = &cleanup_load;
5723
5724 return 0;
5725}
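
/* Illustrative sketch, not from the original source: the effective
   address computed by the rewritten "LDR R0, [R2, R3]" above, given
   that r2 was preloaded with Align (PC, 4) and r3 with the imm12
   (negated when the U bit is clear).  example_ldr_literal_address is
   a hypothetical helper.  */

static uint32_t
example_ldr_literal_address (uint32_t pc, unsigned int u_bit, uint32_t imm12)
{
  uint32_t base = pc & 0xfffffffc;	/* Align (PC, 4), as in the copy routine.  */

  return u_bit ? base + imm12 : base - imm12;
}
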
5726
5727static int
5728thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5729 uint16_t insn2, struct regcache *regs,
5730 struct displaced_step_closure *dsc,
5731 int writeback, int immed)
5732{
5733 unsigned int rt = bits (insn2, 12, 15);
5734 unsigned int rn = bits (insn1, 0, 3);
5735 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5736 /* In LDR (register), there is also a register Rm, which is not allowed to
5737 be PC, so we don't have to check it. */
5738
5739 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5740 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5741 dsc);
5742
5743 if (debug_displaced)
5744 fprintf_unfiltered (gdb_stdlog,
5745 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5746 rt, rn, insn1, insn2);
5747
5748 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5749 0, rt, rm, rn);
5750
5751 dsc->u.ldst.restore_r4 = 0;
5752
5753 if (immed)
5754 /* ldr[b]<cond> rt, [rn, #imm], etc.
5755 ->
5756 ldr[b]<cond> r0, [r2, #imm]. */
5757 {
5758 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5759 dsc->modinsn[1] = insn2 & 0x0fff;
5760 }
5761 else
5762 /* ldr[b]<cond> rt, [rn, rm], etc.
5763 ->
5764 ldr[b]<cond> r0, [r2, r3]. */
5765 {
5766 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5767 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5768 }
5769
5770 dsc->numinsns = 2;
5771
5772 return 0;
5773}
5774
5775
7ff120b4
YQ
5776static int
5777arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5778 struct regcache *regs,
5779 struct displaced_step_closure *dsc,
0f6f04ba 5780 int load, int size, int usermode)
7ff120b4
YQ
5781{
5782 int immed = !bit (insn, 25);
5783 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5784 unsigned int rt = bits (insn, 12, 15);
5785 unsigned int rn = bits (insn, 16, 19);
5786 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5787
5788 if (!insn_references_pc (insn, 0x000ff00ful))
5789 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5790
5791 if (debug_displaced)
5792 fprintf_unfiltered (gdb_stdlog,
5793 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5794 load ? (size == 1 ? "ldrb" : "ldr")
5795 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5796 rt, rn,
5797 (unsigned long) insn);
5798
0f6f04ba
YQ
5799 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5800 usermode, rt, rm, rn);
7ff120b4 5801
bf9f652a 5802 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5803 {
5804 dsc->u.ldst.restore_r4 = 0;
5805
5806 if (immed)
5807 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5808 ->
5809 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5810 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5811 else
5812 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5813 ->
5814 {ldr,str}[b]<cond> r0, [r2, r3]. */
5815 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5816 }
5817 else
5818 {
5819 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5820 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5821 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5822 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5823 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5824 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5825 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5826
5827 /* As above. */
5828 if (immed)
5829 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5830 else
5831 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5832
cca44b1b
JB
5833 dsc->numinsns = 6;
5834 }
5835
5836 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5837
5838 return 0;
5839}
5840
5841/* Cleanup LDM instructions with fully-populated register list. This is an
5842 unfortunate corner case: it's impossible to implement correctly by modifying
5843 the instruction. The issue is as follows: we have an instruction,
5844
5845 ldm rN, {r0-r15}
5846
5847 which we must rewrite to avoid loading PC. A possible solution would be to
5848 do the load in two halves, something like (with suitable cleanup
5849 afterwards):
5850
5851 mov r8, rN
5852 ldm[id][ab] r8!, {r0-r7}
5853 str r7, <temp>
5854 ldm[id][ab] r8, {r7-r14}
5855 <bkpt>
5856
5857 but at present there's no suitable place for <temp>, since the scratch space
5858 is overwritten before the cleanup routine is called. For now, we simply
5859 emulate the instruction. */
5860
5861static void
5862cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5863 struct displaced_step_closure *dsc)
5864{
cca44b1b
JB
5865 int inc = dsc->u.block.increment;
5866 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5867 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5868 uint32_t regmask = dsc->u.block.regmask;
5869 int regno = inc ? 0 : 15;
5870 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5871 int exception_return = dsc->u.block.load && dsc->u.block.user
5872 && (regmask & 0x8000) != 0;
36073a92 5873 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5874 int do_transfer = condition_true (dsc->u.block.cond, status);
5875 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5876
5877 if (!do_transfer)
5878 return;
5879
5880 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5881 sensible we can do here. Complain loudly. */
5882 if (exception_return)
5883 error (_("Cannot single-step exception return"));
5884
5885 /* We don't handle any stores here for now. */
5886 gdb_assert (dsc->u.block.load != 0);
5887
5888 if (debug_displaced)
5889 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5890 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5891 dsc->u.block.increment ? "inc" : "dec",
5892 dsc->u.block.before ? "before" : "after");
5893
5894 while (regmask)
5895 {
5896 uint32_t memword;
5897
5898 if (inc)
bf9f652a 5899 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5900 regno++;
5901 else
5902 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5903 regno--;
5904
5905 xfer_addr += bump_before;
5906
5907 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5908 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5909
5910 xfer_addr += bump_after;
5911
5912 regmask &= ~(1 << regno);
5913 }
5914
5915 if (dsc->u.block.writeback)
5916 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5917 CANNOT_WRITE_PC);
5918}
5919
5920/* Clean up an STM which included the PC in the register list. */
5921
5922static void
5923cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5924 struct displaced_step_closure *dsc)
5925{
36073a92 5926 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5927 int store_executed = condition_true (dsc->u.block.cond, status);
5928 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5929 CORE_ADDR stm_insn_addr;
5930 uint32_t pc_val;
5931 long offset;
5932 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5933
5934 /* If condition code fails, there's nothing else to do. */
5935 if (!store_executed)
5936 return;
5937
5938 if (dsc->u.block.increment)
5939 {
5940 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5941
5942 if (dsc->u.block.before)
5943 pc_stored_at += 4;
5944 }
5945 else
5946 {
5947 pc_stored_at = dsc->u.block.xfer_addr;
5948
5949 if (dsc->u.block.before)
5950 pc_stored_at -= 4;
5951 }
5952
5953 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5954 stm_insn_addr = dsc->scratch_base;
5955 offset = pc_val - stm_insn_addr;
5956
5957 if (debug_displaced)
5958 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5959 "STM instruction\n", offset);
5960
5961 /* Rewrite the stored PC to the proper value for the non-displaced original
5962 instruction. */
5963 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5964 dsc->insn_addr + offset);
5965}
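
/* Illustrative sketch, not part of the original source: the fixup the
   cleanup above performs.  Because the copied STM ran from the scratch
   area, the PC value it stored is scratch_base + delta, where delta is
   the core's PC store offset (typically 8 or 12).  Re-applying delta to
   the original instruction address gives the value the unmodified STM
   would have stored.  example_fixup_stored_pc is a hypothetical
   helper.  */

static uint32_t
example_fixup_stored_pc (uint32_t stored_pc, uint32_t scratch_base,
			 uint32_t original_insn_addr)
{
  uint32_t delta = stored_pc - scratch_base;	/* Auto-detected PC store offset.  */

  return original_insn_addr + delta;
}
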
5966
5967/* Clean up an LDM which includes the PC in the register list. We clumped all
5968 the registers in the transferred list into a contiguous range r0...rX (to
5969 avoid loading PC directly and losing control of the debugged program), so we
5970 must undo that here. */
5971
5972static void
6e39997a 5973cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5974 struct regcache *regs,
5975 struct displaced_step_closure *dsc)
5976{
36073a92 5977 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5978 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5979 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5980 unsigned int regs_loaded = bitcount (mask);
5981 unsigned int num_to_shuffle = regs_loaded, clobbered;
5982
5983 /* The method employed here will fail if the register list is fully populated
5984 (we need to avoid loading PC directly). */
5985 gdb_assert (num_to_shuffle < 16);
5986
5987 if (!load_executed)
5988 return;
5989
5990 clobbered = (1 << num_to_shuffle) - 1;
5991
5992 while (num_to_shuffle > 0)
5993 {
5994 if ((mask & (1 << write_reg)) != 0)
5995 {
5996 unsigned int read_reg = num_to_shuffle - 1;
5997
5998 if (read_reg != write_reg)
5999 {
36073a92 6000 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
6001 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6002 if (debug_displaced)
6003 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6004 "loaded register r%d to r%d\n"), read_reg,
6005 write_reg);
6006 }
6007 else if (debug_displaced)
6008 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6009 "r%d already in the right place\n"),
6010 write_reg);
6011
6012 clobbered &= ~(1 << write_reg);
6013
6014 num_to_shuffle--;
6015 }
6016
6017 write_reg--;
6018 }
6019
6020 /* Restore any registers we scribbled over. */
6021 for (write_reg = 0; clobbered != 0; write_reg++)
6022 {
6023 if ((clobbered & (1 << write_reg)) != 0)
6024 {
6025 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6026 CANNOT_WRITE_PC);
6027 if (debug_displaced)
6028 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6029 "clobbered register r%d\n"), write_reg);
6030 clobbered &= ~(1 << write_reg);
6031 }
6032 }
6033
6034 /* Perform register writeback manually. */
6035 if (dsc->u.block.writeback)
6036 {
6037 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6038
6039 if (dsc->u.block.increment)
6040 new_rn_val += regs_loaded * 4;
6041 else
6042 new_rn_val -= regs_loaded * 4;
6043
6044 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6045 CANNOT_WRITE_PC);
6046 }
6047}
6048
6049/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6050 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6051
6052static int
7ff120b4
YQ
6053arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6054 struct regcache *regs,
6055 struct displaced_step_closure *dsc)
cca44b1b
JB
6056{
6057 int load = bit (insn, 20);
6058 int user = bit (insn, 22);
6059 int increment = bit (insn, 23);
6060 int before = bit (insn, 24);
6061 int writeback = bit (insn, 21);
6062 int rn = bits (insn, 16, 19);
cca44b1b 6063
0963b4bd
MS
6064 /* Block transfers which don't mention PC can be run directly
6065 out-of-line. */
bf9f652a 6066 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6067 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6068
bf9f652a 6069 if (rn == ARM_PC_REGNUM)
cca44b1b 6070 {
0963b4bd
MS
6071 warning (_("displaced: Unpredictable LDM or STM with "
6072 "base register r15"));
7ff120b4 6073 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6074 }
6075
6076 if (debug_displaced)
6077 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6078 "%.8lx\n", (unsigned long) insn);
6079
36073a92 6080 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6081 dsc->u.block.rn = rn;
6082
6083 dsc->u.block.load = load;
6084 dsc->u.block.user = user;
6085 dsc->u.block.increment = increment;
6086 dsc->u.block.before = before;
6087 dsc->u.block.writeback = writeback;
6088 dsc->u.block.cond = bits (insn, 28, 31);
6089
6090 dsc->u.block.regmask = insn & 0xffff;
6091
6092 if (load)
6093 {
6094 if ((insn & 0xffff) == 0xffff)
6095 {
6096 /* LDM with a fully-populated register list. This case is
6097 particularly tricky. Implement for now by fully emulating the
6098 instruction (which might not behave perfectly in all cases, but
6099 these instructions should be rare enough for that not to matter
6100 too much). */
6101 dsc->modinsn[0] = ARM_NOP;
6102
6103 dsc->cleanup = &cleanup_block_load_all;
6104 }
6105 else
6106 {
6107 /* LDM of a list of registers which includes PC. Implement by
6108 rewriting the list of registers to be transferred into a
6109 contiguous chunk r0...rX before doing the transfer, then shuffling
6110 registers into the correct places in the cleanup routine. */
6111 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6112 unsigned int num_in_list = bitcount (regmask), new_regmask;
6113 unsigned int i;
cca44b1b
JB
6114
6115 for (i = 0; i < num_in_list; i++)
36073a92 6116 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6117
6118 /* Writeback makes things complicated. We need to avoid clobbering
6119 the base register with one of the registers in our modified
6120 register list, but just using a different register can't work in
6121 all cases, e.g.:
6122
6123 ldm r14!, {r0-r13,pc}
6124
6125 which would need to be rewritten as:
6126
6127 ldm rN!, {r0-r14}
6128
6129 but that can't work, because there's no free register for N.
6130
6131 Solve this by turning off the writeback bit, and emulating
6132 writeback manually in the cleanup routine. */
6133
6134 if (writeback)
6135 insn &= ~(1 << 21);
6136
6137 new_regmask = (1 << num_in_list) - 1;
6138
6139 if (debug_displaced)
6140 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6141 "{..., pc}: original reg list %.4x, modified "
6142 "list %.4x\n"), rn, writeback ? "!" : "",
6143 (int) insn & 0xffff, new_regmask);
6144
6145 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6146
6147 dsc->cleanup = &cleanup_block_load_pc;
6148 }
6149 }
6150 else
6151 {
6152 /* STM of a list of registers which includes PC. Run the instruction
6153 as-is, but out of line: this will store the wrong value for the PC,
6154 so we must manually fix up the memory in the cleanup routine.
6155 Doing things this way has the advantage that we can auto-detect
6156 the offset of the PC write (which is architecture-dependent) in
6157 the cleanup routine. */
6158 dsc->modinsn[0] = insn;
6159
6160 dsc->cleanup = &cleanup_block_store_pc;
6161 }
6162
6163 return 0;
6164}
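
/* Illustrative sketch, not from the original source: the register-list
   rewrite used above when an LDM includes the PC.  A hypothetical
   helper; for example "ldm r11, {r2, r5, pc}" (mask 0x8024, three
   registers) is executed out of line as "ldm r11, {r0-r2}" (mask
   0x0007), and cleanup_block_load_pc then shuffles the loaded values
   into r2, r5 and the PC.  */

static uint32_t
example_contiguous_regmask (uint32_t original_mask)
{
  unsigned int count = 0;
  uint32_t m;

  for (m = original_mask; m != 0; m &= m - 1)
    count++;			/* Number of registers in the list.  */

  return (1u << count) - 1;	/* Load into r0..r(count-1) instead.  */
}
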
6165
34518530
YQ
6166static int
6167thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6168 struct regcache *regs,
6169 struct displaced_step_closure *dsc)
cca44b1b 6170{
34518530
YQ
6171 int rn = bits (insn1, 0, 3);
6172 int load = bit (insn1, 4);
6173 int writeback = bit (insn1, 5);
cca44b1b 6174
34518530
YQ
6175 /* Block transfers which don't mention PC can be run directly
6176 out-of-line. */
6177 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6178 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6179
34518530
YQ
6180 if (rn == ARM_PC_REGNUM)
6181 {
6182 warning (_("displaced: Unpredictable LDM or STM with "
6183 "base register r15"));
6184 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6185 "unpredictable ldm/stm", dsc);
6186 }
cca44b1b
JB
6187
6188 if (debug_displaced)
34518530
YQ
6189 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6190 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6191
34518530
YQ
6192 /* Clear bit 13, since it should always be zero. */
6193 dsc->u.block.regmask = (insn2 & 0xdfff);
6194 dsc->u.block.rn = rn;
cca44b1b 6195
34518530
YQ
6196 dsc->u.block.load = load;
6197 dsc->u.block.user = 0;
6198 dsc->u.block.increment = bit (insn1, 7);
6199 dsc->u.block.before = bit (insn1, 8);
6200 dsc->u.block.writeback = writeback;
6201 dsc->u.block.cond = INST_AL;
6202 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6203
34518530
YQ
6204 if (load)
6205 {
6206 if (dsc->u.block.regmask == 0xffff)
6207 {
6208 /* This cannot happen: bit 13 was cleared above, so the register mask is never 0xffff. */
6209 gdb_assert (0);
6210 }
6211 else
6212 {
6213 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6214 unsigned int num_in_list = bitcount (regmask), new_regmask;
6215 unsigned int i;
34518530
YQ
6216
6217 for (i = 0; i < num_in_list; i++)
6218 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6219
6220 if (writeback)
6221 insn1 &= ~(1 << 5);
6222
6223 new_regmask = (1 << num_in_list) - 1;
6224
6225 if (debug_displaced)
6226 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6227 "{..., pc}: original reg list %.4x, modified "
6228 "list %.4x\n"), rn, writeback ? "!" : "",
6229 (int) dsc->u.block.regmask, new_regmask);
6230
6231 dsc->modinsn[0] = insn1;
6232 dsc->modinsn[1] = (new_regmask & 0xffff);
6233 dsc->numinsns = 2;
6234
6235 dsc->cleanup = &cleanup_block_load_pc;
6236 }
6237 }
6238 else
6239 {
6240 dsc->modinsn[0] = insn1;
6241 dsc->modinsn[1] = insn2;
6242 dsc->numinsns = 2;
6243 dsc->cleanup = &cleanup_block_store_pc;
6244 }
6245 return 0;
6246}
6247
d9311bfa
AT
6248/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6249 This is used to avoid a dependency on BFD's bfd_endian enum. */
6250
6251ULONGEST
6252arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6253 int byte_order)
6254{
5f2dfcfd
AT
6255 return read_memory_unsigned_integer (memaddr, len,
6256 (enum bfd_endian) byte_order);
d9311bfa
AT
6257}
6258
6259/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6260
6261CORE_ADDR
6262arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6263 CORE_ADDR val)
6264{
6265 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6266}
6267
6268/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6269
e7cf25a8 6270static CORE_ADDR
553cb527 6271arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6272{
d9311bfa
AT
6273 return 0;
6274}
6275
6276/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6277
6278int
6279arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6280{
6281 return arm_is_thumb (self->regcache);
6282}
6283
6284/* single_step() is called just before we want to resume the inferior,
6285 if we want to single-step it but there is no hardware or kernel
6286 single-step support.  We compute the possible targets of the
6287 upcoming instruction and set breakpoints on them. */
6288
6289int
6290arm_software_single_step (struct frame_info *frame)
6291{
6292 struct regcache *regcache = get_current_regcache ();
6293 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6294 struct address_space *aspace = get_regcache_aspace (regcache);
6295 struct arm_get_next_pcs next_pcs_ctx;
6296 CORE_ADDR pc;
6297 int i;
6298 VEC (CORE_ADDR) *next_pcs = NULL;
6299 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6300
6301 arm_get_next_pcs_ctor (&next_pcs_ctx,
6302 &arm_get_next_pcs_ops,
6303 gdbarch_byte_order (gdbarch),
6304 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6305 0,
d9311bfa
AT
6306 regcache);
6307
4d18591b 6308 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa
AT
6309
6310 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
771da62d
YQ
6311 {
6312 pc = gdbarch_addr_bits_remove (gdbarch, pc);
0bc5d801 6313 VEC_replace (CORE_ADDR, next_pcs, i, pc);
771da62d 6314 }
d9311bfa 6315
0bc5d801
YQ
6316 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6317 insert_single_step_breakpoint (gdbarch, aspace, pc);
6318
d9311bfa
AT
6319 do_cleanups (old_chain);
6320
6321 return 1;
6322}
6323
34518530
YQ
6324/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6325 for Linux, where some SVC instructions must be treated specially. */
6326
6327static void
6328cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6329 struct displaced_step_closure *dsc)
6330{
6331 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6332
6333 if (debug_displaced)
6334 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6335 "%.8lx\n", (unsigned long) resume_addr);
6336
6337 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6338}
6339
6340
6341/* Common copy routine for the svc instruction. */
6342
6343static int
6344install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6345 struct displaced_step_closure *dsc)
6346{
6347 /* Preparation: none.
6348 Insn: unmodified svc.
6349 Cleanup: pc <- insn_addr + insn_size. */
6350
6351 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6352 instruction. */
6353 dsc->wrote_to_pc = 1;
6354
6355 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6356 if (dsc->u.svc.copy_svc_os)
6357 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6358 else
6359 {
6360 dsc->cleanup = &cleanup_svc;
6361 return 0;
6362 }
34518530
YQ
6363}
6364
6365static int
6366arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6367 struct regcache *regs, struct displaced_step_closure *dsc)
6368{
6369
6370 if (debug_displaced)
6371 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6372 (unsigned long) insn);
6373
6374 dsc->modinsn[0] = insn;
6375
6376 return install_svc (gdbarch, regs, dsc);
6377}
6378
6379static int
6380thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6381 struct regcache *regs, struct displaced_step_closure *dsc)
6382{
6383
6384 if (debug_displaced)
6385 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6386 insn);
bd18283a 6387
34518530
YQ
6388 dsc->modinsn[0] = insn;
6389
6390 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6391}
6392
6393/* Copy undefined instructions. */
6394
6395static int
7ff120b4
YQ
6396arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6397 struct displaced_step_closure *dsc)
cca44b1b
JB
6398{
6399 if (debug_displaced)
0963b4bd
MS
6400 fprintf_unfiltered (gdb_stdlog,
6401 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6402 (unsigned long) insn);
6403
6404 dsc->modinsn[0] = insn;
6405
6406 return 0;
6407}
6408
34518530
YQ
6409static int
6410thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6411 struct displaced_step_closure *dsc)
6412{
6413
6414 if (debug_displaced)
6415 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6416 "%.4x %.4x\n", (unsigned short) insn1,
6417 (unsigned short) insn2);
6418
6419 dsc->modinsn[0] = insn1;
6420 dsc->modinsn[1] = insn2;
6421 dsc->numinsns = 2;
6422
6423 return 0;
6424}
6425
cca44b1b
JB
6426/* Copy unpredictable instructions. */
6427
6428static int
7ff120b4
YQ
6429arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6430 struct displaced_step_closure *dsc)
cca44b1b
JB
6431{
6432 if (debug_displaced)
6433 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6434 "%.8lx\n", (unsigned long) insn);
6435
6436 dsc->modinsn[0] = insn;
6437
6438 return 0;
6439}
6440
6441/* The decode_* functions are instruction decoding helpers. They mostly follow
6442 the presentation in the ARM ARM. */
6443
6444static int
7ff120b4
YQ
6445arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6446 struct regcache *regs,
6447 struct displaced_step_closure *dsc)
cca44b1b
JB
6448{
6449 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6450 unsigned int rn = bits (insn, 16, 19);
6451
6452 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6453 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6454 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6455 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6456 else if ((op1 & 0x60) == 0x20)
7ff120b4 6457 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6458 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6459 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6460 dsc);
cca44b1b 6461 else if ((op1 & 0x77) == 0x41)
7ff120b4 6462 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6463 else if ((op1 & 0x77) == 0x45)
7ff120b4 6464 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6465 else if ((op1 & 0x77) == 0x51)
6466 {
6467 if (rn != 0xf)
7ff120b4 6468 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6469 else
7ff120b4 6470 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6471 }
6472 else if ((op1 & 0x77) == 0x55)
7ff120b4 6473 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6474 else if (op1 == 0x57)
6475 switch (op2)
6476 {
7ff120b4
YQ
6477 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6478 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6479 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6480 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6481 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6482 }
6483 else if ((op1 & 0x63) == 0x43)
7ff120b4 6484 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6485 else if ((op2 & 0x1) == 0x0)
6486 switch (op1 & ~0x80)
6487 {
6488 case 0x61:
7ff120b4 6489 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6490 case 0x65:
7ff120b4 6491 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6492 case 0x71: case 0x75:
6493 /* pld/pldw reg. */
7ff120b4 6494 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6495 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6496 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6497 default:
7ff120b4 6498 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6499 }
6500 else
7ff120b4 6501 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6502}
6503
6504static int
7ff120b4
YQ
6505arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6506 struct regcache *regs,
6507 struct displaced_step_closure *dsc)
cca44b1b
JB
6508{
6509 if (bit (insn, 27) == 0)
7ff120b4 6510 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6511 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6512 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6513 {
6514 case 0x0: case 0x2:
7ff120b4 6515 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6516
6517 case 0x1: case 0x3:
7ff120b4 6518 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6519
6520 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6521 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6522
6523 case 0x8:
6524 switch ((insn & 0xe00000) >> 21)
6525 {
6526 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6527 /* stc/stc2. */
7ff120b4 6528 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6529
6530 case 0x2:
7ff120b4 6531 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6532
6533 default:
7ff120b4 6534 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6535 }
6536
6537 case 0x9:
6538 {
6539 int rn_f = (bits (insn, 16, 19) == 0xf);
6540 switch ((insn & 0xe00000) >> 21)
6541 {
6542 case 0x1: case 0x3:
6543 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6544 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6545 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6546
6547 case 0x2:
7ff120b4 6548 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6549
6550 case 0x4: case 0x5: case 0x6: case 0x7:
6551 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6552 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6553 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6554
6555 default:
7ff120b4 6556 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6557 }
6558 }
6559
6560 case 0xa:
7ff120b4 6561 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6562
6563 case 0xb:
6564 if (bits (insn, 16, 19) == 0xf)
6565 /* ldc/ldc2 lit. */
7ff120b4 6566 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6567 else
7ff120b4 6568 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6569
6570 case 0xc:
6571 if (bit (insn, 4))
7ff120b4 6572 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6573 else
7ff120b4 6574 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6575
6576 case 0xd:
6577 if (bit (insn, 4))
7ff120b4 6578 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6579 else
7ff120b4 6580 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6581
6582 default:
7ff120b4 6583 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6584 }
6585}
6586
6587/* Decode miscellaneous instructions in dp/misc encoding space. */
6588
6589static int
7ff120b4
YQ
6590arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6591 struct regcache *regs,
6592 struct displaced_step_closure *dsc)
cca44b1b
JB
6593{
6594 unsigned int op2 = bits (insn, 4, 6);
6595 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6596
6597 switch (op2)
6598 {
6599 case 0x0:
7ff120b4 6600 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6601
6602 case 0x1:
6603 if (op == 0x1) /* bx. */
7ff120b4 6604 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6605 else if (op == 0x3)
7ff120b4 6606 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6607 else
7ff120b4 6608 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6609
6610 case 0x2:
6611 if (op == 0x1)
6612 /* Not really supported. */
7ff120b4 6613 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6614 else
7ff120b4 6615 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6616
6617 case 0x3:
6618 if (op == 0x1)
7ff120b4 6619 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6620 regs, dsc); /* blx register. */
cca44b1b 6621 else
7ff120b4 6622 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6623
6624 case 0x5:
7ff120b4 6625 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6626
6627 case 0x7:
6628 if (op == 0x1)
7ff120b4 6629 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6630 else if (op == 0x3)
6631 /* Not really supported. */
7ff120b4 6632 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6633
6634 default:
7ff120b4 6635 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6636 }
6637}
6638
6639static int
7ff120b4
YQ
6640arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6641 struct regcache *regs,
6642 struct displaced_step_closure *dsc)
cca44b1b
JB
6643{
6644 if (bit (insn, 25))
6645 switch (bits (insn, 20, 24))
6646 {
6647 case 0x10:
7ff120b4 6648 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6649
6650 case 0x14:
7ff120b4 6651 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6652
6653 case 0x12: case 0x16:
7ff120b4 6654 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6655
6656 default:
7ff120b4 6657 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6658 }
6659 else
6660 {
6661 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6662
6663 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6664 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6665 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6666 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6667 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6668 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6669 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6670 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6671 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6672 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6673 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6674 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6675 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6676 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6677 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6678 dsc);
cca44b1b
JB
6679 }
6680
6681 /* Should be unreachable. */
6682 return 1;
6683}
6684
6685static int
7ff120b4
YQ
6686arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6687 struct regcache *regs,
6688 struct displaced_step_closure *dsc)
cca44b1b
JB
6689{
6690 int a = bit (insn, 25), b = bit (insn, 4);
6691 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6692
6693 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6694 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6695 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6696 else if ((!a && (op1 & 0x17) == 0x02)
6697 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6698 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6699 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6700 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6701 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6702 else if ((!a && (op1 & 0x17) == 0x03)
6703 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6704 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6705 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6706 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6707 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6708 else if ((!a && (op1 & 0x17) == 0x06)
6709 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6710 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6711 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6712 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6713 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6714 else if ((!a && (op1 & 0x17) == 0x07)
6715 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6716 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6717
6718 /* Should be unreachable. */
6719 return 1;
6720}
6721
6722static int
7ff120b4
YQ
6723arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6724 struct displaced_step_closure *dsc)
cca44b1b
JB
6725{
6726 switch (bits (insn, 20, 24))
6727 {
6728 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6729 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6730
6731 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6732 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6733
6734 case 0x08: case 0x09: case 0x0a: case 0x0b:
6735 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6736 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6737 "decode/pack/unpack/saturate/reverse", dsc);
6738
6739 case 0x18:
6740 if (bits (insn, 5, 7) == 0) /* op2. */
6741 {
6742 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6743 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6744 else
7ff120b4 6745 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6746 }
6747 else
7ff120b4 6748 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6749
6750 case 0x1a: case 0x1b:
6751 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6752 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6753 else
7ff120b4 6754 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6755
6756 case 0x1c: case 0x1d:
6757 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6758 {
6759 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6760 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6761 else
7ff120b4 6762 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6763 }
6764 else
7ff120b4 6765 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6766
6767 case 0x1e: case 0x1f:
6768 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6769 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6770 else
7ff120b4 6771 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6772 }
6773
6774 /* Should be unreachable. */
6775 return 1;
6776}
6777
6778static int
615234c1 6779arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4
YQ
6780 struct regcache *regs,
6781 struct displaced_step_closure *dsc)
cca44b1b
JB
6782{
6783 if (bit (insn, 25))
7ff120b4 6784 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6785 else
7ff120b4 6786 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6787}
6788
6789static int
7ff120b4
YQ
6790arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6791 struct regcache *regs,
6792 struct displaced_step_closure *dsc)
cca44b1b
JB
6793{
6794 unsigned int opcode = bits (insn, 20, 24);
6795
6796 switch (opcode)
6797 {
6798 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6799 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6800
6801 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6802 case 0x12: case 0x16:
7ff120b4 6803 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6804
6805 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6806 case 0x13: case 0x17:
7ff120b4 6807 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6808
6809 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6810 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6811 /* Note: no writeback for these instructions. Bit 25 will always be
6812 zero though (via caller), so the following works OK. */
7ff120b4 6813 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6814 }
6815
6816 /* Should be unreachable. */
6817 return 1;
6818}
6819
34518530
YQ
6820/* Decode shifted register instructions. */
6821
6822static int
6823thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6824 uint16_t insn2, struct regcache *regs,
6825 struct displaced_step_closure *dsc)
6826{
 6827  /* PC is only allowed to be used in the MOV instruction.  */
6828
6829 unsigned int op = bits (insn1, 5, 8);
6830 unsigned int rn = bits (insn1, 0, 3);
6831
6832 if (op == 0x2 && rn == 0xf) /* MOV */
6833 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6834 else
6835 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6836 "dp (shift reg)", dsc);
6837}
6838
6839
6840/* Decode extension register load/store. Exactly the same as
6841 arm_decode_ext_reg_ld_st. */
6842
6843static int
6844thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6845 uint16_t insn2, struct regcache *regs,
6846 struct displaced_step_closure *dsc)
6847{
6848 unsigned int opcode = bits (insn1, 4, 8);
6849
6850 switch (opcode)
6851 {
6852 case 0x04: case 0x05:
6853 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6854 "vfp/neon vmov", dsc);
6855
6856 case 0x08: case 0x0c: /* 01x00 */
6857 case 0x0a: case 0x0e: /* 01x10 */
6858 case 0x12: case 0x16: /* 10x10 */
6859 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6860 "vfp/neon vstm/vpush", dsc);
6861
6862 case 0x09: case 0x0d: /* 01x01 */
6863 case 0x0b: case 0x0f: /* 01x11 */
6864 case 0x13: case 0x17: /* 10x11 */
6865 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6866 "vfp/neon vldm/vpop", dsc);
6867
6868 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6869 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6870 "vstr", dsc);
6871 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6872 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6873 }
6874
6875 /* Should be unreachable. */
6876 return 1;
6877}
6878
cca44b1b 6879static int
12545665 6880arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6881 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6882{
6883 unsigned int op1 = bits (insn, 20, 25);
6884 int op = bit (insn, 4);
6885 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6886
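  /* Coprocessor numbers 0b101x (10 and 11) carry the VFP/Neon encodings;
     the (coproc & 0xe) == 0xa tests below separate those from generic
     coprocessor instructions (cdp/mcr/mrc/ldc/stc and friends).  */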
6887 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6888 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6889 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6890 && (coproc & 0xe) != 0xa)
6891 /* stc/stc2. */
7ff120b4 6892 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6893 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6894 && (coproc & 0xe) != 0xa)
6895 /* ldc/ldc2 imm/lit. */
7ff120b4 6896 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6897 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6898 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6899 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6900 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6901 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6902 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6903 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6904 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6905 else if ((op1 & 0x30) == 0x20 && !op)
6906 {
6907 if ((coproc & 0xe) == 0xa)
7ff120b4 6908 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6909 else
7ff120b4 6910 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6911 }
6912 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6913 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6914 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6915 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6916 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6917 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6918 else if ((op1 & 0x30) == 0x30)
7ff120b4 6919 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6920 else
7ff120b4 6921 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6922}
6923
34518530
YQ
6924static int
6925thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6926 uint16_t insn2, struct regcache *regs,
6927 struct displaced_step_closure *dsc)
6928{
6929 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6930 unsigned int bit_5_8 = bits (insn1, 5, 8);
6931 unsigned int bit_9 = bit (insn1, 9);
6932 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6933
6934 if (bit_9 == 0)
6935 {
6936 if (bit_5_8 == 2)
6937 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6938 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6939 dsc);
6940 else if (bit_5_8 == 0) /* UNDEFINED. */
6941 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6942 else
6943 {
 6944	      /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
6945 if ((coproc & 0xe) == 0xa)
6946 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6947 dsc);
6948 else /* coproc is not 101x. */
6949 {
6950 if (bit_4 == 0) /* STC/STC2. */
6951 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6952 "stc/stc2", dsc);
 6953	      else /* LDC/LDC2 {literal, immediate}.  */
6954 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6955 regs, dsc);
6956 }
6957 }
6958 }
6959 else
6960 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6961
6962 return 0;
6963}
6964
6965static void
6966install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6967 struct displaced_step_closure *dsc, int rd)
6968{
6969 /* ADR Rd, #imm
6970
6971 Rewrite as:
6972
6973 Preparation: Rd <- PC
6974 Insn: ADD Rd, #imm
6975 Cleanup: Null.
6976 */
6977
6978 /* Rd <- PC */
6979 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6980 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6981}
6982
6983static int
6984thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6985 struct displaced_step_closure *dsc,
6986 int rd, unsigned int imm)
6987{
6988
6989 /* Encoding T2: ADDS Rd, #imm */
6990 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6991
6992 install_pc_relative (gdbarch, regs, dsc, rd);
6993
6994 return 0;
6995}
6996
6997static int
6998thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6999 struct regcache *regs,
7000 struct displaced_step_closure *dsc)
7001{
7002 unsigned int rd = bits (insn, 8, 10);
7003 unsigned int imm8 = bits (insn, 0, 7);
7004
7005 if (debug_displaced)
7006 fprintf_unfiltered (gdb_stdlog,
7007 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7008 rd, imm8, insn);
7009
7010 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7011}
7012
7013static int
7014thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7015 uint16_t insn2, struct regcache *regs,
7016 struct displaced_step_closure *dsc)
7017{
7018 unsigned int rd = bits (insn2, 8, 11);
 7019  /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
 7020     extract the raw immediate encoding rather than computing the immediate
 7021     value.  When generating the ADD or SUB instruction, we can simply OR the
 7022     immediate into the encoding.  */
7023 unsigned int imm_3_8 = insn2 & 0x70ff;
7024 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
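  /* Note: imm_3_8 keeps the imm3 (insn2 bits 14-12) and imm8 (insn2 bits 7-0)
     fields in place, and imm_i keeps the i bit (insn1 bit 10), so both can be
     ORed directly into the ADD/SUB encodings built below.  */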
7025
7026 if (debug_displaced)
7027 fprintf_unfiltered (gdb_stdlog,
7028 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7029 rd, imm_i, imm_3_8, insn1, insn2);
7030
7031 if (bit (insn1, 7)) /* Encoding T2 */
7032 {
 7033      /* SUB (immediate), encoding T3: SUB Rd, Rd, #imm */
7034 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7035 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7036 }
7037 else /* Encoding T3 */
7038 {
 7039      /* ADD (immediate), encoding T3: ADD Rd, Rd, #imm */
7040 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7041 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7042 }
7043 dsc->numinsns = 2;
7044
7045 install_pc_relative (gdbarch, regs, dsc, rd);
7046
7047 return 0;
7048}
7049
7050static int
615234c1 7051thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7052 struct regcache *regs,
7053 struct displaced_step_closure *dsc)
7054{
7055 unsigned int rt = bits (insn1, 8, 10);
7056 unsigned int pc;
7057 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
7058
7059 /* LDR Rd, #imm8
7060
 7061     Rewrite as:
7062
7063 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7064
7065 Insn: LDR R0, [R2, R3];
7066 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7067
7068 if (debug_displaced)
7069 fprintf_unfiltered (gdb_stdlog,
7070 "displaced: copying thumb ldr r%d [pc #%d]\n"
7071 , rt, imm8);
7072
7073 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7074 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7075 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7076 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7077 /* The assembler calculates the required value of the offset from the
7078 Align(PC,4) value of this instruction to the label. */
7079 pc = pc & 0xfffffffc;
7080
7081 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7082 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7083
7084 dsc->rd = rt;
7085 dsc->u.ldst.xfersize = 4;
7086 dsc->u.ldst.rn = 0;
7087 dsc->u.ldst.immed = 0;
7088 dsc->u.ldst.writeback = 0;
7089 dsc->u.ldst.restore_r4 = 0;
7090
7091 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7092
7093 dsc->cleanup = &cleanup_load;
7094
7095 return 0;
7096}
7097
 7098/* Copy Thumb cbnz/cbz instruction.  */
7099
7100static int
7101thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7102 struct regcache *regs,
7103 struct displaced_step_closure *dsc)
7104{
7105 int non_zero = bit (insn1, 11);
7106 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7107 CORE_ADDR from = dsc->insn_addr;
7108 int rn = bits (insn1, 0, 2);
7109 int rn_val = displaced_read_reg (regs, dsc, rn);
7110
7111 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
 7112  /* CBNZ and CBZ do not affect the condition flags.  If the condition is
 7113     true, set it to INST_AL so cleanup_branch knows the branch is taken;
 7114     otherwise leave it alone and cleanup_branch will do nothing.  */
7115 if (dsc->u.branch.cond)
7116 {
7117 dsc->u.branch.cond = INST_AL;
7118 dsc->u.branch.dest = from + 4 + imm5;
7119 }
7120 else
7121 dsc->u.branch.dest = from + 2;
7122
7123 dsc->u.branch.link = 0;
7124 dsc->u.branch.exchange = 0;
7125
7126 if (debug_displaced)
7127 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7128 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7129 rn, rn_val, insn1, dsc->u.branch.dest);
7130
7131 dsc->modinsn[0] = THUMB_NOP;
7132
7133 dsc->cleanup = &cleanup_branch;
7134 return 0;
7135}
7136
 7137/* Copy Table Branch Byte/Halfword.  */
7138static int
7139thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7140 uint16_t insn2, struct regcache *regs,
7141 struct displaced_step_closure *dsc)
7142{
7143 ULONGEST rn_val, rm_val;
7144 int is_tbh = bit (insn2, 4);
7145 CORE_ADDR halfwords = 0;
7146 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7147
7148 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7149 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7150
7151 if (is_tbh)
7152 {
7153 gdb_byte buf[2];
7154
7155 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7156 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7157 }
7158 else
7159 {
7160 gdb_byte buf[1];
7161
7162 target_read_memory (rn_val + rm_val, buf, 1);
7163 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7164 }
7165
7166 if (debug_displaced)
7167 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7168 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7169 (unsigned int) rn_val, (unsigned int) rm_val,
7170 (unsigned int) halfwords);
7171
7172 dsc->u.branch.cond = INST_AL;
7173 dsc->u.branch.link = 0;
7174 dsc->u.branch.exchange = 0;
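  /* The table holds halfword offsets: the branch target is the Thumb PC of
     this instruction (insn_addr + 4) plus twice the loaded table entry.  */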
7175 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7176
7177 dsc->cleanup = &cleanup_branch;
7178
7179 return 0;
7180}
7181
7182static void
7183cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7184 struct displaced_step_closure *dsc)
7185{
7186 /* PC <- r7 */
7187 int val = displaced_read_reg (regs, dsc, 7);
7188 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7189
7190 /* r7 <- r8 */
7191 val = displaced_read_reg (regs, dsc, 8);
7192 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7193
7194 /* r8 <- tmp[0] */
7195 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7196
7197}
7198
7199static int
615234c1 7200thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7201 struct regcache *regs,
7202 struct displaced_step_closure *dsc)
7203{
7204 dsc->u.block.regmask = insn1 & 0x00ff;
7205
7206 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
 7207     to:
7208
7209 (1) register list is full, that is, r0-r7 are used.
7210 Prepare: tmp[0] <- r8
7211
7212 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7213 MOV r8, r7; Move value of r7 to r8;
7214 POP {r7}; Store PC value into r7.
7215
 7216     Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7217
7218 (2) register list is not full, supposing there are N registers in
 7219     the register list (except PC, 0 <= N <= 7).
7220 Prepare: for each i, 0 - N, tmp[i] <- ri.
7221
7222 POP {r0, r1, ...., rN};
7223
 7224     Cleanup: Set the registers named in the original reglist from r0 - rN,
 7225     then restore r0 - rN from tmp[].
7226 */
7227 if (debug_displaced)
7228 fprintf_unfiltered (gdb_stdlog,
7229 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7230 dsc->u.block.regmask, insn1);
7231
7232 if (dsc->u.block.regmask == 0xff)
7233 {
7234 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7235
7236 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7237 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7238 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7239
7240 dsc->numinsns = 3;
7241 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7242 }
7243 else
7244 {
7245 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7246 unsigned int i;
7247 unsigned int new_regmask;
34518530
YQ
7248
7249 for (i = 0; i < num_in_list + 1; i++)
7250 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7251
7252 new_regmask = (1 << (num_in_list + 1)) - 1;
7253
7254 if (debug_displaced)
7255 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7256 "{..., pc}: original reg list %.4x,"
7257 " modified list %.4x\n"),
7258 (int) dsc->u.block.regmask, new_regmask);
7259
7260 dsc->u.block.regmask |= 0x8000;
7261 dsc->u.block.writeback = 0;
7262 dsc->u.block.cond = INST_AL;
7263
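      /* Rewrite the POP: clear the PC bit and the original register list
	 (bits 0-8 of the encoding) and substitute the contiguous mask
	 r0..rN computed above.  */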
7264 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7265
7266 dsc->cleanup = &cleanup_block_load_pc;
7267 }
7268
7269 return 0;
7270}
7271
7272static void
7273thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7274 struct regcache *regs,
7275 struct displaced_step_closure *dsc)
7276{
7277 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7278 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7279 int err = 0;
7280
7281 /* 16-bit thumb instructions. */
7282 switch (op_bit_12_15)
7283 {
 7284    /* Shift (immediate), add, subtract, move and compare.  */
7285 case 0: case 1: case 2: case 3:
7286 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7287 "shift/add/sub/mov/cmp",
7288 dsc);
7289 break;
7290 case 4:
7291 switch (op_bit_10_11)
7292 {
7293 case 0: /* Data-processing */
7294 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7295 "data-processing",
7296 dsc);
7297 break;
7298 case 1: /* Special data instructions and branch and exchange. */
7299 {
7300 unsigned short op = bits (insn1, 7, 9);
7301 if (op == 6 || op == 7) /* BX or BLX */
7302 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7303 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7304 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7305 else
7306 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7307 dsc);
7308 }
7309 break;
7310 default: /* LDR (literal) */
7311 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7312 }
7313 break;
7314 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7315 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7316 break;
7317 case 10:
7318 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7319 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7320 else /* Generate SP-relative address */
7321 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7322 break;
7323 case 11: /* Misc 16-bit instructions */
7324 {
7325 switch (bits (insn1, 8, 11))
7326 {
7327 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7328 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7329 break;
7330 case 12: case 13: /* POP */
7331 if (bit (insn1, 8)) /* PC is in register list. */
7332 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7333 else
7334 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7335 break;
7336 case 15: /* If-Then, and hints */
7337 if (bits (insn1, 0, 3))
7338 /* If-Then makes up to four following instructions conditional.
 7339	       The IT instruction itself is not conditional, so handle it as
 7340	       an ordinary unmodified instruction.  */
7341 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7342 dsc);
7343 else
7344 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7345 break;
7346 default:
7347 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7348 }
7349 }
7350 break;
7351 case 12:
7352 if (op_bit_10_11 < 2) /* Store multiple registers */
7353 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7354 else /* Load multiple registers */
7355 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7356 break;
7357 case 13: /* Conditional branch and supervisor call */
7358 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7359 err = thumb_copy_b (gdbarch, insn1, dsc);
7360 else
7361 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7362 break;
7363 case 14: /* Unconditional branch */
7364 err = thumb_copy_b (gdbarch, insn1, dsc);
7365 break;
7366 default:
7367 err = 1;
7368 }
7369
7370 if (err)
7371 internal_error (__FILE__, __LINE__,
7372 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7373}
7374
7375static int
7376decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7377 uint16_t insn1, uint16_t insn2,
7378 struct regcache *regs,
7379 struct displaced_step_closure *dsc)
7380{
7381 int rt = bits (insn2, 12, 15);
7382 int rn = bits (insn1, 0, 3);
7383 int op1 = bits (insn1, 7, 8);
34518530
YQ
7384
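  /* Bits 5-6 of the first halfword select the access size (0 = byte,
     1 = halfword, 2 = word); rt == 0xf selects the preload/hint forms and
     rn == 0xf the PC-relative (literal) forms.  */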
7385 switch (bits (insn1, 5, 6))
7386 {
7387 case 0: /* Load byte and memory hints */
7388 if (rt == 0xf) /* PLD/PLI */
7389 {
7390 if (rn == 0xf)
7391 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7392 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7393 else
7394 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7395 "pli/pld", dsc);
7396 }
7397 else
7398 {
7399 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7400 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7401 1);
7402 else
7403 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7404 "ldrb{reg, immediate}/ldrbt",
7405 dsc);
7406 }
7407
7408 break;
7409 case 1: /* Load halfword and memory hints. */
7410 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7411 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7412 "pld/unalloc memhint", dsc);
7413 else
7414 {
7415 if (rn == 0xf)
7416 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7417 2);
7418 else
7419 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7420 "ldrh/ldrht", dsc);
7421 }
7422 break;
7423 case 2: /* Load word */
7424 {
7425 int insn2_bit_8_11 = bits (insn2, 8, 11);
7426
7427 if (rn == 0xf)
7428 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7429 else if (op1 == 0x1) /* Encoding T3 */
7430 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7431 0, 1);
7432 else /* op1 == 0x0 */
7433 {
7434 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7435 /* LDR (immediate) */
7436 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7437 dsc, bit (insn2, 8), 1);
7438 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7439 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7440 "ldrt", dsc);
7441 else
7442 /* LDR (register) */
7443 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7444 dsc, 0, 0);
7445 }
7446 break;
7447 }
7448 default:
7449 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7450 break;
7451 }
7452 return 0;
7453}
7454
7455static void
7456thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7457 uint16_t insn2, struct regcache *regs,
7458 struct displaced_step_closure *dsc)
7459{
7460 int err = 0;
7461 unsigned short op = bit (insn2, 15);
7462 unsigned int op1 = bits (insn1, 11, 12);
7463
7464 switch (op1)
7465 {
7466 case 1:
7467 {
7468 switch (bits (insn1, 9, 10))
7469 {
7470 case 0:
7471 if (bit (insn1, 6))
7472 {
 7473		  /* Load/store {dual, exclusive}, table branch.  */
7474 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7475 && bits (insn2, 5, 7) == 0)
7476 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7477 dsc);
7478 else
 7479		    /* PC is not allowed to be used in load/store {dual, exclusive}
7480 instructions. */
7481 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7482 "load/store dual/ex", dsc);
7483 }
7484 else /* load/store multiple */
7485 {
7486 switch (bits (insn1, 7, 8))
7487 {
7488 case 0: case 3: /* SRS, RFE */
7489 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7490 "srs/rfe", dsc);
7491 break;
7492 case 1: case 2: /* LDM/STM/PUSH/POP */
7493 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7494 break;
7495 }
7496 }
7497 break;
7498
7499 case 1:
7500 /* Data-processing (shift register). */
7501 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7502 dsc);
7503 break;
7504 default: /* Coprocessor instructions. */
7505 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7506 break;
7507 }
7508 break;
7509 }
7510 case 2: /* op1 = 2 */
7511 if (op) /* Branch and misc control. */
7512 {
7513 if (bit (insn2, 14) /* BLX/BL */
7514 || bit (insn2, 12) /* Unconditional branch */
7515 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7516 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7517 else
7518 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7519 "misc ctrl", dsc);
7520 }
7521 else
7522 {
7523 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7524 {
7525 int op = bits (insn1, 4, 8);
7526 int rn = bits (insn1, 0, 3);
7527 if ((op == 0 || op == 0xa) && rn == 0xf)
7528 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7529 regs, dsc);
7530 else
7531 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7532 "dp/pb", dsc);
7533 }
 7534	  else /* Data processing (modified immediate) */
7535 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7536 "dp/mi", dsc);
7537 }
7538 break;
7539 case 3: /* op1 = 3 */
7540 switch (bits (insn1, 9, 10))
7541 {
7542 case 0:
7543 if (bit (insn1, 4))
7544 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7545 regs, dsc);
7546 else /* NEON Load/Store and Store single data item */
7547 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7548 "neon elt/struct load/store",
7549 dsc);
7550 break;
7551 case 1: /* op1 = 3, bits (9, 10) == 1 */
7552 switch (bits (insn1, 7, 8))
7553 {
7554 case 0: case 1: /* Data processing (register) */
7555 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7556 "dp(reg)", dsc);
7557 break;
7558 case 2: /* Multiply and absolute difference */
7559 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7560 "mul/mua/diff", dsc);
7561 break;
7562 case 3: /* Long multiply and divide */
7563 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7564 "lmul/lmua", dsc);
7565 break;
7566 }
7567 break;
7568 default: /* Coprocessor instructions */
7569 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7570 break;
7571 }
7572 break;
7573 default:
7574 err = 1;
7575 }
7576
7577 if (err)
7578 internal_error (__FILE__, __LINE__,
7579 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7580
7581}
7582
b434a28f
YQ
7583static void
7584thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7585 struct regcache *regs,
b434a28f
YQ
7586 struct displaced_step_closure *dsc)
7587{
34518530
YQ
7588 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7589 uint16_t insn1
7590 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7591
7592 if (debug_displaced)
7593 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7594 "at %.8lx\n", insn1, (unsigned long) from);
7595
7596 dsc->is_thumb = 1;
7597 dsc->insn_size = thumb_insn_size (insn1);
7598 if (thumb_insn_size (insn1) == 4)
7599 {
7600 uint16_t insn2
7601 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7602 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7603 }
7604 else
7605 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7606}
7607
cca44b1b 7608void
b434a28f
YQ
7609arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7610 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7611 struct displaced_step_closure *dsc)
7612{
7613 int err = 0;
b434a28f
YQ
7614 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7615 uint32_t insn;
cca44b1b
JB
7616
7617 /* Most displaced instructions use a 1-instruction scratch space, so set this
7618 here and override below if/when necessary. */
7619 dsc->numinsns = 1;
7620 dsc->insn_addr = from;
7621 dsc->scratch_base = to;
7622 dsc->cleanup = NULL;
7623 dsc->wrote_to_pc = 0;
7624
b434a28f 7625 if (!displaced_in_arm_mode (regs))
12545665 7626 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7627
4db71c0b
YQ
7628 dsc->is_thumb = 0;
7629 dsc->insn_size = 4;
b434a28f
YQ
7630 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7631 if (debug_displaced)
7632 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7633 "at %.8lx\n", (unsigned long) insn,
7634 (unsigned long) from);
7635
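  /* Instructions in the NV space (cond == 0xf) are decoded separately below;
     everything else is dispatched on a selector built from bits 27-25 of the
     instruction plus bit 4: 0x0-0x3 data-processing/misc, 0x4-0x6 load/store
     word and unsigned byte, 0x7 media, 0x8-0xb branch and block transfer,
     0xc-0xf coprocessor and supervisor call.  */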
cca44b1b 7636 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7637 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7638 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7639 {
7640 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7641 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7642 break;
7643
7644 case 0x4: case 0x5: case 0x6:
7ff120b4 7645 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7646 break;
7647
7648 case 0x7:
7ff120b4 7649 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7650 break;
7651
7652 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7653 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7654 break;
7655
7656 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7657 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7658 break;
7659 }
7660
7661 if (err)
7662 internal_error (__FILE__, __LINE__,
7663 _("arm_process_displaced_insn: Instruction decode error"));
7664}
7665
7666/* Actually set up the scratch space for a displaced instruction. */
7667
7668void
7669arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7670 CORE_ADDR to, struct displaced_step_closure *dsc)
7671{
7672 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7673 unsigned int i, len, offset;
cca44b1b 7674 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7675 int size = dsc->is_thumb? 2 : 4;
948f8e3d 7676 const gdb_byte *bkp_insn;
cca44b1b 7677
4db71c0b 7678 offset = 0;
cca44b1b
JB
7679 /* Poke modified instruction(s). */
7680 for (i = 0; i < dsc->numinsns; i++)
7681 {
7682 if (debug_displaced)
4db71c0b
YQ
7683 {
7684 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7685 if (size == 4)
7686 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7687 dsc->modinsn[i]);
7688 else if (size == 2)
7689 fprintf_unfiltered (gdb_stdlog, "%.4x",
7690 (unsigned short)dsc->modinsn[i]);
7691
7692 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7693 (unsigned long) to + offset);
7694
7695 }
7696 write_memory_unsigned_integer (to + offset, size,
7697 byte_order_for_code,
cca44b1b 7698 dsc->modinsn[i]);
4db71c0b
YQ
7699 offset += size;
7700 }
7701
7702 /* Choose the correct breakpoint instruction. */
7703 if (dsc->is_thumb)
7704 {
7705 bkp_insn = tdep->thumb_breakpoint;
7706 len = tdep->thumb_breakpoint_size;
7707 }
7708 else
7709 {
7710 bkp_insn = tdep->arm_breakpoint;
7711 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7712 }
7713
7714 /* Put breakpoint afterwards. */
4db71c0b 7715 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7716
7717 if (debug_displaced)
7718 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7719 paddress (gdbarch, from), paddress (gdbarch, to));
7720}
7721
cca44b1b
JB
7722/* Entry point for cleaning things up after a displaced instruction has been
7723 single-stepped. */
7724
7725void
7726arm_displaced_step_fixup (struct gdbarch *gdbarch,
7727 struct displaced_step_closure *dsc,
7728 CORE_ADDR from, CORE_ADDR to,
7729 struct regcache *regs)
7730{
7731 if (dsc->cleanup)
7732 dsc->cleanup (gdbarch, regs, dsc);
7733
7734 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7735 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7736 dsc->insn_addr + dsc->insn_size);
7737
cca44b1b
JB
7738}
7739
7740#include "bfd-in2.h"
7741#include "libcoff.h"
7742
7743static int
7744gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7745{
9a3c8263 7746 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
9779414d
DJ
7747
7748 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7749 {
7750 static asymbol *asym;
7751 static combined_entry_type ce;
7752 static struct coff_symbol_struct csym;
7753 static struct bfd fake_bfd;
7754 static bfd_target fake_target;
7755
7756 if (csym.native == NULL)
7757 {
7758 /* Create a fake symbol vector containing a Thumb symbol.
7759 This is solely so that the code in print_insn_little_arm()
7760 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7761 the presence of a Thumb symbol and switch to decoding
7762 Thumb instructions. */
7763
7764 fake_target.flavour = bfd_target_coff_flavour;
7765 fake_bfd.xvec = &fake_target;
7766 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7767 csym.native = &ce;
7768 csym.symbol.the_bfd = &fake_bfd;
7769 csym.symbol.name = "fake";
7770 asym = (asymbol *) & csym;
7771 }
7772
7773 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7774 info->symbols = &asym;
7775 }
7776 else
7777 info->symbols = NULL;
7778
7779 if (info->endian == BFD_ENDIAN_BIG)
7780 return print_insn_big_arm (memaddr, info);
7781 else
7782 return print_insn_little_arm (memaddr, info);
7783}
7784
7785/* The following define instruction sequences that will cause ARM
 7786   CPUs to take an undefined instruction trap.  These are used to
7787 signal a breakpoint to GDB.
7788
 7789   The newer ARMv4T CPUs are capable of operating in ARM or Thumb
 7790   modes.  A different instruction is required for each mode.  The ARM
 7791   CPUs can also be big or little endian.  Thus four different
7792 instructions are needed to support all cases.
7793
7794 Note: ARMv4 defines several new instructions that will take the
7795 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7796 not in fact add the new instructions. The new undefined
7797 instructions in ARMv4 are all instructions that had no defined
7798 behaviour in earlier chips. There is no guarantee that they will
 7799   raise an exception; they may be treated as NOPs.  In practice, it
 7800   may only be safe to rely on instructions matching:
7801
7802 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7803 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7804 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7805
0963b4bd 7806   Even this may only be true if the condition predicate is true.  The
cca44b1b
JB
7807 following use a condition predicate of ALWAYS so it is always TRUE.
7808
7809 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7810 and NetBSD all use a software interrupt rather than an undefined
 7811   instruction to force a trap.  This can be handled by the
7812 abi-specific code during establishment of the gdbarch vector. */
7813
7814#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7815#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7816#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7817#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7818
948f8e3d
PA
7819static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7820static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7821static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7822static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7823
cd6c3b4f
YQ
7824/* Implement the breakpoint_kind_from_pc gdbarch method. */
7825
d19280ad
YQ
7826static int
7827arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
cca44b1b
JB
7828{
7829 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7830 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7831
9779414d 7832 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7833 {
7834 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7835
7836 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7837 check whether we are replacing a 32-bit instruction. */
7838 if (tdep->thumb2_breakpoint != NULL)
7839 {
7840 gdb_byte buf[2];
d19280ad 7841
177321bd
DJ
7842 if (target_read_memory (*pcptr, buf, 2) == 0)
7843 {
7844 unsigned short inst1;
d19280ad 7845
177321bd 7846 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7847 if (thumb_insn_size (inst1) == 4)
d19280ad 7848 return ARM_BP_KIND_THUMB2;
177321bd
DJ
7849 }
7850 }
7851
d19280ad 7852 return ARM_BP_KIND_THUMB;
cca44b1b
JB
7853 }
7854 else
d19280ad
YQ
7855 return ARM_BP_KIND_ARM;
7856
7857}
7858
cd6c3b4f
YQ
7859/* Implement the sw_breakpoint_from_kind gdbarch method. */
7860
d19280ad
YQ
7861static const gdb_byte *
7862arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7863{
7864 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7865
7866 switch (kind)
cca44b1b 7867 {
d19280ad
YQ
7868 case ARM_BP_KIND_ARM:
7869 *size = tdep->arm_breakpoint_size;
cca44b1b 7870 return tdep->arm_breakpoint;
d19280ad
YQ
7871 case ARM_BP_KIND_THUMB:
7872 *size = tdep->thumb_breakpoint_size;
7873 return tdep->thumb_breakpoint;
7874 case ARM_BP_KIND_THUMB2:
7875 *size = tdep->thumb2_breakpoint_size;
7876 return tdep->thumb2_breakpoint;
7877 default:
7878 gdb_assert_not_reached ("unexpected arm breakpoint kind");
cca44b1b
JB
7879 }
7880}
7881
833b7ab5
YQ
7882/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7883
7884static int
7885arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7886 struct regcache *regcache,
7887 CORE_ADDR *pcptr)
7888{
7889 gdb_byte buf[4];
7890
 7891  /* Check that the memory pointed to by PC is readable.  */
7892 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7893 {
7894 struct arm_get_next_pcs next_pcs_ctx;
7895 CORE_ADDR pc;
7896 int i;
7897 VEC (CORE_ADDR) *next_pcs = NULL;
7898 struct cleanup *old_chain
7899 = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
7900
7901 arm_get_next_pcs_ctor (&next_pcs_ctx,
7902 &arm_get_next_pcs_ops,
7903 gdbarch_byte_order (gdbarch),
7904 gdbarch_byte_order_for_code (gdbarch),
7905 0,
7906 regcache);
7907
7908 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7909
 7910	      /* If MEMADDR is the next instruction of the current PC, do the
 7911		 software single step computation, and get the thumb mode from
 7912		 the destination address.  */
7913 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
7914 {
7915 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7916 {
7917 do_cleanups (old_chain);
7918
7919 if (IS_THUMB_ADDR (pc))
7920 {
7921 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7922 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7923 }
7924 else
7925 return ARM_BP_KIND_ARM;
7926 }
7927 }
7928
7929 do_cleanups (old_chain);
7930 }
7931
7932 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7933}
7934
cca44b1b
JB
7935/* Extract from an array REGBUF containing the (raw) register state a
7936 function return value of type TYPE, and copy that, in virtual
7937 format, into VALBUF. */
7938
7939static void
7940arm_extract_return_value (struct type *type, struct regcache *regs,
7941 gdb_byte *valbuf)
7942{
7943 struct gdbarch *gdbarch = get_regcache_arch (regs);
7944 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7945
7946 if (TYPE_CODE_FLT == TYPE_CODE (type))
7947 {
7948 switch (gdbarch_tdep (gdbarch)->fp_model)
7949 {
7950 case ARM_FLOAT_FPA:
7951 {
7952 /* The value is in register F0 in internal format. We need to
7953 extract the raw value and then convert it to the desired
7954 internal type. */
7955 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7956
7957 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7958 convert_from_extended (floatformat_from_type (type), tmpbuf,
7959 valbuf, gdbarch_byte_order (gdbarch));
7960 }
7961 break;
7962
7963 case ARM_FLOAT_SOFT_FPA:
7964 case ARM_FLOAT_SOFT_VFP:
7965 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7966 not using the VFP ABI code. */
7967 case ARM_FLOAT_VFP:
7968 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7969 if (TYPE_LENGTH (type) > 4)
7970 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7971 valbuf + INT_REGISTER_SIZE);
7972 break;
7973
7974 default:
0963b4bd
MS
7975 internal_error (__FILE__, __LINE__,
7976 _("arm_extract_return_value: "
7977 "Floating point model not supported"));
cca44b1b
JB
7978 break;
7979 }
7980 }
7981 else if (TYPE_CODE (type) == TYPE_CODE_INT
7982 || TYPE_CODE (type) == TYPE_CODE_CHAR
7983 || TYPE_CODE (type) == TYPE_CODE_BOOL
7984 || TYPE_CODE (type) == TYPE_CODE_PTR
7985 || TYPE_CODE (type) == TYPE_CODE_REF
7986 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7987 {
b021a221
MS
7988 /* If the type is a plain integer, then the access is
7989 straight-forward. Otherwise we have to play around a bit
7990 more. */
cca44b1b
JB
7991 int len = TYPE_LENGTH (type);
7992 int regno = ARM_A1_REGNUM;
7993 ULONGEST tmp;
7994
7995 while (len > 0)
7996 {
7997 /* By using store_unsigned_integer we avoid having to do
7998 anything special for small big-endian values. */
7999 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8000 store_unsigned_integer (valbuf,
8001 (len > INT_REGISTER_SIZE
8002 ? INT_REGISTER_SIZE : len),
8003 byte_order, tmp);
8004 len -= INT_REGISTER_SIZE;
8005 valbuf += INT_REGISTER_SIZE;
8006 }
8007 }
8008 else
8009 {
8010 /* For a structure or union the behaviour is as if the value had
8011 been stored to word-aligned memory and then loaded into
8012 registers with 32-bit load instruction(s). */
8013 int len = TYPE_LENGTH (type);
8014 int regno = ARM_A1_REGNUM;
8015 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8016
8017 while (len > 0)
8018 {
8019 regcache_cooked_read (regs, regno++, tmpbuf);
8020 memcpy (valbuf, tmpbuf,
8021 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8022 len -= INT_REGISTER_SIZE;
8023 valbuf += INT_REGISTER_SIZE;
8024 }
8025 }
8026}
8027
8028
8029/* Will a function return an aggregate type in memory or in a
8030 register? Return 0 if an aggregate type can be returned in a
8031 register, 1 if it must be returned in memory. */
8032
8033static int
8034arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8035{
cca44b1b
JB
8036 enum type_code code;
8037
f168693b 8038 type = check_typedef (type);
cca44b1b 8039
b13c8ab2
YQ
8040 /* Simple, non-aggregate types (ie not including vectors and
8041 complex) are always returned in a register (or registers). */
8042 code = TYPE_CODE (type);
8043 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8044 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8045 return 0;
cca44b1b 8046
c4312b19
YQ
8047 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8048 {
8049 /* Vector values should be returned using ARM registers if they
8050 are not over 16 bytes. */
8051 return (TYPE_LENGTH (type) > 16);
8052 }
8053
b13c8ab2 8054 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 8055 {
b13c8ab2
YQ
8056 /* The AAPCS says all aggregates not larger than a word are returned
8057 in a register. */
8058 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8059 return 0;
8060
cca44b1b
JB
8061 return 1;
8062 }
b13c8ab2
YQ
8063 else
8064 {
8065 int nRc;
cca44b1b 8066
b13c8ab2
YQ
8067 /* All aggregate types that won't fit in a register must be returned
8068 in memory. */
8069 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8070 return 1;
cca44b1b 8071
b13c8ab2
YQ
8072 /* In the ARM ABI, "integer" like aggregate types are returned in
8073 registers. For an aggregate type to be integer like, its size
8074 must be less than or equal to INT_REGISTER_SIZE and the
8075 offset of each addressable subfield must be zero. Note that bit
8076 fields are not addressable, and all addressable subfields of
8077 unions always start at offset zero.
cca44b1b 8078
b13c8ab2
YQ
8079 This function is based on the behaviour of GCC 2.95.1.
8080 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8081
b13c8ab2
YQ
8082 Note: All versions of GCC before GCC 2.95.2 do not set up the
8083 parameters correctly for a function returning the following
8084 structure: struct { float f;}; This should be returned in memory,
8085 not a register. Richard Earnshaw sent me a patch, but I do not
8086 know of any way to detect if a function like the above has been
8087 compiled with the correct calling convention. */
8088
8089 /* Assume all other aggregate types can be returned in a register.
8090 Run a check for structures, unions and arrays. */
8091 nRc = 0;
67255d04 8092
b13c8ab2
YQ
8093 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8094 {
8095 int i;
8096 /* Need to check if this struct/union is "integer" like. For
8097 this to be true, its size must be less than or equal to
8098 INT_REGISTER_SIZE and the offset of each addressable
8099 subfield must be zero. Note that bit fields are not
8100 addressable, and unions always start at offset zero. If any
8101 of the subfields is a floating point type, the struct/union
8102 cannot be an integer type. */
8103
8104 /* For each field in the object, check:
8105 1) Is it FP? --> yes, nRc = 1;
8106 2) Is it addressable (bitpos != 0) and
8107 not packed (bitsize == 0)?
8108 --> yes, nRc = 1
8109 */
8110
8111 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8112 {
b13c8ab2
YQ
8113 enum type_code field_type_code;
8114
8115 field_type_code
8116 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8117 i)));
8118
8119 /* Is it a floating point type field? */
8120 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8121 {
8122 nRc = 1;
8123 break;
8124 }
b13c8ab2
YQ
8125
8126 /* If bitpos != 0, then we have to care about it. */
8127 if (TYPE_FIELD_BITPOS (type, i) != 0)
8128 {
8129 /* Bitfields are not addressable. If the field bitsize is
8130 zero, then the field is not packed. Hence it cannot be
8131 a bitfield or any other packed type. */
8132 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8133 {
8134 nRc = 1;
8135 break;
8136 }
8137 }
67255d04
RE
8138 }
8139 }
67255d04 8140
b13c8ab2
YQ
8141 return nRc;
8142 }
67255d04
RE
8143}
8144
34e8f22d
RE
8145/* Write into appropriate registers a function return value of type
8146 TYPE, given in virtual format. */
8147
8148static void
b508a996 8149arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8150 const gdb_byte *valbuf)
34e8f22d 8151{
be8626e0 8152 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8153 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8154
34e8f22d
RE
8155 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8156 {
e362b510 8157 gdb_byte buf[MAX_REGISTER_SIZE];
34e8f22d 8158
be8626e0 8159 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8160 {
8161 case ARM_FLOAT_FPA:
8162
be8626e0
MD
8163 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8164 gdbarch_byte_order (gdbarch));
b508a996 8165 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8166 break;
8167
fd50bc42 8168 case ARM_FLOAT_SOFT_FPA:
08216dd7 8169 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8170 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8171 not using the VFP ABI code. */
8172 case ARM_FLOAT_VFP:
b508a996
RE
8173 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8174 if (TYPE_LENGTH (type) > 4)
8175 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8176 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8177 break;
8178
8179 default:
9b20d036
MS
8180 internal_error (__FILE__, __LINE__,
8181 _("arm_store_return_value: Floating "
8182 "point model not supported"));
08216dd7
RE
8183 break;
8184 }
34e8f22d 8185 }
b508a996
RE
8186 else if (TYPE_CODE (type) == TYPE_CODE_INT
8187 || TYPE_CODE (type) == TYPE_CODE_CHAR
8188 || TYPE_CODE (type) == TYPE_CODE_BOOL
8189 || TYPE_CODE (type) == TYPE_CODE_PTR
8190 || TYPE_CODE (type) == TYPE_CODE_REF
8191 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8192 {
8193 if (TYPE_LENGTH (type) <= 4)
8194 {
8195 /* Values of one word or less are zero/sign-extended and
8196 returned in r0. */
7a5ea0d4 8197 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8198 LONGEST val = unpack_long (type, valbuf);
8199
e17a4113 8200 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8201 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8202 }
8203 else
8204 {
8205 /* Integral values greater than one word are stored in consecutive
8206 registers starting with r0. This will always be a multiple of
 8207	     the register size.  */
8208 int len = TYPE_LENGTH (type);
8209 int regno = ARM_A1_REGNUM;
8210
8211 while (len > 0)
8212 {
8213 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8214 len -= INT_REGISTER_SIZE;
8215 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8216 }
8217 }
8218 }
34e8f22d 8219 else
b508a996
RE
8220 {
8221 /* For a structure or union the behaviour is as if the value had
8222 been stored to word-aligned memory and then loaded into
8223 registers with 32-bit load instruction(s). */
8224 int len = TYPE_LENGTH (type);
8225 int regno = ARM_A1_REGNUM;
7a5ea0d4 8226 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8227
8228 while (len > 0)
8229 {
8230 memcpy (tmpbuf, valbuf,
7a5ea0d4 8231 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8232 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8233 len -= INT_REGISTER_SIZE;
8234 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8235 }
8236 }
34e8f22d
RE
8237}
8238
2af48f68
PB
8239
8240/* Handle function return values. */
8241
8242static enum return_value_convention
6a3a010b 8243arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8244 struct type *valtype, struct regcache *regcache,
8245 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8246{
7c00367c 8247 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8248 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8249 enum arm_vfp_cprc_base_type vfp_base_type;
8250 int vfp_base_count;
8251
8252 if (arm_vfp_abi_for_function (gdbarch, func_type)
8253 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8254 {
8255 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8256 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8257 int i;
8258 for (i = 0; i < vfp_base_count; i++)
8259 {
58d6951d
DJ
8260 if (reg_char == 'q')
8261 {
8262 if (writebuf)
8263 arm_neon_quad_write (gdbarch, regcache, i,
8264 writebuf + i * unit_length);
8265
8266 if (readbuf)
8267 arm_neon_quad_read (gdbarch, regcache, i,
8268 readbuf + i * unit_length);
8269 }
8270 else
8271 {
8272 char name_buf[4];
8273 int regnum;
8274
8c042590 8275 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8276 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8277 strlen (name_buf));
8278 if (writebuf)
8279 regcache_cooked_write (regcache, regnum,
8280 writebuf + i * unit_length);
8281 if (readbuf)
8282 regcache_cooked_read (regcache, regnum,
8283 readbuf + i * unit_length);
8284 }
90445bd3
DJ
8285 }
8286 return RETURN_VALUE_REGISTER_CONVENTION;
8287 }
7c00367c 8288
2af48f68
PB
8289 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8290 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8291 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8292 {
7c00367c
MK
8293 if (tdep->struct_return == pcc_struct_return
8294 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8295 return RETURN_VALUE_STRUCT_CONVENTION;
8296 }
b13c8ab2
YQ
8297 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8298 {
8299 if (arm_return_in_memory (gdbarch, valtype))
8300 return RETURN_VALUE_STRUCT_CONVENTION;
8301 }
7052e42c 8302
2af48f68
PB
8303 if (writebuf)
8304 arm_store_return_value (valtype, regcache, writebuf);
8305
8306 if (readbuf)
8307 arm_extract_return_value (valtype, regcache, readbuf);
8308
8309 return RETURN_VALUE_REGISTER_CONVENTION;
8310}
8311
8312
9df628e0 8313static int
60ade65d 8314arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8315{
e17a4113
UW
8316 struct gdbarch *gdbarch = get_frame_arch (frame);
8317 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8318 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8319 CORE_ADDR jb_addr;
e362b510 8320 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8321
60ade65d 8322 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8323
8324 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8325 INT_REGISTER_SIZE))
9df628e0
RE
8326 return 0;
8327
e17a4113 8328 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8329 return 1;
8330}
8331
faa95490
DJ
8332/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8333 return the target PC. Otherwise return 0. */
c906108c
SS
8334
8335CORE_ADDR
52f729a7 8336arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8337{
2c02bd72 8338 const char *name;
faa95490 8339 int namelen;
c906108c
SS
8340 CORE_ADDR start_addr;
8341
8342 /* Find the starting address and name of the function containing the PC. */
8343 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8344 {
 8345      /* Trampoline 'bx reg' doesn't belong to any function.  Do the
8346 check here. */
8347 start_addr = arm_skip_bx_reg (frame, pc);
8348 if (start_addr != 0)
8349 return start_addr;
8350
8351 return 0;
8352 }
c906108c 8353
faa95490
DJ
8354 /* If PC is in a Thumb call or return stub, return the address of the
8355 target PC, which is in a register. The thunk functions are called
8356 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
8357 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8358 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8359 if (startswith (name, "_call_via_")
8360 || startswith (name, "__ARM_call_via_"))
c906108c 8361 {
ed9a39eb
JM
8362 /* Use the name suffix to determine which register contains the
8363 target PC. */
c5aa993b
JM
8364 static char *table[15] =
8365 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8366 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8367 };
c906108c 8368 int regno;
faa95490 8369 int offset = strlen (name) - 2;
c906108c
SS
8370
8371 for (regno = 0; regno <= 14; regno++)
faa95490 8372 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8373 return get_frame_register_unsigned (frame, regno);
c906108c 8374 }
ed9a39eb 8375
faa95490
DJ
8376 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8377 non-interworking calls to foo. We could decode the stubs
8378 to find the target but it's easier to use the symbol table. */
8379 namelen = strlen (name);
8380 if (name[0] == '_' && name[1] == '_'
8381 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8382 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8383 || (namelen > 2 + strlen ("_from_arm")
61012eef 8384 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8385 {
8386 char *target_name;
8387 int target_len = namelen - 2;
3b7344d5 8388 struct bound_minimal_symbol minsym;
faa95490
DJ
8389 struct objfile *objfile;
8390 struct obj_section *sec;
8391
8392 if (name[namelen - 1] == 'b')
8393 target_len -= strlen ("_from_thumb");
8394 else
8395 target_len -= strlen ("_from_arm");
8396
224c3ddb 8397 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8398 memcpy (target_name, name + 2, target_len);
8399 target_name[target_len] = '\0';
8400
8401 sec = find_pc_section (pc);
8402 objfile = (sec == NULL) ? NULL : sec->objfile;
8403 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8404 if (minsym.minsym != NULL)
77e371c0 8405 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8406 else
8407 return 0;
8408 }
8409
c5aa993b 8410 return 0; /* not a stub */
c906108c
SS
8411}
8412
afd7eef0
RE
8413static void
8414set_arm_command (char *args, int from_tty)
8415{
edefbb7c
AC
8416 printf_unfiltered (_("\
8417\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8418 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8419}
8420
8421static void
8422show_arm_command (char *args, int from_tty)
8423{
26304000 8424 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8425}
8426
28e97307
DJ
8427static void
8428arm_update_current_architecture (void)
fd50bc42 8429{
28e97307 8430 struct gdbarch_info info;
fd50bc42 8431
28e97307 8432 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8433 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8434 return;
fd50bc42 8435
28e97307
DJ
8436 /* Update the architecture. */
8437 gdbarch_info_init (&info);
fd50bc42 8438
28e97307 8439 if (!gdbarch_update_p (info))
9b20d036 8440 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8441}
8442
8443static void
8444set_fp_model_sfunc (char *args, int from_tty,
8445 struct cmd_list_element *c)
8446{
570dc176 8447 int fp_model;
fd50bc42
RE
8448
8449 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8450 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8451 {
aead7601 8452 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8453 break;
8454 }
8455
8456 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8457 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8458 current_fp_model);
8459
28e97307 8460 arm_update_current_architecture ();
fd50bc42
RE
8461}
8462
8463static void
08546159
AC
8464show_fp_model (struct ui_file *file, int from_tty,
8465 struct cmd_list_element *c, const char *value)
fd50bc42 8466{
f5656ead 8467 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8468
28e97307 8469 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8470 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8471 fprintf_filtered (file, _("\
8472The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8473 fp_model_strings[tdep->fp_model]);
8474 else
8475 fprintf_filtered (file, _("\
8476The current ARM floating point model is \"%s\".\n"),
8477 fp_model_strings[arm_fp_model]);
8478}
8479
8480static void
8481arm_set_abi (char *args, int from_tty,
8482 struct cmd_list_element *c)
8483{
570dc176 8484 int arm_abi;
28e97307
DJ
8485
8486 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8487 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8488 {
aead7601 8489 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8490 break;
8491 }
8492
8493 if (arm_abi == ARM_ABI_LAST)
8494 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8495 arm_abi_string);
8496
8497 arm_update_current_architecture ();
8498}
8499
8500static void
8501arm_show_abi (struct ui_file *file, int from_tty,
8502 struct cmd_list_element *c, const char *value)
8503{
f5656ead 8504 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8505
8506 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8507 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8508 fprintf_filtered (file, _("\
8509The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8510 arm_abi_strings[tdep->arm_abi]);
8511 else
8512 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8513 arm_abi_string);
fd50bc42
RE
8514}
8515
0428b8f5
DJ
8516static void
8517arm_show_fallback_mode (struct ui_file *file, int from_tty,
8518 struct cmd_list_element *c, const char *value)
8519{
0963b4bd
MS
8520 fprintf_filtered (file,
8521 _("The current execution mode assumed "
8522 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8523 arm_fallback_mode_string);
8524}
8525
8526static void
8527arm_show_force_mode (struct ui_file *file, int from_tty,
8528 struct cmd_list_element *c, const char *value)
8529{
0963b4bd
MS
8530 fprintf_filtered (file,
8531 _("The current execution mode assumed "
8532 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8533 arm_force_mode_string);
8534}
8535
afd7eef0
RE
8536/* If the user changes the register disassembly style used for info
8537 register and other commands, we have to also switch the style used
8538 in opcodes for disassembly output. This function is run by the "set
8539 arm disassembler" command, and does just that. */
bc90b915
FN
8540
8541static void
afd7eef0 8542set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
8543 struct cmd_list_element *c)
8544{
afd7eef0 8545 set_disassembly_style ();
bc90b915
FN
8546}
8547\f
966fbf70 8548/* Return the ARM register name corresponding to register I. */
a208b0cb 8549static const char *
d93859e2 8550arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8551{
58d6951d
DJ
8552 const int num_regs = gdbarch_num_regs (gdbarch);
8553
8554 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8555 && i >= num_regs && i < num_regs + 32)
8556 {
8557 static const char *const vfp_pseudo_names[] = {
8558 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8559 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8560 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8561 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8562 };
8563
8564 return vfp_pseudo_names[i - num_regs];
8565 }
8566
8567 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8568 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8569 {
8570 static const char *const neon_pseudo_names[] = {
8571 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8572 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8573 };
8574
8575 return neon_pseudo_names[i - num_regs - 32];
8576 }
8577
ff6f572f
DJ
8578 if (i >= ARRAY_SIZE (arm_register_names))
8579 /* These registers are only supported on targets which supply
8580 an XML description. */
8581 return "";
8582
966fbf70
RE
8583 return arm_register_names[i];
8584}
8585
bc90b915 8586static void
afd7eef0 8587set_disassembly_style (void)
bc90b915 8588{
123dc839 8589 int current;
bc90b915 8590
123dc839
DJ
8591 /* Find the style that the user wants. */
8592 for (current = 0; current < num_disassembly_options; current++)
8593 if (disassembly_style == valid_disassembly_styles[current])
8594 break;
8595 gdb_assert (current < num_disassembly_options);
bc90b915 8596
94c30b78 8597 /* Synchronize the disassembler. */
bc90b915
FN
8598 set_arm_regname_option (current);
8599}
8600
082fc60d
RE
8601/* Test whether the COFF symbol-specific value corresponds to a Thumb
8602 function. */
8603
8604static int
8605coff_sym_is_thumb (int val)
8606{
f8bf5763
PM
8607 return (val == C_THUMBEXT
8608 || val == C_THUMBSTAT
8609 || val == C_THUMBEXTFUNC
8610 || val == C_THUMBSTATFUNC
8611 || val == C_THUMBLABEL);
082fc60d
RE
8612}
8613
8614/* arm_coff_make_msymbol_special()
8615 arm_elf_make_msymbol_special()
8616
8617 These functions test whether the COFF or ELF symbol corresponds to
8618 an address in thumb code, and set a "special" bit in a minimal
8619 symbol to indicate that it does. */
8620
34e8f22d 8621static void
082fc60d
RE
8622arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8623{
39d911fc
TP
8624 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8625
8626 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8627 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8628 MSYMBOL_SET_SPECIAL (msym);
8629}
8630
34e8f22d 8631static void
082fc60d
RE
8632arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8633{
8634 if (coff_sym_is_thumb (val))
8635 MSYMBOL_SET_SPECIAL (msym);
8636}
8637
60c5725c 8638static void
c1bd65d0 8639arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8640{
9a3c8263 8641 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8642 unsigned int i;
8643
8644 for (i = 0; i < objfile->obfd->section_count; i++)
8645 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8646}
8647
8648static void
8649arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8650 asymbol *sym)
8651{
8652 const char *name = bfd_asymbol_name (sym);
8653 struct arm_per_objfile *data;
8654 VEC(arm_mapping_symbol_s) **map_p;
8655 struct arm_mapping_symbol new_map_sym;
8656
8657 gdb_assert (name[0] == '$');
8658 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8659 return;
8660
9a3c8263
SM
8661 data = (struct arm_per_objfile *) objfile_data (objfile,
8662 arm_objfile_data_key);
60c5725c
DJ
8663 if (data == NULL)
8664 {
8665 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8666 struct arm_per_objfile);
8667 set_objfile_data (objfile, arm_objfile_data_key, data);
8668 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8669 objfile->obfd->section_count,
8670 VEC(arm_mapping_symbol_s) *);
8671 }
8672 map_p = &data->section_maps[bfd_get_section (sym)->index];
8673
8674 new_map_sym.value = sym->value;
8675 new_map_sym.type = name[1];
8676
8677 /* Assume that most mapping symbols appear in order of increasing
8678 value. If they were randomly distributed, it would be faster to
8679 always push here and then sort at first use. */
8680 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8681 {
8682 struct arm_mapping_symbol *prev_map_sym;
8683
8684 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8685 if (prev_map_sym->value >= sym->value)
8686 {
8687 unsigned int idx;
8688 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8689 arm_compare_mapping_symbols);
8690 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8691 return;
8692 }
8693 }
8694
8695 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8696}
8697
756fe439 8698static void
61a1198a 8699arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8700{
9779414d 8701 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8702 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8703
8704 /* If necessary, set the T bit. */
8705 if (arm_apcs_32)
8706 {
9779414d 8707 ULONGEST val, t_bit;
61a1198a 8708 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8709 t_bit = arm_psr_thumb_bit (gdbarch);
8710 if (arm_pc_is_thumb (gdbarch, pc))
8711 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8712 val | t_bit);
756fe439 8713 else
61a1198a 8714 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8715 val & ~t_bit);
756fe439
DJ
8716 }
8717}
123dc839 8718
58d6951d
DJ
8719/* Read the contents of a NEON quad register, by reading from two
8720 double registers. This is used to implement the quad pseudo
8721 registers, and for argument passing in case the quad registers are
8722 missing; vectors are passed in quad registers when using the VFP
8723 ABI, even if a NEON unit is not present. REGNUM is the index of
8724 the quad register, in [0, 15]. */
8725
05d1431c 8726static enum register_status
58d6951d
DJ
8727arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8728 int regnum, gdb_byte *buf)
8729{
8730 char name_buf[4];
8731 gdb_byte reg_buf[8];
8732 int offset, double_regnum;
05d1431c 8733 enum register_status status;
58d6951d 8734
8c042590 8735 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8736 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8737 strlen (name_buf));
8738
8739 /* d0 is always the least significant half of q0. */
8740 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8741 offset = 8;
8742 else
8743 offset = 0;
8744
05d1431c
PA
8745 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8746 if (status != REG_VALID)
8747 return status;
58d6951d
DJ
8748 memcpy (buf + offset, reg_buf, 8);
8749
8750 offset = 8 - offset;
05d1431c
PA
8751 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8752 if (status != REG_VALID)
8753 return status;
58d6951d 8754 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8755
8756 return REG_VALID;
58d6951d
DJ
8757}
8758
05d1431c 8759static enum register_status
58d6951d
DJ
8760arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8761 int regnum, gdb_byte *buf)
8762{
8763 const int num_regs = gdbarch_num_regs (gdbarch);
8764 char name_buf[4];
8765 gdb_byte reg_buf[8];
8766 int offset, double_regnum;
8767
8768 gdb_assert (regnum >= num_regs);
8769 regnum -= num_regs;
8770
8771 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8772 /* Quad-precision register. */
05d1431c 8773 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8774 else
8775 {
05d1431c
PA
8776 enum register_status status;
8777
58d6951d
DJ
8778 /* Single-precision register. */
8779 gdb_assert (regnum < 32);
8780
8781 /* s0 is always the least significant half of d0. */
8782 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8783 offset = (regnum & 1) ? 0 : 4;
8784 else
8785 offset = (regnum & 1) ? 4 : 0;
8786
8c042590 8787 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8788 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8789 strlen (name_buf));
8790
05d1431c
PA
8791 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8792 if (status == REG_VALID)
8793 memcpy (buf, reg_buf + offset, 4);
8794 return status;
58d6951d
DJ
8795 }
8796}
8797
8798/* Store the contents of BUF to a NEON quad register, by writing to
8799 two double registers. This is used to implement the quad pseudo
8800 registers, and for argument passing in case the quad registers are
8801 missing; vectors are passed in quad registers when using the VFP
8802 ABI, even if a NEON unit is not present. REGNUM is the index
8803 of the quad register, in [0, 15]. */
8804
8805static void
8806arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8807 int regnum, const gdb_byte *buf)
8808{
8809 char name_buf[4];
58d6951d
DJ
8810 int offset, double_regnum;
8811
8c042590 8812 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8813 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8814 strlen (name_buf));
8815
8816 /* d0 is always the least significant half of q0. */
8817 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8818 offset = 8;
8819 else
8820 offset = 0;
8821
8822 regcache_raw_write (regcache, double_regnum, buf + offset);
8823 offset = 8 - offset;
8824 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8825}
8826
8827static void
8828arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8829 int regnum, const gdb_byte *buf)
8830{
8831 const int num_regs = gdbarch_num_regs (gdbarch);
8832 char name_buf[4];
8833 gdb_byte reg_buf[8];
8834 int offset, double_regnum;
8835
8836 gdb_assert (regnum >= num_regs);
8837 regnum -= num_regs;
8838
8839 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8840 /* Quad-precision register. */
8841 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8842 else
8843 {
8844 /* Single-precision register. */
8845 gdb_assert (regnum < 32);
8846
8847 /* s0 is always the least significant half of d0. */
8848 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8849 offset = (regnum & 1) ? 0 : 4;
8850 else
8851 offset = (regnum & 1) ? 4 : 0;
8852
8c042590 8853 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8854 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8855 strlen (name_buf));
8856
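 /* Only half of the containing double register changes: read the
 whole register, splice the new single-precision value in at the
 computed offset, and write the register back. */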
8857 regcache_raw_read (regcache, double_regnum, reg_buf);
8858 memcpy (reg_buf + offset, buf, 4);
8859 regcache_raw_write (regcache, double_regnum, reg_buf);
8860 }
8861}
8862
123dc839
DJ
8863static struct value *
8864value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8865{
9a3c8263 8866 const int *reg_p = (const int *) baton;
123dc839
DJ
8867 return value_of_register (*reg_p, frame);
8868}
97e03143 8869\f
70f80edf
JT
8870static enum gdb_osabi
8871arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8872{
2af48f68 8873 unsigned int elfosabi;
70f80edf 8874 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8875
70f80edf 8876 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8877
28e97307
DJ
8878 if (elfosabi == ELFOSABI_ARM)
8879 /* GNU tools use this value. Check note sections in this case,
8880 as well. */
8881 bfd_map_over_sections (abfd,
8882 generic_elf_osabi_sniff_abi_tag_sections,
8883 &osabi);
97e03143 8884
28e97307 8885 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8886 return osabi;
97e03143
RE
8887}
8888
54483882
YQ
8889static int
8890arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8891 struct reggroup *group)
8892{
2c291032
YQ
8893 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8894 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8895 all_reggroup, of course. */
54483882 8896 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8897 return (group == float_reggroup
8898 || group == save_reggroup
8899 || group == restore_reggroup
8900 || group == all_reggroup);
54483882
YQ
8901 else
8902 return default_register_reggroup_p (gdbarch, regnum, group);
8903}
8904
25f8c692
JL
8905\f
8906/* For backward-compatibility we allow two 'g' packet lengths with
8907 the remote protocol depending on whether FPA registers are
8908 supplied. M-profile targets do not have FPA registers, but some
8909 supplied. M-profile targets do not have FPA registers, but some
8910 stubs already exist in the wild that use a 'g' packet which
8911 supplies them, albeit with dummy values. The packet format which
8911 includes FPA registers should be considered deprecated for
8912 M-profile targets. */
8913
8914static void
8915arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8916{
8917 if (gdbarch_tdep (gdbarch)->is_m)
8918 {
8919 /* If we know from the executable this is an M-profile target,
8920 cater for remote targets whose register set layout is the
8921 same as the FPA layout. */
8922 register_remote_g_packet_guess (gdbarch,
03145bf4 8923 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8924 (16 * INT_REGISTER_SIZE)
8925 + (8 * FP_REGISTER_SIZE)
8926 + (2 * INT_REGISTER_SIZE),
8927 tdesc_arm_with_m_fpa_layout);
8928
8929 /* The regular M-profile layout. */
8930 register_remote_g_packet_guess (gdbarch,
8931 /* r0-r12,sp,lr,pc; xpsr */
8932 (16 * INT_REGISTER_SIZE)
8933 + INT_REGISTER_SIZE,
8934 tdesc_arm_with_m);
3184d3f9
JL
8935
8936 /* M-profile plus M4F VFP. */
8937 register_remote_g_packet_guess (gdbarch,
8938 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8939 (16 * INT_REGISTER_SIZE)
8940 + (16 * VFP_REGISTER_SIZE)
8941 + (2 * INT_REGISTER_SIZE),
8942 tdesc_arm_with_m_vfp_d16);
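 /* Assuming the usual register sizes (INT_REGISTER_SIZE == 4,
 FP_REGISTER_SIZE == 12, VFP_REGISTER_SIZE == 8), these three
 guesses correspond to 'g' packets of 168, 68 and 200 bytes
 respectively. */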
25f8c692
JL
8943 }
8944
8945 /* Otherwise we don't have a useful guess. */
8946}
8947
7eb89530
YQ
8948/* Implement the code_of_frame_writable gdbarch method. */
8949
8950static int
8951arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8952{
8953 if (gdbarch_tdep (gdbarch)->is_m
8954 && get_frame_type (frame) == SIGTRAMP_FRAME)
8955 {
8956 /* M-profile exception frames return to some magic PCs, which
8957 aren't writable at all. */
8958 return 0;
8959 }
8960 else
8961 return 1;
8962}
8963
70f80edf 8964\f
da3c6d4a
MS
8965/* Initialize the current architecture based on INFO. If possible,
8966 re-use an architecture from ARCHES, which is a list of
8967 architectures already created during this debugging session.
97e03143 8968
da3c6d4a
MS
8969 Called e.g. at program startup, when reading a core file, and when
8970 reading a binary file. */
97e03143 8971
39bbf761
RE
8972static struct gdbarch *
8973arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8974{
97e03143 8975 struct gdbarch_tdep *tdep;
39bbf761 8976 struct gdbarch *gdbarch;
28e97307
DJ
8977 struct gdbarch_list *best_arch;
8978 enum arm_abi_kind arm_abi = arm_abi_global;
8979 enum arm_float_model fp_model = arm_fp_model;
123dc839 8980 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8981 int i, is_m = 0;
330c6ca9 8982 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8983 int have_wmmx_registers = 0;
58d6951d 8984 int have_neon = 0;
ff6f572f 8985 int have_fpa_registers = 1;
9779414d
DJ
8986 const struct target_desc *tdesc = info.target_desc;
8987
8988 /* If we have an object to base this architecture on, try to determine
8989 its ABI. */
8990
8991 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8992 {
8993 int ei_osabi, e_flags;
8994
8995 switch (bfd_get_flavour (info.abfd))
8996 {
8997 case bfd_target_aout_flavour:
8998 /* Assume it's an old APCS-style ABI. */
8999 arm_abi = ARM_ABI_APCS;
9000 break;
9001
9002 case bfd_target_coff_flavour:
9003 /* Assume it's an old APCS-style ABI. */
9004 /* XXX WinCE? */
9005 arm_abi = ARM_ABI_APCS;
9006 break;
9007
9008 case bfd_target_elf_flavour:
9009 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9010 e_flags = elf_elfheader (info.abfd)->e_flags;
9011
9012 if (ei_osabi == ELFOSABI_ARM)
9013 {
9014 /* GNU tools used to use this value, but do not for EABI
9015 objects. There's nowhere to tag an EABI version
9016 anyway, so assume APCS. */
9017 arm_abi = ARM_ABI_APCS;
9018 }
d403db27 9019 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
9020 {
9021 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9022 int attr_arch, attr_profile;
9023
9024 switch (eabi_ver)
9025 {
9026 case EF_ARM_EABI_UNKNOWN:
9027 /* Assume GNU tools. */
9028 arm_abi = ARM_ABI_APCS;
9029 break;
9030
9031 case EF_ARM_EABI_VER4:
9032 case EF_ARM_EABI_VER5:
9033 arm_abi = ARM_ABI_AAPCS;
9034 /* EABI binaries default to VFP float ordering.
9035 They may also contain build attributes that can
9036 be used to identify if the VFP argument-passing
9037 ABI is in use. */
9038 if (fp_model == ARM_FLOAT_AUTO)
9039 {
9040#ifdef HAVE_ELF
9041 switch (bfd_elf_get_obj_attr_int (info.abfd,
9042 OBJ_ATTR_PROC,
9043 Tag_ABI_VFP_args))
9044 {
b35b0298 9045 case AEABI_VFP_args_base:
9779414d
DJ
9046 /* "The user intended FP parameter/result
9047 passing to conform to AAPCS, base
9048 variant". */
9049 fp_model = ARM_FLOAT_SOFT_VFP;
9050 break;
b35b0298 9051 case AEABI_VFP_args_vfp:
9779414d
DJ
9052 /* "The user intended FP parameter/result
9053 passing to conform to AAPCS, VFP
9054 variant". */
9055 fp_model = ARM_FLOAT_VFP;
9056 break;
b35b0298 9057 case AEABI_VFP_args_toolchain:
9779414d
DJ
9058 /* "The user intended FP parameter/result
9059 passing to conform to tool chain-specific
9060 conventions" - we don't know any such
9061 conventions, so leave it as "auto". */
9062 break;
b35b0298 9063 case AEABI_VFP_args_compatible:
5c294fee
TG
9064 /* "Code is compatible with both the base
9065 and VFP variants; the user did not permit
9066 non-variadic functions to pass FP
9067 parameters/results" - leave it as
9068 "auto". */
9069 break;
9779414d
DJ
9070 default:
9071 /* Attribute value not mentioned in the
5c294fee 9072 November 2012 ABI, so leave it as
9779414d
DJ
9073 "auto". */
9074 break;
9075 }
9076#else
9077 fp_model = ARM_FLOAT_SOFT_VFP;
9078#endif
9079 }
9080 break;
9081
9082 default:
9083 /* Leave it as "auto". */
9084 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9085 break;
9086 }
9087
9088#ifdef HAVE_ELF
9089 /* Detect M-profile programs. This only works if the
9090 executable file includes build attributes; GCC does
9091 copy them to the executable, but e.g. RealView does
9092 not. */
9093 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9094 Tag_CPU_arch);
0963b4bd
MS
9095 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9096 OBJ_ATTR_PROC,
9779414d
DJ
9097 Tag_CPU_arch_profile);
9098 /* GCC specifies the profile for v6-M; RealView only
9099 specifies the profile for architectures starting with
9100 V7 (as opposed to architectures with a tag
9101 numerically greater than TAG_CPU_ARCH_V7). */
9102 if (!tdesc_has_registers (tdesc)
9103 && (attr_arch == TAG_CPU_ARCH_V6_M
9104 || attr_arch == TAG_CPU_ARCH_V6S_M
9105 || attr_profile == 'M'))
25f8c692 9106 is_m = 1;
9779414d
DJ
9107#endif
9108 }
9109
9110 if (fp_model == ARM_FLOAT_AUTO)
9111 {
9112 int e_flags = elf_elfheader (info.abfd)->e_flags;
9113
9114 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9115 {
9116 case 0:
9117 /* Leave it as "auto". Strictly speaking this case
9118 means FPA, but almost nobody uses that now, and
9119 many toolchains fail to set the appropriate bits
9120 for the floating-point model they use. */
9121 break;
9122 case EF_ARM_SOFT_FLOAT:
9123 fp_model = ARM_FLOAT_SOFT_FPA;
9124 break;
9125 case EF_ARM_VFP_FLOAT:
9126 fp_model = ARM_FLOAT_VFP;
9127 break;
9128 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9129 fp_model = ARM_FLOAT_SOFT_VFP;
9130 break;
9131 }
9132 }
9133
9134 if (e_flags & EF_ARM_BE8)
9135 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9136
9137 break;
9138
9139 default:
9140 /* Leave it as "auto". */
9141 break;
9142 }
9143 }
123dc839
DJ
9144
9145 /* Check any target description for validity. */
9779414d 9146 if (tdesc_has_registers (tdesc))
123dc839
DJ
9147 {
9148 /* For most registers we require GDB's default names; but also allow
9149 the numeric names for sp / lr / pc, as a convenience. */
9150 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9151 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9152 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9153
9154 const struct tdesc_feature *feature;
58d6951d 9155 int valid_p;
123dc839 9156
9779414d 9157 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9158 "org.gnu.gdb.arm.core");
9159 if (feature == NULL)
9779414d
DJ
9160 {
9161 feature = tdesc_find_feature (tdesc,
9162 "org.gnu.gdb.arm.m-profile");
9163 if (feature == NULL)
9164 return NULL;
9165 else
9166 is_m = 1;
9167 }
123dc839
DJ
9168
9169 tdesc_data = tdesc_data_alloc ();
9170
9171 valid_p = 1;
9172 for (i = 0; i < ARM_SP_REGNUM; i++)
9173 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9174 arm_register_names[i]);
9175 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9176 ARM_SP_REGNUM,
9177 arm_sp_names);
9178 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9179 ARM_LR_REGNUM,
9180 arm_lr_names);
9181 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9182 ARM_PC_REGNUM,
9183 arm_pc_names);
9779414d
DJ
9184 if (is_m)
9185 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9186 ARM_PS_REGNUM, "xpsr");
9187 else
9188 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9189 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9190
9191 if (!valid_p)
9192 {
9193 tdesc_data_cleanup (tdesc_data);
9194 return NULL;
9195 }
9196
9779414d 9197 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9198 "org.gnu.gdb.arm.fpa");
9199 if (feature != NULL)
9200 {
9201 valid_p = 1;
9202 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9203 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9204 arm_register_names[i]);
9205 if (!valid_p)
9206 {
9207 tdesc_data_cleanup (tdesc_data);
9208 return NULL;
9209 }
9210 }
ff6f572f
DJ
9211 else
9212 have_fpa_registers = 0;
9213
9779414d 9214 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9215 "org.gnu.gdb.xscale.iwmmxt");
9216 if (feature != NULL)
9217 {
9218 static const char *const iwmmxt_names[] = {
9219 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9220 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9221 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9222 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9223 };
9224
9225 valid_p = 1;
9226 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9227 valid_p
9228 &= tdesc_numbered_register (feature, tdesc_data, i,
9229 iwmmxt_names[i - ARM_WR0_REGNUM]);
9230
9231 /* Check for the control registers, but do not fail if they
9232 are missing. */
9233 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9234 tdesc_numbered_register (feature, tdesc_data, i,
9235 iwmmxt_names[i - ARM_WR0_REGNUM]);
9236
9237 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9238 valid_p
9239 &= tdesc_numbered_register (feature, tdesc_data, i,
9240 iwmmxt_names[i - ARM_WR0_REGNUM]);
9241
9242 if (!valid_p)
9243 {
9244 tdesc_data_cleanup (tdesc_data);
9245 return NULL;
9246 }
a56cc1ce
YQ
9247
9248 have_wmmx_registers = 1;
ff6f572f 9249 }
58d6951d
DJ
9250
9251 /* If we have a VFP unit, check whether the single precision registers
9252 are present. If not, then we will synthesize them as pseudo
9253 registers. */
9779414d 9254 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9255 "org.gnu.gdb.arm.vfp");
9256 if (feature != NULL)
9257 {
9258 static const char *const vfp_double_names[] = {
9259 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9260 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9261 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9262 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9263 };
9264
9265 /* Require the double precision registers. There must be either
9266 16 or 32. */
9267 valid_p = 1;
9268 for (i = 0; i < 32; i++)
9269 {
9270 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9271 ARM_D0_REGNUM + i,
9272 vfp_double_names[i]);
9273 if (!valid_p)
9274 break;
9275 }
2b9e5ea6
UW
9276 if (!valid_p && i == 16)
9277 valid_p = 1;
58d6951d 9278
2b9e5ea6
UW
9279 /* Also require FPSCR. */
9280 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9281 ARM_FPSCR_REGNUM, "fpscr");
9282 if (!valid_p)
58d6951d
DJ
9283 {
9284 tdesc_data_cleanup (tdesc_data);
9285 return NULL;
9286 }
9287
9288 if (tdesc_unnumbered_register (feature, "s0") == 0)
9289 have_vfp_pseudos = 1;
9290
330c6ca9 9291 vfp_register_count = i;
58d6951d
DJ
9292
9293 /* If we have VFP, also check for NEON. The architecture allows
9294 NEON without VFP (integer vector operations only), but GDB
9295 does not support that. */
9779414d 9296 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9297 "org.gnu.gdb.arm.neon");
9298 if (feature != NULL)
9299 {
9300 /* NEON requires 32 double-precision registers. */
9301 if (i != 32)
9302 {
9303 tdesc_data_cleanup (tdesc_data);
9304 return NULL;
9305 }
9306
9307 /* If there are quad registers defined by the stub, use
9308 their type; otherwise (normally) provide them with
9309 the default type. */
9310 if (tdesc_unnumbered_register (feature, "q0") == 0)
9311 have_neon_pseudos = 1;
9312
9313 have_neon = 1;
9314 }
9315 }
123dc839 9316 }
39bbf761 9317
28e97307
DJ
9318 /* If there is already a candidate, use it. */
9319 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9320 best_arch != NULL;
9321 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9322 {
b8926edc
DJ
9323 if (arm_abi != ARM_ABI_AUTO
9324 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9325 continue;
9326
b8926edc
DJ
9327 if (fp_model != ARM_FLOAT_AUTO
9328 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9329 continue;
9330
58d6951d
DJ
9331 /* There are various other properties in tdep that we do not
9332 need to check here: those derived from a target description,
9333 since gdbarches with a different target description are
9334 automatically disqualified. */
9335
9779414d
DJ
9336 /* Do check is_m, though, since it might come from the binary. */
9337 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9338 continue;
9339
28e97307
DJ
9340 /* Found a match. */
9341 break;
9342 }
97e03143 9343
28e97307 9344 if (best_arch != NULL)
123dc839
DJ
9345 {
9346 if (tdesc_data != NULL)
9347 tdesc_data_cleanup (tdesc_data);
9348 return best_arch->gdbarch;
9349 }
28e97307 9350
8d749320 9351 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9352 gdbarch = gdbarch_alloc (&info, tdep);
9353
28e97307
DJ
9354 /* Record additional information about the architecture we are defining.
9355 These are gdbarch discriminators, like the OSABI. */
9356 tdep->arm_abi = arm_abi;
9357 tdep->fp_model = fp_model;
9779414d 9358 tdep->is_m = is_m;
ff6f572f 9359 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9360 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9361 gdb_assert (vfp_register_count == 0
9362 || vfp_register_count == 16
9363 || vfp_register_count == 32);
9364 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9365 tdep->have_vfp_pseudos = have_vfp_pseudos;
9366 tdep->have_neon_pseudos = have_neon_pseudos;
9367 tdep->have_neon = have_neon;
08216dd7 9368
25f8c692
JL
9369 arm_register_g_packet_guesses (gdbarch);
9370
08216dd7 9371 /* Breakpoints. */
9d4fde75 9372 switch (info.byte_order_for_code)
67255d04
RE
9373 {
9374 case BFD_ENDIAN_BIG:
66e810cd
RE
9375 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9376 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9377 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9378 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9379
67255d04
RE
9380 break;
9381
9382 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9383 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9384 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9385 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9386 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9387
67255d04
RE
9388 break;
9389
9390 default:
9391 internal_error (__FILE__, __LINE__,
edefbb7c 9392 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9393 }
9394
d7b486e7
RE
9395 /* On ARM targets char defaults to unsigned. */
9396 set_gdbarch_char_signed (gdbarch, 0);
9397
cca44b1b
JB
9398 /* Note: for displaced stepping, this includes the breakpoint, and one word
9399 of additional scratch space. This setting isn't used for anything beside
9400 displaced stepping at present. */
9401 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9402
9df628e0 9403 /* This should be low enough for everything. */
97e03143 9404 tdep->lowest_pc = 0x20;
94c30b78 9405 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9406
7c00367c
MK
9407 /* The default, for both APCS and AAPCS, is to return small
9408 structures in registers. */
9409 tdep->struct_return = reg_struct_return;
9410
2dd604e7 9411 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9412 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9413
7eb89530
YQ
9414 if (is_m)
9415 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9416
756fe439
DJ
9417 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9418
148754e5 9419 /* Frame handling. */
a262aec2 9420 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9421 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9422 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9423
eb5492fa 9424 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9425
34e8f22d 9426 /* Address manipulation. */
34e8f22d
RE
9427 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9428
34e8f22d
RE
9429 /* Advance PC across function entry code. */
9430 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9431
c9cf6e20
MG
9432 /* Detect whether PC is at a point where the stack has been destroyed. */
9433 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9434
190dce09
UW
9435 /* Skip trampolines. */
9436 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9437
34e8f22d
RE
9438 /* The stack grows downward. */
9439 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9440
9441 /* Breakpoint manipulation. */
04180708
YQ
9442 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9443 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
833b7ab5
YQ
9444 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9445 arm_breakpoint_kind_from_current_state);
34e8f22d
RE
9446
9447 /* Information about registers, etc. */
34e8f22d
RE
9448 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9449 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9450 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9451 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9452 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9453
ff6f572f
DJ
9454 /* This "info float" is FPA-specific. Use the generic version if we
9455 do not have FPA. */
9456 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9457 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9458
26216b98 9459 /* Internal <-> external register number maps. */
ff6f572f 9460 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9461 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9462
34e8f22d
RE
9463 set_gdbarch_register_name (gdbarch, arm_register_name);
9464
9465 /* Returning results. */
2af48f68 9466 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9467
03d48a7d
RE
9468 /* Disassembly. */
9469 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9470
34e8f22d
RE
9471 /* Minsymbol frobbing. */
9472 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9473 set_gdbarch_coff_make_msymbol_special (gdbarch,
9474 arm_coff_make_msymbol_special);
60c5725c 9475 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9476
f9d67f43
DJ
9477 /* Thumb-2 IT block support. */
9478 set_gdbarch_adjust_breakpoint_address (gdbarch,
9479 arm_adjust_breakpoint_address);
9480
0d5de010
DJ
9481 /* Virtual tables. */
9482 set_gdbarch_vbit_in_delta (gdbarch, 1);
9483
97e03143 9484 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9485 gdbarch_init_osabi (info, gdbarch);
97e03143 9486
b39cc962
DJ
9487 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9488
eb5492fa 9489 /* Add some default predicates. */
2ae28aa9
YQ
9490 if (is_m)
9491 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9492 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9493 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9494 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9495 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9496 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9497
97e03143
RE
9498 /* Now we have tuned the configuration, set a few final things,
9499 based on what the OS ABI has told us. */
9500
b8926edc
DJ
9501 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9502 binaries are always marked. */
9503 if (tdep->arm_abi == ARM_ABI_AUTO)
9504 tdep->arm_abi = ARM_ABI_APCS;
9505
e3039479
UW
9506 /* Watchpoints are not steppable. */
9507 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9508
b8926edc
DJ
9509 /* We used to default to FPA for generic ARM, but almost nobody
9510 uses that now, and we now provide a way for the user to force
9511 the model. So default to the most useful variant. */
9512 if (tdep->fp_model == ARM_FLOAT_AUTO)
9513 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9514
9df628e0
RE
9515 if (tdep->jb_pc >= 0)
9516 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9517
08216dd7 9518 /* Floating point sizes and format. */
8da61cc4 9519 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9520 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9521 {
8da61cc4
DJ
9522 set_gdbarch_double_format
9523 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9524 set_gdbarch_long_double_format
9525 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9526 }
9527 else
9528 {
9529 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9530 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9531 }
9532
58d6951d
DJ
9533 if (have_vfp_pseudos)
9534 {
9535 /* NOTE: These are the only pseudo registers used by
9536 the ARM target at the moment. If more are added, a
9537 little more care in numbering will be needed. */
9538
9539 int num_pseudos = 32;
9540 if (have_neon_pseudos)
9541 num_pseudos += 16;
9542 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9543 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9544 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9545 }
9546
123dc839 9547 if (tdesc_data)
58d6951d
DJ
9548 {
9549 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9550
9779414d 9551 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9552
9553 /* Override tdesc_register_type to adjust the types of VFP
9554 registers for NEON. */
9555 set_gdbarch_register_type (gdbarch, arm_register_type);
9556 }
123dc839
DJ
9557
9558 /* Add standard register aliases. We add aliases even for those
9559 names which are used by the current architecture - it's simpler,
9560 and does no harm, since nothing ever lists user registers. */
9561 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9562 user_reg_add (gdbarch, arm_register_aliases[i].name,
9563 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9564
39bbf761
RE
9565 return gdbarch;
9566}
9567
97e03143 9568static void
2af46ca0 9569arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9570{
2af46ca0 9571 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9572
9573 if (tdep == NULL)
9574 return;
9575
edefbb7c 9576 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9577 (unsigned long) tdep->lowest_pc);
9578}
9579
a78f21af
AC
9580extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9581
c906108c 9582void
ed9a39eb 9583_initialize_arm_tdep (void)
c906108c 9584{
bc90b915
FN
9585 struct ui_file *stb;
9586 long length;
53904c9e
AC
9587 const char *setname;
9588 const char *setdesc;
4bd7b427 9589 const char *const *regnames;
bec2ab5a 9590 int i;
bc90b915 9591 static char *helptext;
edefbb7c
AC
9592 char regdesc[1024], *rdptr = regdesc;
9593 size_t rest = sizeof (regdesc);
085dd6e6 9594
42cf1509 9595 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9596
60c5725c 9597 arm_objfile_data_key
c1bd65d0 9598 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9599
0e9e9abd
UW
9600 /* Add ourselves to objfile event chain. */
9601 observer_attach_new_objfile (arm_exidx_new_objfile);
9602 arm_exidx_data_key
9603 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9604
70f80edf
JT
9605 /* Register an ELF OS ABI sniffer for ARM binaries. */
9606 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9607 bfd_target_elf_flavour,
9608 arm_elf_osabi_sniffer);
9609
9779414d
DJ
9610 /* Initialize the standard target descriptions. */
9611 initialize_tdesc_arm_with_m ();
25f8c692 9612 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9613 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9614 initialize_tdesc_arm_with_iwmmxt ();
9615 initialize_tdesc_arm_with_vfpv2 ();
9616 initialize_tdesc_arm_with_vfpv3 ();
9617 initialize_tdesc_arm_with_neon ();
9779414d 9618
94c30b78 9619 /* Get the number of possible sets of register names defined in opcodes. */
afd7eef0
RE
9620 num_disassembly_options = get_arm_regname_num_options ();
9621
9622 /* Add root prefix command for all "set arm"/"show arm" commands. */
9623 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9624 _("Various ARM-specific commands."),
afd7eef0
RE
9625 &setarmcmdlist, "set arm ", 0, &setlist);
9626
9627 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9628 _("Various ARM-specific commands."),
afd7eef0 9629 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9630
94c30b78 9631 /* Sync the opcode insn printer with our register viewer. */
bc90b915 9632 parse_arm_disassembler_option ("reg-names-std");
c5aa993b 9633
eefe576e
AC
9634 /* Initialize the array that will be passed to
9635 add_setshow_enum_cmd(). */
8d749320
SM
9636 valid_disassembly_styles = XNEWVEC (const char *,
9637 num_disassembly_options + 1);
afd7eef0 9638 for (i = 0; i < num_disassembly_options; i++)
bc90b915 9639 {
bec2ab5a 9640 get_arm_regnames (i, &setname, &setdesc, &regnames);
afd7eef0 9641 valid_disassembly_styles[i] = setname;
edefbb7c
AC
9642 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9643 rdptr += length;
9644 rest -= length;
123dc839
DJ
9645 /* When we find the default names, tell the disassembler to use
9646 them. */
bc90b915
FN
9647 if (!strcmp (setname, "std"))
9648 {
afd7eef0 9649 disassembly_style = setname;
bc90b915
FN
9650 set_arm_regname_option (i);
9651 }
9652 }
94c30b78 9653 /* Mark the end of valid options. */
afd7eef0 9654 valid_disassembly_styles[num_disassembly_options] = NULL;
c906108c 9655
edefbb7c
AC
9656 /* Create the help text. */
9657 stb = mem_fileopen ();
9658 fprintf_unfiltered (stb, "%s%s%s",
9659 _("The valid values are:\n"),
9660 regdesc,
9661 _("The default is \"std\"."));
759ef836 9662 helptext = ui_file_xstrdup (stb, NULL);
bc90b915 9663 ui_file_delete (stb);
ed9a39eb 9664
edefbb7c
AC
9665 add_setshow_enum_cmd("disassembler", no_class,
9666 valid_disassembly_styles, &disassembly_style,
9667 _("Set the disassembly style."),
9668 _("Show the disassembly style."),
9669 helptext,
2c5b56ce 9670 set_disassembly_style_sfunc,
0963b4bd
MS
9671 NULL, /* FIXME: i18n: The disassembly style is
9672 \"%s\". */
7376b4c2 9673 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9674
9675 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9676 _("Set usage of ARM 32-bit mode."),
9677 _("Show usage of ARM 32-bit mode."),
9678 _("When off, a 26-bit PC will be used."),
2c5b56ce 9679 NULL,
0963b4bd
MS
9680 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9681 mode is %s. */
26304000 9682 &setarmcmdlist, &showarmcmdlist);
c906108c 9683
fd50bc42 9684 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9685 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9686 _("Set the floating point type."),
9687 _("Show the floating point type."),
9688 _("auto - Determine the FP typefrom the OS-ABI.\n\
9689softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9690fpa - FPA co-processor (GCC compiled).\n\
9691softvfp - Software FP with pure-endian doubles.\n\
9692vfp - VFP co-processor."),
edefbb7c 9693 set_fp_model_sfunc, show_fp_model,
7376b4c2 9694 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9695
28e97307
DJ
9696 /* Add a command to allow the user to force the ABI. */
9697 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9698 _("Set the ABI."),
9699 _("Show the ABI."),
9700 NULL, arm_set_abi, arm_show_abi,
9701 &setarmcmdlist, &showarmcmdlist);
9702
0428b8f5
DJ
9703 /* Add two commands to allow the user to force the assumed
9704 execution mode. */
9705 add_setshow_enum_cmd ("fallback-mode", class_support,
9706 arm_mode_strings, &arm_fallback_mode_string,
9707 _("Set the mode assumed when symbols are unavailable."),
9708 _("Show the mode assumed when symbols are unavailable."),
9709 NULL, NULL, arm_show_fallback_mode,
9710 &setarmcmdlist, &showarmcmdlist);
9711 add_setshow_enum_cmd ("force-mode", class_support,
9712 arm_mode_strings, &arm_force_mode_string,
9713 _("Set the mode assumed even when symbols are available."),
9714 _("Show the mode assumed even when symbols are available."),
9715 NULL, NULL, arm_show_force_mode,
9716 &setarmcmdlist, &showarmcmdlist);
9717
6529d2dd 9718 /* Debugging flag. */
edefbb7c
AC
9719 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9720 _("Set ARM debugging."),
9721 _("Show ARM debugging."),
9722 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9723 NULL,
7915a72c 9724 NULL, /* FIXME: i18n: "ARM debugging is %s." */
26304000 9725 &setdebuglist, &showdebuglist);
c906108c 9726}
72508ac0
PO
9727
9728/* ARM-reversible process record data structures. */
9729
9730#define ARM_INSN_SIZE_BYTES 4
9731#define THUMB_INSN_SIZE_BYTES 2
9732#define THUMB2_INSN_SIZE_BYTES 4
9733
9734
71e396f9
LM
9735/* Position of the bit within a 32-bit ARM instruction
9736 that defines whether the instruction is a load or store. */
72508ac0
PO
9737#define INSN_S_L_BIT_NUM 20
9738
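/* Helper macros for the record functions below. REG_ALLOC copies LENGTH
   register numbers from RECORD_BUF into a freshly allocated uint32_t
   array; MEM_ALLOC copies LENGTH (length, address) pairs from RECORD_BUF
   into a freshly allocated array of struct arm_mem_r. */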
9739#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9740 do \
9741 { \
9742 unsigned int reg_len = LENGTH; \
9743 if (reg_len) \
9744 { \
9745 REGS = XNEWVEC (uint32_t, reg_len); \
9746 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9747 } \
9748 } \
9749 while (0)
9750
9751#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9752 do \
9753 { \
9754 unsigned int mem_len = LENGTH; \
9755 if (mem_len) \
9756 { \
9757 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9758 memcpy(&MEMS->len, &RECORD_BUF[0], \
9759 sizeof(struct arm_mem_r) * LENGTH); \
9760 } \
9761 } \
9762 while (0)
9763
9764/* Non-zero if the insn has already been recorded, i.e. if any register or memory records have been noted for it (boolean expression). */
9765#define INSN_RECORDED(ARM_RECORD) \
9766 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9767
9768/* ARM memory record structure. */
9769struct arm_mem_r
9770{
9771 uint32_t len; /* Record length. */
bfbbec00 9772 uint32_t addr; /* Memory address. */
72508ac0
PO
9773};
9774
9775/* An ARM instruction record holds the opcode and execution state
9776 of the current insn (filled in before entry to decode_insn()),
9777 and the lists of to-be-modified registers and
9778 memory blocks (filled in on return from decode_insn()). */
9779
9780typedef struct insn_decode_record_t
9781{
9782 struct gdbarch *gdbarch;
9783 struct regcache *regcache;
9784 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9785 uint32_t arm_insn; /* Should accommodate thumb. */
9786 uint32_t cond; /* Condition code. */
9787 uint32_t opcode; /* Insn opcode. */
9788 uint32_t decode; /* Insn decode bits. */
9789 uint32_t mem_rec_count; /* No of mem records. */
9790 uint32_t reg_rec_count; /* No of reg records. */
9791 uint32_t *arm_regs; /* Registers to be saved for this record. */
9792 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9793} insn_decode_record;
9794
9795
9796/* Checks ARM SBZ and SBO mandatory fields. */
9797
9798static int
9799sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9800{
9801 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9802
9803 if (!len)
9804 return 1;
9805
9806 if (!sbo)
9807 ones = ~ones;
9808
9809 while (ones)
9810 {
9811 if (!(ones & sbo))
9812 {
9813 return 0;
9814 }
9815 ones = ones >> 1;
9816 }
9817 return 1;
9818}
9819
c6ec2b30
OJ
9820enum arm_record_result
9821{
9822 ARM_RECORD_SUCCESS = 0,
9823 ARM_RECORD_FAILURE = 1
9824};
9825
72508ac0
PO
9826typedef enum
9827{
9828 ARM_RECORD_STRH=1,
9829 ARM_RECORD_STRD
9830} arm_record_strx_t;
9831
9832typedef enum
9833{
9834 ARM_RECORD=1,
9835 THUMB_RECORD,
9836 THUMB2_RECORD
9837} record_type_t;
9838
9839
9840static int
9841arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9842 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9843{
9844
9845 struct regcache *reg_cache = arm_insn_r->regcache;
9846 ULONGEST u_regval[2]= {0};
9847
9848 uint32_t reg_src1 = 0, reg_src2 = 0;
9849 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9850
9851 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9852 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9853
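 /* In the misc load/store encodings, bits 21-24 are the addressing-mode
 bits (W, I, U and P, from bit 21 upwards); the opcode tests below use
 them to distinguish immediate vs. register offsets and the offset,
 pre-indexed and post-indexed forms. */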
9854 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9855 {
9856 /* 1) Handle misc store, immediate offset. */
9857 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9858 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9859 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9860 regcache_raw_read_unsigned (reg_cache, reg_src1,
9861 &u_regval[0]);
9862 if (ARM_PC_REGNUM == reg_src1)
9863 {
9864 /* If R15 was used as Rn, its architectural value is the current PC+8. */
9865 u_regval[0] = u_regval[0] + 8;
9866 }
9867 offset_8 = (immed_high << 4) | immed_low;
9868 /* Calculate target store address. */
9869 if (14 == arm_insn_r->opcode)
9870 {
9871 tgt_mem_addr = u_regval[0] + offset_8;
9872 }
9873 else
9874 {
9875 tgt_mem_addr = u_regval[0] - offset_8;
9876 }
9877 if (ARM_RECORD_STRH == str_type)
9878 {
9879 record_buf_mem[0] = 2;
9880 record_buf_mem[1] = tgt_mem_addr;
9881 arm_insn_r->mem_rec_count = 1;
9882 }
9883 else if (ARM_RECORD_STRD == str_type)
9884 {
9885 record_buf_mem[0] = 4;
9886 record_buf_mem[1] = tgt_mem_addr;
9887 record_buf_mem[2] = 4;
9888 record_buf_mem[3] = tgt_mem_addr + 4;
9889 arm_insn_r->mem_rec_count = 2;
9890 }
9891 }
9892 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9893 {
9894 /* 2) Store, register offset. */
9895 /* Get Rm. */
9896 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9897 /* Get Rn. */
9898 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9899 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9900 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9901 if (15 == reg_src2)
9902 {
9903 /* If R15 was used as Rn, its architectural value is the current PC+8. */
9904 u_regval[1] = u_regval[1] + 8;
9905 }
9906 /* Calculate target store address, Rn +/- Rm, register offset. */
9907 if (12 == arm_insn_r->opcode)
9908 {
9909 tgt_mem_addr = u_regval[0] + u_regval[1];
9910 }
9911 else
9912 {
9913 tgt_mem_addr = u_regval[1] - u_regval[0];
9914 }
9915 if (ARM_RECORD_STRH == str_type)
9916 {
9917 record_buf_mem[0] = 2;
9918 record_buf_mem[1] = tgt_mem_addr;
9919 arm_insn_r->mem_rec_count = 1;
9920 }
9921 else if (ARM_RECORD_STRD == str_type)
9922 {
9923 record_buf_mem[0] = 4;
9924 record_buf_mem[1] = tgt_mem_addr;
9925 record_buf_mem[2] = 4;
9926 record_buf_mem[3] = tgt_mem_addr + 4;
9927 arm_insn_r->mem_rec_count = 2;
9928 }
9929 }
9930 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9931 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9932 {
9933 /* 3) Store, immediate pre-indexed. */
9934 /* 5) Store, immediate post-indexed. */
9935 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9936 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9937 offset_8 = (immed_high << 4) | immed_low;
9938 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9939 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9940 /* Calculate target store address, Rn +/- Rm, register offset. */
9941 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9942 {
9943 tgt_mem_addr = u_regval[0] + offset_8;
9944 }
9945 else
9946 {
9947 tgt_mem_addr = u_regval[0] - offset_8;
9948 }
9949 if (ARM_RECORD_STRH == str_type)
9950 {
9951 record_buf_mem[0] = 2;
9952 record_buf_mem[1] = tgt_mem_addr;
9953 arm_insn_r->mem_rec_count = 1;
9954 }
9955 else if (ARM_RECORD_STRD == str_type)
9956 {
9957 record_buf_mem[0] = 4;
9958 record_buf_mem[1] = tgt_mem_addr;
9959 record_buf_mem[2] = 4;
9960 record_buf_mem[3] = tgt_mem_addr + 4;
9961 arm_insn_r->mem_rec_count = 2;
9962 }
9963 /* Record Rn also as it changes. */
9964 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9965 arm_insn_r->reg_rec_count = 1;
9966 }
9967 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9968 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9969 {
9970 /* 4) Store, register pre-indexed. */
9971 /* 6) Store, register post-indexed. */
9972 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9973 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9974 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9975 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9976 /* Calculate target store address, Rn +/- Rm, register offset. */
9977 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9978 {
9979 tgt_mem_addr = u_regval[0] + u_regval[1];
9980 }
9981 else
9982 {
9983 tgt_mem_addr = u_regval[1] - u_regval[0];
9984 }
9985 if (ARM_RECORD_STRH == str_type)
9986 {
9987 record_buf_mem[0] = 2;
9988 record_buf_mem[1] = tgt_mem_addr;
9989 arm_insn_r->mem_rec_count = 1;
9990 }
9991 else if (ARM_RECORD_STRD == str_type)
9992 {
9993 record_buf_mem[0] = 4;
9994 record_buf_mem[1] = tgt_mem_addr;
9995 record_buf_mem[2] = 4;
9996 record_buf_mem[3] = tgt_mem_addr + 4;
9997 arm_insn_r->mem_rec_count = 2;
9998 }
9999 /* Record Rn also as it changes. */
10000 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10001 arm_insn_r->reg_rec_count = 1;
10002 }
10003 return 0;
10004}
10005
10006/* Handling ARM extension space insns. */
10007
10008static int
10009arm_record_extension_space (insn_decode_record *arm_insn_r)
10010{
10011 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10012 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10013 uint32_t record_buf[8], record_buf_mem[8];
10014 uint32_t reg_src1 = 0;
72508ac0
PO
10015 struct regcache *reg_cache = arm_insn_r->regcache;
10016 ULONGEST u_regval = 0;
10017
10018 gdb_assert (!INSN_RECORDED(arm_insn_r));
10019 /* Handle unconditional insn extension space. */
10020
10021 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10022 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10023 if (arm_insn_r->cond)
10024 {
10025 /* PLD has no effect on architectural state, it just affects
10026 the caches. */
10027 if (5 == ((opcode1 & 0xE0) >> 5))
10028 {
10029 /* BLX(1) */
10030 record_buf[0] = ARM_PS_REGNUM;
10031 record_buf[1] = ARM_LR_REGNUM;
10032 arm_insn_r->reg_rec_count = 2;
10033 }
10034 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10035 }
10036
10037
10038 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10039 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10040 {
10041 ret = -1;
10042 /* Undefined instruction on ARM V5; need to handle if later
10043 versions define it. */
10044 }
10045
10046 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10047 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10048 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10049
10050 /* Handle arithmetic insn extension space. */
10051 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10052 && !INSN_RECORDED(arm_insn_r))
10053 {
10054 /* Handle MLA(S) and MUL(S). */
10055 if (0 <= insn_op1 && 3 >= insn_op1)
10056 {
10057 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10058 record_buf[1] = ARM_PS_REGNUM;
10059 arm_insn_r->reg_rec_count = 2;
10060 }
10061 else if (4 <= insn_op1 && 15 >= insn_op1)
10062 {
10063 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10064 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10065 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10066 record_buf[2] = ARM_PS_REGNUM;
10067 arm_insn_r->reg_rec_count = 3;
10068 }
10069 }
10070
10071 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10072 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10073 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10074
10075 /* Handle control insn extension space. */
10076
10077 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10078 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10079 {
10080 if (!bit (arm_insn_r->arm_insn,25))
10081 {
10082 if (!bits (arm_insn_r->arm_insn, 4, 7))
10083 {
10084 if ((0 == insn_op1) || (2 == insn_op1))
10085 {
10086 /* MRS. */
10087 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10088 arm_insn_r->reg_rec_count = 1;
10089 }
10090 else if (1 == insn_op1)
10091 {
10092 /* CPSR is going to be changed. */
10093 record_buf[0] = ARM_PS_REGNUM;
10094 arm_insn_r->reg_rec_count = 1;
10095 }
10096 else if (3 == insn_op1)
10097 {
10098 /* SPSR is going to be changed. */
10099 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
10100 return -1;
10101 }
10102 }
10103 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10104 {
10105 if (1 == insn_op1)
10106 {
10107 /* BX. */
10108 record_buf[0] = ARM_PS_REGNUM;
10109 arm_insn_r->reg_rec_count = 1;
10110 }
10111 else if (3 == insn_op1)
10112 {
10113 /* CLZ. */
10114 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10115 arm_insn_r->reg_rec_count = 1;
10116 }
10117 }
10118 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10119 {
10120 /* BLX. */
10121 record_buf[0] = ARM_PS_REGNUM;
10122 record_buf[1] = ARM_LR_REGNUM;
10123 arm_insn_r->reg_rec_count = 2;
10124 }
10125 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10126 {
10127 /* QADD, QSUB, QDADD, QDSUB */
10128 record_buf[0] = ARM_PS_REGNUM;
10129 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10130 arm_insn_r->reg_rec_count = 2;
10131 }
10132 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10133 {
10134 /* BKPT. */
10135 record_buf[0] = ARM_PS_REGNUM;
10136 record_buf[1] = ARM_LR_REGNUM;
10137 arm_insn_r->reg_rec_count = 2;
10138
10139 /* Save SPSR also; how? */
72508ac0
PO
10140 return -1;
10141 }
10142 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10143 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10144 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10145 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10146 )
10147 {
10148 if (0 == insn_op1 || 1 == insn_op1)
10149 {
10150 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10151 /* We don't optimize for SMULW<y>, where we
10152 need only Rd. */
10153 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10154 record_buf[1] = ARM_PS_REGNUM;
10155 arm_insn_r->reg_rec_count = 2;
10156 }
10157 else if (2 == insn_op1)
10158 {
10159 /* SMLAL<x><y>. */
10160 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10161 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10162 arm_insn_r->reg_rec_count = 2;
10163 }
10164 else if (3 == insn_op1)
10165 {
10166 /* SMUL<x><y>. */
10167 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10168 arm_insn_r->reg_rec_count = 1;
10169 }
10170 }
10171 }
10172 else
10173 {
10174 /* MSR : immediate form. */
10175 if (1 == insn_op1)
10176 {
10177 /* CPSR is going to be changed. */
10178 record_buf[0] = ARM_PS_REGNUM;
10179 arm_insn_r->reg_rec_count = 1;
10180 }
10181 else if (3 == insn_op1)
10182 {
10183 /* SPSR is going to be changed. */
10184 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
10185 return -1;
10186 }
10187 }
10188 }
10189
10190 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10191 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10192 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10193
10194 /* Handle load/store insn extension space. */
10195
10196 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10197 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10198 && !INSN_RECORDED(arm_insn_r))
10199 {
10200 /* SWP/SWPB. */
10201 if (0 == insn_op1)
10202 {
10203 /* These insns change both a register and memory. */
10204 /* SWP or SWPB insn. */
10205 /* Get memory address given by Rn. */
10206 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10207 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10208 /* SWP insn?  It swaps a word. */
10209 if (8 == arm_insn_r->opcode)
10210 {
10211 record_buf_mem[0] = 4;
10212 }
10213 else
10214 {
10215 /* SWPB insn, swaps only byte. */
10216 record_buf_mem[0] = 1;
10217 }
10218 record_buf_mem[1] = u_regval;
10219 arm_insn_r->mem_rec_count = 1;
10220 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10221 arm_insn_r->reg_rec_count = 1;
10222 }
10223 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10224 {
10225 /* STRH. */
10226 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10227 ARM_RECORD_STRH);
10228 }
10229 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10230 {
10231 /* LDRD. */
10232 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10233 record_buf[1] = record_buf[0] + 1;
10234 arm_insn_r->reg_rec_count = 2;
10235 }
10236 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10237 {
10238 /* STRD. */
10239 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10240 ARM_RECORD_STRD);
10241 }
10242 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10243 {
10244 /* LDRH, LDRSB, LDRSH. */
10245 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10246 arm_insn_r->reg_rec_count = 1;
10247 }
10248
10249 }
10250
10251 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10252 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10253 && !INSN_RECORDED(arm_insn_r))
10254 {
10255 ret = -1;
10256 /* Handle coprocessor insn extension space. */
10257 }
10258
10259 /* To be done for ARMv5 and later; as of now we return -1. */
10260 if (-1 == ret)
ca92db2d 10261 return ret;
72508ac0
PO
10262
10263 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10264 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10265
10266 return ret;
10267}
10268
10269/* Handling opcode 000 insns. */
10270
10271static int
10272arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10273{
10274 struct regcache *reg_cache = arm_insn_r->regcache;
10275 uint32_t record_buf[8], record_buf_mem[8];
10276 ULONGEST u_regval[2] = {0};
10277
bec2ab5a 10278 uint32_t reg_src1 = 0, reg_dest = 0;
72508ac0
PO
10279 uint32_t opcode1 = 0;
10280
10281 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10282 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10283 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10284
10285 /* Data processing insn /multiply insn. */
10286 if (9 == arm_insn_r->decode
10287 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10288 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10289 {
10290 /* Handle multiply instructions. */
10291 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10292 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10293 {
10294 /* Handle MLA and MUL. */
10295 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10296 record_buf[1] = ARM_PS_REGNUM;
10297 arm_insn_r->reg_rec_count = 2;
10298 }
10299 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10300 {
10301 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10302 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10303 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10304 record_buf[2] = ARM_PS_REGNUM;
10305 arm_insn_r->reg_rec_count = 3;
10306 }
10307 }
10308 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10309 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10310 {
10311 /* Handle misc load insns, as 20th bit (L = 1). */
10312 /* The LDR insn is capable of branching: when it loads into R15
10313 (the PC) and is preceded by MOV LR, PC, the pair emulates a
10314 branch and link insn, and hence we need to save the CPSR and PC
10315 as well.  I am not sure this is the right place; the opcode = 010
10316 LDR insns make this happen when R15 is the destination
10317 register. */
10318 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10319 if (15 != reg_dest)
10320 {
10321 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10322 arm_insn_r->reg_rec_count = 1;
10323 }
10324 else
10325 {
10326 record_buf[0] = reg_dest;
10327 record_buf[1] = ARM_PS_REGNUM;
10328 arm_insn_r->reg_rec_count = 2;
10329 }
10330 }
10331 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10332 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10333 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10334 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10335 {
10336 /* Handle MSR insn. */
10337 if (9 == arm_insn_r->opcode)
10338 {
10339 /* CPSR is going to be changed. */
10340 record_buf[0] = ARM_PS_REGNUM;
10341 arm_insn_r->reg_rec_count = 1;
10342 }
10343 else
10344 {
10345 /* SPSR is going to be changed. */
10346 /* How to read SPSR value? */
72508ac0
PO
10347 return -1;
10348 }
10349 }
10350 else if (9 == arm_insn_r->decode
10351 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10352 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10353 {
10354 /* Handling SWP, SWPB. */
10355 /* These insns change both a register and memory. */
10356 /* SWP or SWPB insn. */
10357
10358 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10359 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10360 /* SWP insn?  It swaps a word. */
10361 if (8 == arm_insn_r->opcode)
10362 {
10363 record_buf_mem[0] = 4;
10364 }
10365 else
10366 {
10367 /* SWPB insn, swaps only byte. */
10368 record_buf_mem[0] = 1;
10369 }
10370 record_buf_mem[1] = u_regval[0];
10371 arm_insn_r->mem_rec_count = 1;
10372 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10373 arm_insn_r->reg_rec_count = 1;
10374 }
10375 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10376 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10377 {
10378 /* Handle BLX, branch and link/exchange. */
10379 if (9 == arm_insn_r->opcode)
10380 {
10381 /* The T bit of the CPSR is set from bit[0] of Rm, and R14
10382 stores the return address. */
10383 record_buf[0] = ARM_PS_REGNUM;
10384 record_buf[1] = ARM_LR_REGNUM;
10385 arm_insn_r->reg_rec_count = 2;
10386 }
10387 }
10388 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10389 {
10390 /* Handle enhanced software breakpoint insn, BKPT. */
10391 /* CPSR is changed to be executed in ARM state, disabling normal
10392 interrupts, entering abort mode. */
10393 /* The PC is set according to the high vector configuration. */
10394 /* If the user hits the breakpoint and types reverse, we need to
10395 go back with the previous CPSR and program counter in that
10396 case. */
10397 record_buf[0] = ARM_PS_REGNUM;
10398 record_buf[1] = ARM_LR_REGNUM;
10399 arm_insn_r->reg_rec_count = 2;
10400
10401 /* Save SPSR also; how? */
72508ac0
PO
10402 return -1;
10403 }
10404 else if (11 == arm_insn_r->decode
10405 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10406 {
10407 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10408
10409 /* Handle str(x) insn */
10410 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10411 ARM_RECORD_STRH);
10412 }
10413 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10414 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10415 {
10416 /* Handle BX, branch and link/exchange. */
10417 /* The T bit of the CPSR is set from bit[0] of Rm. */
10418 record_buf[0] = ARM_PS_REGNUM;
10419 arm_insn_r->reg_rec_count = 1;
10420 }
10421 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10422 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10423 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10424 {
10425 /* Count leading zeros: CLZ. */
10426 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10427 arm_insn_r->reg_rec_count = 1;
10428 }
10429 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10430 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10431 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10432 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10433 )
10434 {
10435 /* Handle MRS insn. */
10436 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10437 arm_insn_r->reg_rec_count = 1;
10438 }
10439 else if (arm_insn_r->opcode <= 15)
10440 {
10441 /* Normal data processing insns. */
10442 /* In all of the 11 shifter operand addressing modes, the insn
10443 modifies the destination register, which is specified by bits 12-15. */
10444 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10445 record_buf[1] = ARM_PS_REGNUM;
10446 arm_insn_r->reg_rec_count = 2;
10447 }
10448 else
10449 {
10450 return -1;
10451 }
10452
10453 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10454 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10455 return 0;
10456}
10457
10458/* Handling opcode 001 insns. */
10459
10460static int
10461arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10462{
10463 uint32_t record_buf[8], record_buf_mem[8];
10464
10465 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10466 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10467
10468 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10469 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10470 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10471 )
10472 {
10473 /* Handle MSR insn. */
10474 if (9 == arm_insn_r->opcode)
10475 {
10476 /* CPSR is going to be changed. */
10477 record_buf[0] = ARM_PS_REGNUM;
10478 arm_insn_r->reg_rec_count = 1;
10479 }
10480 else
10481 {
10482 /* SPSR is going to be changed. */
10483 }
10484 }
10485 else if (arm_insn_r->opcode <= 15)
10486 {
10487 /* Normal data processing insns. */
10488 /* In all of the 11 shifter operand addressing modes, the insn
10489 modifies the destination register, which is specified by bits 12-15. */
10490 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10491 record_buf[1] = ARM_PS_REGNUM;
10492 arm_insn_r->reg_rec_count = 2;
10493 }
10494 else
10495 {
10496 return -1;
10497 }
10498
10499 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10500 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10501 return 0;
10502}
10503
c55978a6
YQ
10504static int
10505arm_record_media (insn_decode_record *arm_insn_r)
10506{
10507 uint32_t record_buf[8];
10508
10509 switch (bits (arm_insn_r->arm_insn, 22, 24))
10510 {
10511 case 0:
10512 /* Parallel addition and subtraction, signed */
10513 case 1:
10514 /* Parallel addition and subtraction, unsigned */
10515 case 2:
10516 case 3:
10517 /* Packing, unpacking, saturation and reversal */
10518 {
10519 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10520
10521 record_buf[arm_insn_r->reg_rec_count++] = rd;
10522 }
10523 break;
10524
10525 case 4:
10526 case 5:
10527 /* Signed multiplies */
10528 {
10529 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10530 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10531
10532 record_buf[arm_insn_r->reg_rec_count++] = rd;
10533 if (op1 == 0x0)
10534 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10535 else if (op1 == 0x4)
10536 record_buf[arm_insn_r->reg_rec_count++]
10537 = bits (arm_insn_r->arm_insn, 12, 15);
10538 }
10539 break;
10540
10541 case 6:
10542 {
10543 if (bit (arm_insn_r->arm_insn, 21)
10544 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10545 {
10546 /* SBFX */
10547 record_buf[arm_insn_r->reg_rec_count++]
10548 = bits (arm_insn_r->arm_insn, 12, 15);
10549 }
10550 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10551 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10552 {
10553 /* USAD8 and USADA8 */
10554 record_buf[arm_insn_r->reg_rec_count++]
10555 = bits (arm_insn_r->arm_insn, 16, 19);
10556 }
10557 }
10558 break;
10559
10560 case 7:
10561 {
10562 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10563 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10564 {
10565 /* Permanently UNDEFINED */
10566 return -1;
10567 }
10568 else
10569 {
10570 /* BFC, BFI and UBFX */
10571 record_buf[arm_insn_r->reg_rec_count++]
10572 = bits (arm_insn_r->arm_insn, 12, 15);
10573 }
10574 }
10575 break;
10576
10577 default:
10578 return -1;
10579 }
10580
10581 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10582
10583 return 0;
10584}
10585
71e396f9 10586/* Handle ARM mode instructions with opcode 010. */
72508ac0
PO
10587
10588static int
10589arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10590{
10591 struct regcache *reg_cache = arm_insn_r->regcache;
10592
71e396f9
LM
10593 uint32_t reg_base, reg_dest;
10594 uint32_t offset_12, tgt_mem_addr;
72508ac0 10595 uint32_t record_buf[8], record_buf_mem[8];
71e396f9
LM
10596 unsigned char wback;
10597 ULONGEST u_regval;
72508ac0 10598
71e396f9
LM
10599 /* Calculate wback. */
10600 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10601 || (bit (arm_insn_r->arm_insn, 21) == 1);
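/* For example, STR R0, [R1, #4]! (P=1, W=1) and STR R0, [R1], #4
   (P=0) both write back to R1, whereas STR R0, [R1, #4] (P=1, W=0)
   leaves R1 unchanged.  */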
72508ac0 10602
71e396f9
LM
10603 arm_insn_r->reg_rec_count = 0;
10604 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0
PO
10605
10606 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10607 {
71e396f9
LM
10608 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10609 and LDRT. */
10610
72508ac0 10611 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
71e396f9
LM
10612 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10613
10614 /* The LDR instruction is capable of doing branching.  If MOV LR, PC
10615 precedes a LDR instruction that loads R15 (the PC), the pair
10616 emulates a branch and link instruction, and hence we need to save
10617 CPSR and PC as well. */
10618 if (ARM_PC_REGNUM == reg_dest)
10619 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10620
10621 /* If wback is true, also save the base register, which is going to be
10622 written to. */
10623 if (wback)
10624 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10625 }
10626 else
10627 {
71e396f9
LM
10628 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10629
72508ac0 10630 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
71e396f9
LM
10631 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10632
10633 /* Handle bit U. */
72508ac0 10634 if (bit (arm_insn_r->arm_insn, 23))
71e396f9
LM
10635 {
10636 /* U == 1: Add the offset. */
10637 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10638 }
72508ac0 10639 else
71e396f9
LM
10640 {
10641 /* U == 0: Subtract the offset. */
10642 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10643 }
10644
10645 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10646 bytes. */
10647 if (bit (arm_insn_r->arm_insn, 22))
10648 {
10649 /* STRB and STRBT: 1 byte. */
10650 record_buf_mem[0] = 1;
10651 }
10652 else
10653 {
10654 /* STR and STRT: 4 bytes. */
10655 record_buf_mem[0] = 4;
10656 }
10657
10658 /* Handle bit P. */
10659 if (bit (arm_insn_r->arm_insn, 24))
10660 record_buf_mem[1] = tgt_mem_addr;
10661 else
10662 record_buf_mem[1] = (uint32_t) u_regval;
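/* In the post-indexed case (P == 0) the store goes to the original
   base address; the offset is only used to update Rn afterwards.  */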
72508ac0 10663
72508ac0
PO
10664 arm_insn_r->mem_rec_count = 1;
10665
71e396f9
LM
10666 /* If wback is true, also save the base register, which is going to be
10667 written to. */
10668 if (wback)
10669 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10670 }
10671
10672 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10673 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10674 return 0;
10675}
10676
10677/* Handling opcode 011 insns. */
10678
10679static int
10680arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10681{
10682 struct regcache *reg_cache = arm_insn_r->regcache;
10683
10684 uint32_t shift_imm = 0;
10685 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10686 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10687 uint32_t record_buf[8], record_buf_mem[8];
10688
10689 LONGEST s_word;
10690 ULONGEST u_regval[2];
10691
c55978a6
YQ
10692 if (bit (arm_insn_r->arm_insn, 4))
10693 return arm_record_media (arm_insn_r);
10694
72508ac0
PO
10695 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10696 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10697
10698 /* Handle enhanced store insns and LDRD DSP insn,
10699 order begins according to addressing modes for store insns
10700 STRH insn. */
10701
10702 /* LDR or STR? */
10703 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10704 {
10705 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10706 /* The LDR insn is capable of branching: when it loads into R15
10707 (the PC) and is preceded by MOV LR, PC, the pair emulates a
10708 branch and link insn, and hence we need to save the CPSR and
10709 PC as well. */
10710 if (15 != reg_dest)
10711 {
10712 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10713 arm_insn_r->reg_rec_count = 1;
10714 }
10715 else
10716 {
10717 record_buf[0] = reg_dest;
10718 record_buf[1] = ARM_PS_REGNUM;
10719 arm_insn_r->reg_rec_count = 2;
10720 }
10721 }
10722 else
10723 {
10724 if (! bits (arm_insn_r->arm_insn, 4, 11))
10725 {
10726 /* Store insn, register offset and register pre-indexed,
10727 register post-indexed. */
10728 /* Get Rm. */
10729 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10730 /* Get Rn. */
10731 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10732 regcache_raw_read_unsigned (reg_cache, reg_src1
10733 , &u_regval[0]);
10734 regcache_raw_read_unsigned (reg_cache, reg_src2
10735 , &u_regval[1]);
10736 if (15 == reg_src2)
10737 {
10738 /* If R15 was used as Rn, the value is the current PC+8. */
10739 /* Pre-indexed mode doesn't reach here; it would be an illegal insn. */
10740 u_regval[0] = u_regval[0] + 8;
10741 }
10742 /* Calculate target store address, Rn +/- Rm, register offset. */
10743 /* U == 1. */
10744 if (bit (arm_insn_r->arm_insn, 23))
10745 {
10746 tgt_mem_addr = u_regval[0] + u_regval[1];
10747 }
10748 else
10749 {
10750 tgt_mem_addr = u_regval[1] - u_regval[0];
10751 }
10752
10753 switch (arm_insn_r->opcode)
10754 {
10755 /* STR. */
10756 case 8:
10757 case 12:
10758 /* STR. */
10759 case 9:
10760 case 13:
10761 /* STRT. */
10762 case 1:
10763 case 5:
10764 /* STR. */
10765 case 0:
10766 case 4:
10767 record_buf_mem[0] = 4;
10768 break;
10769
10770 /* STRB. */
10771 case 10:
10772 case 14:
10773 /* STRB. */
10774 case 11:
10775 case 15:
10776 /* STRBT. */
10777 case 3:
10778 case 7:
10779 /* STRB. */
10780 case 2:
10781 case 6:
10782 record_buf_mem[0] = 1;
10783 break;
10784
10785 default:
10786 gdb_assert_not_reached ("no decoding pattern found");
10787 break;
10788 }
10789 record_buf_mem[1] = tgt_mem_addr;
10790 arm_insn_r->mem_rec_count = 1;
10791
10792 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10793 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10794 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10795 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10796 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10797 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10798 )
10799 {
10800 /* Rn is going to be changed in pre-indexed mode and
10801 post-indexed mode as well. */
10802 record_buf[0] = reg_src2;
10803 arm_insn_r->reg_rec_count = 1;
10804 }
10805 }
10806 else
10807 {
10808 /* Store insn, scaled register offset; scaled pre-indexed. */
10809 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10810 /* Get Rm. */
10811 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10812 /* Get Rn. */
10813 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10814 /* Get shift_imm. */
10815 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10816 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10817 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10818 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10819 /* offset_12 temporarily holds the shift type decoded from bits 5-6. */
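/* Shift types: 0 = LSL, 1 = LSR, 2 = ASR, 3 = ROR (or RRX when the
   immediate shift amount is zero).  Each case below leaves the
   shifted value of Rm in offset_12.  */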
10820 switch (offset_12)
10821 {
10822 case 0:
10823 /* LSL: offset_12 becomes the scaled index. */
10824 offset_12 = u_regval[0] << shift_imm;
10825 break;
10826
10827 case 1:
10828 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10829 break;
10830
10831 case 2:
10832 if (!shift_imm)
10833 {
10834 if (bit (u_regval[0], 31))
10835 {
10836 offset_12 = 0xFFFFFFFF;
10837 }
10838 else
10839 {
10840 offset_12 = 0;
10841 }
10842 }
10843 else
10844 {
10845 /* This is an arithmetic shift (ASR). */
10846 offset_12 = s_word >> shift_imm;
10847 }
10848 break;
10849
10850 case 3:
10851 if (!shift_imm)
10852 {
10853 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10854 &u_regval[1]);
10855 /* Get C flag value and shift it by 31. */
10856 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10857 | (u_regval[0]) >> 1);
10858 }
10859 else
10860 {
10861 offset_12 = (u_regval[0] >> shift_imm) \
10862 | (u_regval[0] <<
10863 (32 - shift_imm));
10864 }
10865 break;
10866
10867 default:
10868 gdb_assert_not_reached ("no decoding pattern found");
10869 break;
10870 }
10871
10872 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10873 /* bit U set. */
10874 if (bit (arm_insn_r->arm_insn, 23))
10875 {
10876 tgt_mem_addr = u_regval[1] + offset_12;
10877 }
10878 else
10879 {
10880 tgt_mem_addr = u_regval[1] - offset_12;
10881 }
10882
10883 switch (arm_insn_r->opcode)
10884 {
10885 /* STR. */
10886 case 8:
10887 case 12:
10888 /* STR. */
10889 case 9:
10890 case 13:
10891 /* STRT. */
10892 case 1:
10893 case 5:
10894 /* STR. */
10895 case 0:
10896 case 4:
10897 record_buf_mem[0] = 4;
10898 break;
10899
10900 /* STRB. */
10901 case 10:
10902 case 14:
10903 /* STRB. */
10904 case 11:
10905 case 15:
10906 /* STRBT. */
10907 case 3:
10908 case 7:
10909 /* STRB. */
10910 case 2:
10911 case 6:
10912 record_buf_mem[0] = 1;
10913 break;
10914
10915 default:
10916 gdb_assert_not_reached ("no decoding pattern found");
10917 break;
10918 }
10919 record_buf_mem[1] = tgt_mem_addr;
10920 arm_insn_r->mem_rec_count = 1;
10921
10922 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10923 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10924 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10925 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10926 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10927 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10928 )
10929 {
10930 /* Rn is going to be changed in register scaled pre-indexed
10931 mode, and scaled post-indexed mode. */
10932 record_buf[0] = reg_src2;
10933 arm_insn_r->reg_rec_count = 1;
10934 }
10935 }
10936 }
10937
10938 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10939 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10940 return 0;
10941}
10942
71e396f9 10943/* Handle ARM mode instructions with opcode 100. */
72508ac0
PO
10944
10945static int
10946arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10947{
10948 struct regcache *reg_cache = arm_insn_r->regcache;
71e396f9
LM
10949 uint32_t register_count = 0, register_bits;
10950 uint32_t reg_base, addr_mode;
72508ac0 10951 uint32_t record_buf[24], record_buf_mem[48];
71e396f9
LM
10952 uint32_t wback;
10953 ULONGEST u_regval;
72508ac0 10954
71e396f9
LM
10955 /* Fetch the list of registers. */
10956 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10957 arm_insn_r->reg_rec_count = 0;
10958
10959 /* Fetch the base register that contains the address we are loading data
10960 to. */
10961 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10962
71e396f9
LM
10963 /* Calculate wback. */
10964 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10965
10966 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10967 {
71e396f9 10968 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10969
71e396f9 10970 /* Find out which registers are going to be loaded from memory. */
72508ac0 10971 while (register_bits)
71e396f9
LM
10972 {
10973 if (register_bits & 0x00000001)
10974 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10975 register_bits = register_bits >> 1;
10976 register_count++;
10977 }
72508ac0 10978
71e396f9
LM
10979
10980 /* If wback is true, also save the base register, which is going to be
10981 written to. */
10982 if (wback)
10983 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10984
10985 /* Save the CPSR register. */
10986 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
72508ac0
PO
10987 }
10988 else
10989 {
71e396f9 10990 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 10991
71e396f9
LM
10992 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10993
10994 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10995
10996 /* Find out how many registers are going to be stored to memory. */
72508ac0 10997 while (register_bits)
71e396f9
LM
10998 {
10999 if (register_bits & 0x00000001)
11000 register_count++;
11001 register_bits = register_bits >> 1;
11002 }
72508ac0
PO
11003
11004 switch (addr_mode)
71e396f9
LM
11005 {
11006 /* STMDA (STMED): Decrement after. */
11007 case 0:
11008 record_buf_mem[1] = (uint32_t) u_regval
11009 - register_count * INT_REGISTER_SIZE + 4;
11010 break;
11011 /* STM (STMIA, STMEA): Increment after. */
11012 case 1:
11013 record_buf_mem[1] = (uint32_t) u_regval;
11014 break;
11015 /* STMDB (STMFD): Decrement before. */
11016 case 2:
11017 record_buf_mem[1] = (uint32_t) u_regval
11018 - register_count * INT_REGISTER_SIZE;
11019 break;
11020 /* STMIB (STMFA): Increment before. */
11021 case 3:
11022 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11023 break;
11024 default:
11025 gdb_assert_not_reached ("no decoding pattern found");
11026 break;
11027 }
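/* For example, STMDB SP!, {R0-R3} with SP = 0x1000 stores 16 bytes
   at 0x0ff0..0x0fff, so the recorded region starts at SP - 16.  */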
72508ac0 11028
71e396f9
LM
11029 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11030 arm_insn_r->mem_rec_count = 1;
11031
11032 /* If wback is true, also save the base register, which is going to be
11033 written to. */
11034 if (wback)
11035 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
11036 }
11037
11038 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11039 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11040 return 0;
11041}
11042
11043/* Handling opcode 101 insns. */
11044
11045static int
11046arm_record_b_bl (insn_decode_record *arm_insn_r)
11047{
11048 uint32_t record_buf[8];
11049
11050 /* Handle B, BL, BLX(1) insns. */
11051 /* B simply branches so we do nothing here. */
11052 /* Note: BLX(1) doesn't fall here; instead it falls into the
11053 extension space. */
11054 if (bit (arm_insn_r->arm_insn, 24))
11055 {
11056 record_buf[0] = ARM_LR_REGNUM;
11057 arm_insn_r->reg_rec_count = 1;
11058 }
11059
11060 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11061
11062 return 0;
11063}
11064
72508ac0 11065static int
c6ec2b30 11066arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
72508ac0
PO
11067{
11068 printf_unfiltered (_("Process record does not support instruction "
01e57735
YQ
11069 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11070 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
72508ac0
PO
11071
11072 return -1;
11073}
11074
5a578da5
OJ
11075/* Record handler for vector data transfer instructions. */
11076
11077static int
11078arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11079{
11080 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11081 uint32_t record_buf[4];
11082
5a578da5
OJ
11083 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11084 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11085 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11086 bit_l = bit (arm_insn_r->arm_insn, 20);
11087 bit_c = bit (arm_insn_r->arm_insn, 8);
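/* Bit L (20) selects transfers into an ARM core register; bit C (8)
   distinguishes the scalar VMOV/VDUP forms from VMRS/VMSR and the
   S-register VMOV forms.  */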
11088
11089 /* Handle VMOV instruction. */
11090 if (bit_l && bit_c)
11091 {
11092 record_buf[0] = reg_t;
11093 arm_insn_r->reg_rec_count = 1;
11094 }
11095 else if (bit_l && !bit_c)
11096 {
11097 /* Handle VMOV instruction. */
11098 if (bits_a == 0x00)
11099 {
f1771dce 11100 record_buf[0] = reg_t;
5a578da5
OJ
11101 arm_insn_r->reg_rec_count = 1;
11102 }
11103 /* Handle VMRS instruction. */
11104 else if (bits_a == 0x07)
11105 {
11106 if (reg_t == 15)
11107 reg_t = ARM_PS_REGNUM;
11108
11109 record_buf[0] = reg_t;
11110 arm_insn_r->reg_rec_count = 1;
11111 }
11112 }
11113 else if (!bit_l && !bit_c)
11114 {
11115 /* Handle VMOV instruction. */
11116 if (bits_a == 0x00)
11117 {
f1771dce 11118 record_buf[0] = ARM_D0_REGNUM + reg_v;
5a578da5
OJ
11119
11120 arm_insn_r->reg_rec_count = 1;
11121 }
11122 /* Handle VMSR instruction. */
11123 else if (bits_a == 0x07)
11124 {
11125 record_buf[0] = ARM_FPSCR_REGNUM;
11126 arm_insn_r->reg_rec_count = 1;
11127 }
11128 }
11129 else if (!bit_l && bit_c)
11130 {
11131 /* Handle VMOV instruction. */
11132 if (!(bits_a & 0x04))
11133 {
11134 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11135 + ARM_D0_REGNUM;
11136 arm_insn_r->reg_rec_count = 1;
11137 }
11138 /* Handle VDUP instruction. */
11139 else
11140 {
11141 if (bit (arm_insn_r->arm_insn, 21))
11142 {
11143 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11144 record_buf[0] = reg_v + ARM_D0_REGNUM;
11145 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11146 arm_insn_r->reg_rec_count = 2;
11147 }
11148 else
11149 {
11150 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11151 record_buf[0] = reg_v + ARM_D0_REGNUM;
11152 arm_insn_r->reg_rec_count = 1;
11153 }
11154 }
11155 }
11156
11157 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11158 return 0;
11159}
11160
f20f80dd
OJ
11161/* Record handler for extension register load/store instructions. */
11162
11163static int
11164arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11165{
11166 uint32_t opcode, single_reg;
11167 uint8_t op_vldm_vstm;
11168 uint32_t record_buf[8], record_buf_mem[128];
11169 ULONGEST u_regval = 0;
11170
11171 struct regcache *reg_cache = arm_insn_r->regcache;
f20f80dd
OJ
11172
11173 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11174 single_reg = !bit (arm_insn_r->arm_insn, 8);
f20f80dd
OJ
11175 op_vldm_vstm = opcode & 0x1b;
11176
11177 /* Handle VMOV instructions. */
11178 if ((opcode & 0x1e) == 0x04)
11179 {
9fde51ed 11180 if (bit (arm_insn_r->arm_insn, 20)) /* Bit 20 set: transfer to ARM core registers. */
01e57735
YQ
11181 {
11182 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11183 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11184 arm_insn_r->reg_rec_count = 2;
11185 }
f20f80dd 11186 else
01e57735 11187 {
9fde51ed
YQ
11188 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11189 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11190
9fde51ed 11191 if (single_reg)
01e57735 11192 {
9fde51ed
YQ
11193 /* The first S register number m is REG_M:M (M is bit 5),
11194 the corresponding D register number is REG_M:M / 2, which
11195 is REG_M. */
11196 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11197 /* The second S register number is REG_M:M + 1, the
11198 corresponding D register number is (REG_M:M + 1) / 2.
11199 IOW, if bit M is 1, the first and second S registers
11200 are mapped to different D registers, otherwise, they are
11201 in the same D register. */
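/* For example, with REG_M = 3: if M is 0 the pair is S6:S7, both
   halves of D3, so only D3 is recorded; if M is 1 the pair is S7:S8,
   which straddle D3 and D4, so both are recorded.  */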
11202 if (bit_m)
11203 {
11204 record_buf[arm_insn_r->reg_rec_count++]
11205 = ARM_D0_REGNUM + reg_m + 1;
11206 }
01e57735
YQ
11207 }
11208 else
11209 {
9fde51ed 11210 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
01e57735
YQ
11211 arm_insn_r->reg_rec_count = 1;
11212 }
11213 }
f20f80dd
OJ
11214 }
11215 /* Handle VSTM and VPUSH instructions. */
11216 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11217 || op_vldm_vstm == 0x12)
f20f80dd
OJ
11218 {
11219 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11220 uint32_t memory_index = 0;
11221
11222 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11223 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11224 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11225 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11226 memory_count = imm_off8;
11227
11228 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11229 start_address = u_regval;
f20f80dd 11230 else
01e57735 11231 start_address = u_regval - imm_off32;
f20f80dd
OJ
11232
11233 if (bit (arm_insn_r->arm_insn, 21))
01e57735
YQ
11234 {
11235 record_buf[0] = reg_rn;
11236 arm_insn_r->reg_rec_count = 1;
11237 }
f20f80dd
OJ
11238
11239 while (memory_count > 0)
01e57735 11240 {
9fde51ed 11241 if (single_reg)
01e57735 11242 {
9fde51ed
YQ
11243 record_buf_mem[memory_index] = 4;
11244 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11245 start_address = start_address + 4;
11246 memory_index = memory_index + 2;
11247 }
11248 else
11249 {
9fde51ed
YQ
11250 record_buf_mem[memory_index] = 4;
11251 record_buf_mem[memory_index + 1] = start_address;
11252 record_buf_mem[memory_index + 2] = 4;
11253 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11254 start_address = start_address + 8;
11255 memory_index = memory_index + 4;
11256 }
11257 memory_count--;
11258 }
f20f80dd
OJ
11259 arm_insn_r->mem_rec_count = (memory_index >> 1);
11260 }
11261 /* Handle VLDM instructions. */
11262 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11263 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11264 {
11265 uint32_t reg_count, reg_vd;
11266 uint32_t reg_index = 0;
9fde51ed 11267 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
f20f80dd
OJ
11268
11269 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11270 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11271
9fde51ed
YQ
11272 /* REG_VD is the first D register number. If the instruction
11273 loads memory to S registers (SINGLE_REG is TRUE), the register
11274 number is (REG_VD << 1 | bit D), so the corresponding D
11275 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11276 if (!single_reg)
11277 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11278
9fde51ed 11279 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11280 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11281
9fde51ed
YQ
11282 /* If the instruction loads memory to a D register, REG_COUNT should
11283 be divided by 2, according to the ARM Architecture Reference
11284 Manual.  If the instruction loads memory to an S register, divide by
11285 2 as well, because two S registers are mapped to one D register. */
11286 reg_count = reg_count / 2;
11287 if (single_reg && bit_d)
01e57735 11288 {
9fde51ed
YQ
11289 /* Increase the register count if S register list starts from
11290 an odd number (bit d is one). */
11291 reg_count++;
11292 }
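/* For example, an imm8 of 5 with the list starting at S1 (bit d set)
   names S1..S5, which touch D0..D2: 5 / 2 = 2, plus one for the odd
   start, gives the 3 D registers recorded below.  */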
f20f80dd 11293
9fde51ed
YQ
11294 while (reg_count > 0)
11295 {
11296 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
01e57735
YQ
11297 reg_count--;
11298 }
f20f80dd
OJ
11299 arm_insn_r->reg_rec_count = reg_index;
11300 }
11301 /* VSTR Vector store register. */
11302 else if ((opcode & 0x13) == 0x10)
11303 {
bec2ab5a 11304 uint32_t start_address, reg_rn, imm_off32, imm_off8;
f20f80dd
OJ
11305 uint32_t memory_index = 0;
11306
11307 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11308 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11309 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11310 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11311
11312 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11313 start_address = u_regval + imm_off32;
f20f80dd 11314 else
01e57735 11315 start_address = u_regval - imm_off32;
f20f80dd
OJ
11316
11317 if (single_reg)
01e57735 11318 {
9fde51ed
YQ
11319 record_buf_mem[memory_index] = 4;
11320 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11321 arm_insn_r->mem_rec_count = 1;
11322 }
f20f80dd 11323 else
01e57735 11324 {
9fde51ed
YQ
11325 record_buf_mem[memory_index] = 4;
11326 record_buf_mem[memory_index + 1] = start_address;
11327 record_buf_mem[memory_index + 2] = 4;
11328 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11329 arm_insn_r->mem_rec_count = 2;
11330 }
f20f80dd
OJ
11331 }
11332 /* VLDR Vector load register. */
11333 else if ((opcode & 0x13) == 0x11)
11334 {
11335 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11336
11337 if (!single_reg)
01e57735
YQ
11338 {
11339 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11340 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11341 }
f20f80dd 11342 else
01e57735
YQ
11343 {
11344 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
9fde51ed
YQ
11345 /* Record register D rather than pseudo register S. */
11346 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11347 }
f20f80dd
OJ
11348 arm_insn_r->reg_rec_count = 1;
11349 }
11350
11351 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11352 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11353 return 0;
11354}
11355
851f26ae
OJ
11356/* Record handler for arm/thumb mode VFP data processing instructions. */
11357
11358static int
11359arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11360{
11361 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11362 uint32_t record_buf[4];
11363 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11364 enum insn_types curr_insn_type = INSN_INV;
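/* INSN_T0 records two D registers (a quadword destination), INSN_T1
   a single double-precision D register, INSN_T2 a single-precision
   destination, and INSN_T3 only the FPSCR.  */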
11365
11366 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11367 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11368 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11369 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11370 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11371 bit_d = bit (arm_insn_r->arm_insn, 22);
11372 opc1 = opc1 & ~0x04; /* Mask out the D bit. */
11373
11374 /* Handle VMLA, VMLS. */
11375 if (opc1 == 0x00)
11376 {
11377 if (bit (arm_insn_r->arm_insn, 10))
11378 {
11379 if (bit (arm_insn_r->arm_insn, 6))
11380 curr_insn_type = INSN_T0;
11381 else
11382 curr_insn_type = INSN_T1;
11383 }
11384 else
11385 {
11386 if (dp_op_sz)
11387 curr_insn_type = INSN_T1;
11388 else
11389 curr_insn_type = INSN_T2;
11390 }
11391 }
11392 /* Handle VNMLA, VNMLS, VNMUL. */
11393 else if (opc1 == 0x01)
11394 {
11395 if (dp_op_sz)
11396 curr_insn_type = INSN_T1;
11397 else
11398 curr_insn_type = INSN_T2;
11399 }
11400 /* Handle VMUL. */
11401 else if (opc1 == 0x02 && !(opc3 & 0x01))
11402 {
11403 if (bit (arm_insn_r->arm_insn, 10))
11404 {
11405 if (bit (arm_insn_r->arm_insn, 6))
11406 curr_insn_type = INSN_T0;
11407 else
11408 curr_insn_type = INSN_T1;
11409 }
11410 else
11411 {
11412 if (dp_op_sz)
11413 curr_insn_type = INSN_T1;
11414 else
11415 curr_insn_type = INSN_T2;
11416 }
11417 }
11418 /* Handle VADD, VSUB. */
11419 else if (opc1 == 0x03)
11420 {
11421 if (!bit (arm_insn_r->arm_insn, 9))
11422 {
11423 if (bit (arm_insn_r->arm_insn, 6))
11424 curr_insn_type = INSN_T0;
11425 else
11426 curr_insn_type = INSN_T1;
11427 }
11428 else
11429 {
11430 if (dp_op_sz)
11431 curr_insn_type = INSN_T1;
11432 else
11433 curr_insn_type = INSN_T2;
11434 }
11435 }
11436 /* Handle VDIV. */
11437 else if (opc1 == 0x08)
11438 {
11439 if (dp_op_sz)
11440 curr_insn_type = INSN_T1;
11441 else
11442 curr_insn_type = INSN_T2;
11443 }
11444 /* Handle all other vfp data processing instructions. */
11445 else if (opc1 == 0x0b)
11446 {
11447 /* Handle VMOV. */
11448 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11449 {
11450 if (bit (arm_insn_r->arm_insn, 4))
11451 {
11452 if (bit (arm_insn_r->arm_insn, 6))
11453 curr_insn_type = INSN_T0;
11454 else
11455 curr_insn_type = INSN_T1;
11456 }
11457 else
11458 {
11459 if (dp_op_sz)
11460 curr_insn_type = INSN_T1;
11461 else
11462 curr_insn_type = INSN_T2;
11463 }
11464 }
11465 /* Handle VNEG and VABS. */
11466 else if ((opc2 == 0x01 && opc3 == 0x01)
11467 || (opc2 == 0x00 && opc3 == 0x03))
11468 {
11469 if (!bit (arm_insn_r->arm_insn, 11))
11470 {
11471 if (bit (arm_insn_r->arm_insn, 6))
11472 curr_insn_type = INSN_T0;
11473 else
11474 curr_insn_type = INSN_T1;
11475 }
11476 else
11477 {
11478 if (dp_op_sz)
11479 curr_insn_type = INSN_T1;
11480 else
11481 curr_insn_type = INSN_T2;
11482 }
11483 }
11484 /* Handle VSQRT. */
11485 else if (opc2 == 0x01 && opc3 == 0x03)
11486 {
11487 if (dp_op_sz)
11488 curr_insn_type = INSN_T1;
11489 else
11490 curr_insn_type = INSN_T2;
11491 }
11492 /* Handle VCVT. */
11493 else if (opc2 == 0x07 && opc3 == 0x03)
11494 {
11495 if (!dp_op_sz)
11496 curr_insn_type = INSN_T1;
11497 else
11498 curr_insn_type = INSN_T2;
11499 }
11500 else if (opc3 & 0x01)
11501 {
11502 /* Handle VCVT. */
11503 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11504 {
11505 if (!bit (arm_insn_r->arm_insn, 18))
11506 curr_insn_type = INSN_T2;
11507 else
11508 {
11509 if (dp_op_sz)
11510 curr_insn_type = INSN_T1;
11511 else
11512 curr_insn_type = INSN_T2;
11513 }
11514 }
11515 /* Handle VCVT. */
11516 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11517 {
11518 if (dp_op_sz)
11519 curr_insn_type = INSN_T1;
11520 else
11521 curr_insn_type = INSN_T2;
11522 }
11523 /* Handle VCVTB, VCVTT. */
11524 else if ((opc2 & 0x0e) == 0x02)
11525 curr_insn_type = INSN_T2;
11526 /* Handle VCMP, VCMPE. */
11527 else if ((opc2 & 0x0e) == 0x04)
11528 curr_insn_type = INSN_T3;
11529 }
11530 }
11531
11532 switch (curr_insn_type)
11533 {
11534 case INSN_T0:
11535 reg_vd = reg_vd | (bit_d << 4);
11536 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11537 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11538 arm_insn_r->reg_rec_count = 2;
11539 break;
11540
11541 case INSN_T1:
11542 reg_vd = reg_vd | (bit_d << 4);
11543 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11544 arm_insn_r->reg_rec_count = 1;
11545 break;
11546
11547 case INSN_T2:
11548 reg_vd = (reg_vd << 1) | bit_d;
11549 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11550 arm_insn_r->reg_rec_count = 1;
11551 break;
11552
11553 case INSN_T3:
11554 record_buf[0] = ARM_FPSCR_REGNUM;
11555 arm_insn_r->reg_rec_count = 1;
11556 break;
11557
11558 default:
11559 gdb_assert_not_reached ("no decoding pattern found");
11560 break;
11561 }
11562
11563 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11564 return 0;
11565}
11566
60cc5e93
OJ
11567/* Handling opcode 110 insns. */
11568
11569static int
11570arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11571{
bec2ab5a 11572 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11573
11574 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11575 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11576 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11577
11578 if ((coproc & 0x0e) == 0x0a)
11579 {
11580 /* Handle extension register ld/st instructions. */
11581 if (!(op1 & 0x20))
f20f80dd 11582 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11583
11584 /* 64-bit transfers between arm core and extension registers. */
11585 if ((op1 & 0x3e) == 0x04)
f20f80dd 11586 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11587 }
11588 else
11589 {
11590 /* Handle coprocessor ld/st instructions. */
11591 if (!(op1 & 0x3a))
11592 {
11593 /* Store. */
11594 if (!op1_ebit)
11595 return arm_record_unsupported_insn (arm_insn_r);
11596 else
11597 /* Load. */
11598 return arm_record_unsupported_insn (arm_insn_r);
11599 }
11600
11601 /* Move to coprocessor from two arm core registers. */
11602 if (op1 == 0x4)
11603 return arm_record_unsupported_insn (arm_insn_r);
11604
11605 /* Move to two arm core registers from coprocessor. */
11606 if (op1 == 0x5)
11607 {
11608 uint32_t reg_t[2];
11609
11610 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11611 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11612 arm_insn_r->reg_rec_count = 2;
11613
11614 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11615 return 0;
11616 }
11617 }
11618 return arm_record_unsupported_insn (arm_insn_r);
11619}
11620
72508ac0
PO
11621/* Handling opcode 111 insns. */
11622
11623static int
11624arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11625{
60cc5e93 11626 uint32_t op, op1_sbit, op1_ebit, coproc;
72508ac0
PO
11627 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11628 struct regcache *reg_cache = arm_insn_r->regcache;
72508ac0
PO
11629
11630 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
60cc5e93
OJ
11631 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11632 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11633 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11634 op = bit (arm_insn_r->arm_insn, 4);
97dfe206
OJ
11635
11636 /* Handle arm SWI/SVC system call instructions. */
60cc5e93 11637 if (op1_sbit)
97dfe206
OJ
11638 {
11639 if (tdep->arm_syscall_record != NULL)
11640 {
11641 ULONGEST svc_operand, svc_number;
11642
11643 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11644
11645 if (svc_operand) /* OABI. */
11646 svc_number = svc_operand - 0x900000;
11647 else /* EABI. */
11648 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
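/* Linux OABI encodes the syscall number in the SVC immediate,
   biased by 0x900000; EABI uses an immediate of 0 and passes the
   number in r7.  */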
11649
60cc5e93 11650 return tdep->arm_syscall_record (reg_cache, svc_number);
97dfe206
OJ
11651 }
11652 else
11653 {
11654 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11655 return -1;
97dfe206
OJ
11656 }
11657 }
60cc5e93
OJ
11658
11659 if ((coproc & 0x0e) == 0x0a)
11660 {
11661 /* VFP data-processing instructions. */
11662 if (!op1_sbit && !op)
851f26ae 11663 return arm_record_vfp_data_proc_insn (arm_insn_r);
60cc5e93
OJ
11664
11665 /* Advanced SIMD, VFP instructions. */
11666 if (!op1_sbit && op)
5a578da5 11667 return arm_record_vdata_transfer_insn (arm_insn_r);
60cc5e93 11668 }
97dfe206
OJ
11669 else
11670 {
60cc5e93
OJ
11671 /* Coprocessor data operations. */
11672 if (!op1_sbit && !op)
11673 return arm_record_unsupported_insn (arm_insn_r);
11674
11675 /* Move to Coprocessor from ARM core register. */
11676 if (!op1_sbit && !op1_ebit && op)
11677 return arm_record_unsupported_insn (arm_insn_r);
11678
11679 /* Move to arm core register from coprocessor. */
11680 if (!op1_sbit && op1_ebit && op)
11681 {
11682 uint32_t record_buf[1];
11683
11684 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11685 if (record_buf[0] == 15)
11686 record_buf[0] = ARM_PS_REGNUM;
11687
11688 arm_insn_r->reg_rec_count = 1;
11689 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11690 record_buf);
11691 return 0;
11692 }
97dfe206 11693 }
72508ac0 11694
60cc5e93 11695 return arm_record_unsupported_insn (arm_insn_r);
72508ac0
PO
11696}
11697
11698/* Handling opcode 000 insns. */
11699
11700static int
11701thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11702{
11703 uint32_t record_buf[8];
11704 uint32_t reg_src1 = 0;
11705
11706 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11707
11708 record_buf[0] = ARM_PS_REGNUM;
11709 record_buf[1] = reg_src1;
11710 thumb_insn_r->reg_rec_count = 2;
11711
11712 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11713
11714 return 0;
11715}
11716
11717
11718/* Handling opcode 001 insns. */
11719
11720static int
11721thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11722{
11723 uint32_t record_buf[8];
11724 uint32_t reg_src1 = 0;
11725
11726 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11727
11728 record_buf[0] = ARM_PS_REGNUM;
11729 record_buf[1] = reg_src1;
11730 thumb_insn_r->reg_rec_count = 2;
11731
11732 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11733
11734 return 0;
11735}
11736
11737/* Handling opcode 010 insns. */
11738
11739static int
11740thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11741{
11742 struct regcache *reg_cache = thumb_insn_r->regcache;
11743 uint32_t record_buf[8], record_buf_mem[8];
11744
11745 uint32_t reg_src1 = 0, reg_src2 = 0;
11746 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11747
11748 ULONGEST u_regval[2] = {0};
11749
11750 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11751
11752 if (bit (thumb_insn_r->arm_insn, 12))
11753 {
11754 /* Handle load/store register offset. */
11755 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11756 if (opcode2 >= 12 && opcode2 <= 15)
11757 {
11758 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11759 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11760 record_buf[0] = reg_src1;
11761 thumb_insn_r->reg_rec_count = 1;
11762 }
11763 else if (opcode2 >= 8 && opcode2 <= 10)
11764 {
11765 /* STR(2), STRB(2), STRH(2) . */
11766 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11767 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11768 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11769 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11770 if (8 == opcode2)
11771 record_buf_mem[0] = 4; /* STR (2). */
11772 else if (10 == opcode2)
11773 record_buf_mem[0] = 1; /* STRB (2). */
11774 else if (9 == opcode2)
11775 record_buf_mem[0] = 2; /* STRH (2). */
11776 record_buf_mem[1] = u_regval[0] + u_regval[1];
11777 thumb_insn_r->mem_rec_count = 1;
11778 }
11779 }
11780 else if (bit (thumb_insn_r->arm_insn, 11))
11781 {
11782 /* Handle load from literal pool. */
11783 /* LDR(3). */
11784 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11785 record_buf[0] = reg_src1;
11786 thumb_insn_r->reg_rec_count = 1;
11787 }
11788 else if (opcode1)
11789 {
11790 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11791 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11792 if ((3 == opcode2) && (!opcode3))
11793 {
11794 /* Branch with exchange. */
11795 record_buf[0] = ARM_PS_REGNUM;
11796 thumb_insn_r->reg_rec_count = 1;
11797 }
11798 else
11799 {
1f33efec
YQ
11800 /* Format 8; special data processing insns. */
11801 record_buf[0] = ARM_PS_REGNUM;
11802 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11803 | bits (thumb_insn_r->arm_insn, 0, 2));
72508ac0
PO
11804 thumb_insn_r->reg_rec_count = 2;
11805 }
11806 }
11807 else
11808 {
11809 /* Format 5; data processing insns. */
11810 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11811 if (bit (thumb_insn_r->arm_insn, 7))
11812 {
11813 reg_src1 = reg_src1 + 8;
11814 }
11815 record_buf[0] = ARM_PS_REGNUM;
11816 record_buf[1] = reg_src1;
11817 thumb_insn_r->reg_rec_count = 2;
11818 }
11819
11820 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11821 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11822 record_buf_mem);
11823
11824 return 0;
11825}
11826
11827/* Handling opcode 001 insns. */
11828
11829static int
11830thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11831{
11832 struct regcache *reg_cache = thumb_insn_r->regcache;
11833 uint32_t record_buf[8], record_buf_mem[8];
11834
11835 uint32_t reg_src1 = 0;
11836 uint32_t opcode = 0, immed_5 = 0;
11837
11838 ULONGEST u_regval = 0;
11839
11840 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11841
11842 if (opcode)
11843 {
11844 /* LDR(1). */
11845 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11846 record_buf[0] = reg_src1;
11847 thumb_insn_r->reg_rec_count = 1;
11848 }
11849 else
11850 {
11851 /* STR(1). */
11852 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11853 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11854 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11855 record_buf_mem[0] = 4;
11856 record_buf_mem[1] = u_regval + (immed_5 * 4);
11857 thumb_insn_r->mem_rec_count = 1;
11858 }
11859
11860 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11861 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11862 record_buf_mem);
11863
11864 return 0;
11865}
11866
11867/* Handling opcode 100 insns. */
11868
11869static int
11870thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11871{
11872 struct regcache *reg_cache = thumb_insn_r->regcache;
11873 uint32_t record_buf[8], record_buf_mem[8];
11874
11875 uint32_t reg_src1 = 0;
11876 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11877
11878 ULONGEST u_regval = 0;
11879
11880 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11881
11882 if (3 == opcode)
11883 {
11884 /* LDR(4). */
11885 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11886 record_buf[0] = reg_src1;
11887 thumb_insn_r->reg_rec_count = 1;
11888 }
11889 else if (1 == opcode)
11890 {
11891 /* LDRH(1). */
11892 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11893 record_buf[0] = reg_src1;
11894 thumb_insn_r->reg_rec_count = 1;
11895 }
11896 else if (2 == opcode)
11897 {
11898 /* STR(3). */
11899 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11900 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11901 record_buf_mem[0] = 4;
11902 record_buf_mem[1] = u_regval + (immed_8 * 4);
11903 thumb_insn_r->mem_rec_count = 1;
11904 }
11905 else if (0 == opcode)
11906 {
11907 /* STRH(1). */
11908 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11909 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11910 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11911 record_buf_mem[0] = 2;
11912 record_buf_mem[1] = u_regval + (immed_5 * 2);
11913 thumb_insn_r->mem_rec_count = 1;
11914 }
11915
11916 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11917 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11918 record_buf_mem);
11919
11920 return 0;
11921}
11922
11923/* Handling opcode 101 insns. */
11924
11925static int
11926thumb_record_misc (insn_decode_record *thumb_insn_r)
11927{
11928 struct regcache *reg_cache = thumb_insn_r->regcache;
11929
11930 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11931 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11932 uint32_t index = 0, start_address = 0;
11933 uint32_t record_buf[24], record_buf_mem[48];
11934 uint32_t reg_src1;
11935
11936 ULONGEST u_regval = 0;
11937
11938 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11939 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11940 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11941
11942 if (14 == opcode2)
11943 {
11944 /* POP. */
11945 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11946 while (register_bits)
11947 {
11948 if (register_bits & 0x00000001)
11949 record_buf[index++] = register_count;
11950 register_bits = register_bits >> 1;
11951 register_count++;
11952 }
11953 record_buf[index++] = ARM_PS_REGNUM;
11954 record_buf[index++] = ARM_SP_REGNUM;
11955 thumb_insn_r->reg_rec_count = index;
11956 }
11957 else if (10 == opcode2)
11958 {
11959 /* PUSH. */
11960 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
9904a494 11961 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11962 while (register_bits)
11963 {
11964 if (register_bits & 0x00000001)
11965 register_count++;
11966 register_bits = register_bits >> 1;
11967 }
11968 start_address = u_regval - \
11969 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
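 /* As an illustration, "push {r4, r5, lr}" has two registers in the list
 plus the LR bit (bit 8), so the three words starting at SP - 12 are
 recorded as (4, address) pairs by the loop below. */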
11970 thumb_insn_r->mem_rec_count = register_count;
11971 while (register_count)
11972 {
11973 record_buf_mem[(register_count * 2) - 1] = start_address;
11974 record_buf_mem[(register_count * 2) - 2] = 4;
11975 start_address = start_address + 4;
11976 register_count--;
11977 }
11978 record_buf[0] = ARM_SP_REGNUM;
11979 thumb_insn_r->reg_rec_count = 1;
11980 }
11981 else if (0x1E == opcode1)
11982 {
11983 /* BKPT insn. */
11984 /* Handle enhanced software breakpoint insn, BKPT. */
 11985 /* CPSR is changed so that execution continues in ARM state with
 11986 normal interrupts disabled, entering abort mode. */
 11987 /* The PC is set according to the high vector configuration. */
 11988 /* If the user hits a breakpoint and then types reverse, we need to
 11989 go back with the previous CPSR and Program Counter. */
11990 record_buf[0] = ARM_PS_REGNUM;
11991 record_buf[1] = ARM_LR_REGNUM;
11992 thumb_insn_r->reg_rec_count = 2;
11993 /* We need to save SPSR value, which is not yet done. */
11994 printf_unfiltered (_("Process record does not support instruction "
11995 "0x%0x at address %s.\n"),
11996 thumb_insn_r->arm_insn,
11997 paddress (thumb_insn_r->gdbarch,
11998 thumb_insn_r->this_addr));
11999 return -1;
12000 }
12001 else if ((0 == opcode) || (1 == opcode))
12002 {
12003 /* ADD(5), ADD(6). */
12004 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12005 record_buf[0] = reg_src1;
12006 thumb_insn_r->reg_rec_count = 1;
12007 }
12008 else if (2 == opcode)
12009 {
12010 /* ADD(7), SUB(4). */
12011 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12012 record_buf[0] = ARM_SP_REGNUM;
12013 thumb_insn_r->reg_rec_count = 1;
12014 }
12015
12016 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12017 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12018 record_buf_mem);
12019
12020 return 0;
12021}
12022
12023/* Handling opcode 110 insns. */
12024
12025static int
12026thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12027{
12028 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12029 struct regcache *reg_cache = thumb_insn_r->regcache;
12030
12031 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12032 uint32_t reg_src1 = 0;
12033 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 12034 uint32_t index = 0, start_address = 0;
12035 uint32_t record_buf[24], record_buf_mem[48];
12036
12037 ULONGEST u_regval = 0;
12038
12039 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12040 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12041
12042 if (1 == opcode2)
12043 {
12044
12045 /* LDMIA. */
12046 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12047 /* Get Rn. */
12048 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12049 while (register_bits)
12050 {
12051 if (register_bits & 0x00000001)
f969241e 12052 record_buf[index++] = register_count;
72508ac0 12053 register_bits = register_bits >> 1;
f969241e 12054 register_count++;
72508ac0 12055 }
12056 record_buf[index++] = reg_src1;
12057 thumb_insn_r->reg_rec_count = index;
12058 }
12059 else if (0 == opcode2)
12060 {
 12061 /* Handle STMIA. */
12062 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12063 /* Get Rn. */
12064 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12065 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12066 while (register_bits)
12067 {
12068 if (register_bits & 0x00000001)
12069 register_count++;
12070 register_bits = register_bits >> 1;
12071 }
12072 start_address = u_regval;
12073 thumb_insn_r->mem_rec_count = register_count;
12074 while (register_count)
12075 {
12076 record_buf_mem[(register_count * 2) - 1] = start_address;
12077 record_buf_mem[(register_count * 2) - 2] = 4;
12078 start_address = start_address + 4;
12079 register_count--;
12080 }
12081 }
12082 else if (0x1F == opcode1)
12083 {
12084 /* Handle arm syscall insn. */
97dfe206 12085 if (tdep->arm_syscall_record != NULL)
72508ac0 12086 {
12087 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12088 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12089 }
12090 else
12091 {
12092 printf_unfiltered (_("no syscall record support\n"));
12093 return -1;
12094 }
12095 }
12096
 12097 /* B (1), conditional branch, is automatically taken care of in
 12098 process_record, as the PC is saved there. */
12099
12100 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12101 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12102 record_buf_mem);
12103
12104 return ret;
12105}
12106
12107/* Handling opcode 111 insns. */
12108
12109static int
12110thumb_record_branch (insn_decode_record *thumb_insn_r)
12111{
12112 uint32_t record_buf[8];
12113 uint32_t bits_h = 0;
12114
12115 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12116
12117 if (2 == bits_h || 3 == bits_h)
12118 {
12119 /* BL */
12120 record_buf[0] = ARM_LR_REGNUM;
12121 thumb_insn_r->reg_rec_count = 1;
12122 }
12123 else if (1 == bits_h)
12124 {
12125 /* BLX(1). */
12126 record_buf[0] = ARM_PS_REGNUM;
12127 record_buf[1] = ARM_LR_REGNUM;
12128 thumb_insn_r->reg_rec_count = 2;
12129 }
12130
 12131 /* B(2) is automatically taken care of in process_record, as the PC
 12132 is saved there. */
12133
12134 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12135
12136 return 0;
12137}
12138
12139/* Handler for thumb2 load/store multiple instructions. */
12140
12141static int
12142thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12143{
12144 struct regcache *reg_cache = thumb2_insn_r->regcache;
12145
12146 uint32_t reg_rn, op;
12147 uint32_t register_bits = 0, register_count = 0;
12148 uint32_t index = 0, start_address = 0;
12149 uint32_t record_buf[24], record_buf_mem[48];
12150
12151 ULONGEST u_regval = 0;
12152
12153 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12154 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12155
12156 if (0 == op || 3 == op)
12157 {
12158 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12159 {
12160 /* Handle RFE instruction. */
12161 record_buf[0] = ARM_PS_REGNUM;
12162 thumb2_insn_r->reg_rec_count = 1;
12163 }
12164 else
12165 {
12166 /* Handle SRS instruction after reading banked SP. */
12167 return arm_record_unsupported_insn (thumb2_insn_r);
12168 }
12169 }
12170 else if (1 == op || 2 == op)
12171 {
12172 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12173 {
12174 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12175 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12176 while (register_bits)
12177 {
12178 if (register_bits & 0x00000001)
12179 record_buf[index++] = register_count;
12180
12181 register_count++;
12182 register_bits = register_bits >> 1;
12183 }
12184 record_buf[index++] = reg_rn;
12185 record_buf[index++] = ARM_PS_REGNUM;
12186 thumb2_insn_r->reg_rec_count = index;
12187 }
12188 else
12189 {
12190 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12191 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12192 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12193 while (register_bits)
12194 {
12195 if (register_bits & 0x00000001)
12196 register_count++;
12197
12198 register_bits = register_bits >> 1;
12199 }
12200
12201 if (1 == op)
12202 {
 12203 /* Start address calculation for STM/STMIA/STMEA. */
12204 start_address = u_regval;
12205 }
12206 else if (2 == op)
12207 {
 12208 /* Start address calculation for STMDB/STMFD. */
12209 start_address = u_regval - register_count * 4;
12210 }
12211
12212 thumb2_insn_r->mem_rec_count = register_count;
12213 while (register_count)
12214 {
12215 record_buf_mem[register_count * 2 - 1] = start_address;
12216 record_buf_mem[register_count * 2 - 2] = 4;
12217 start_address = start_address + 4;
12218 register_count--;
12219 }
12220 record_buf[0] = reg_rn;
12221 record_buf[1] = ARM_PS_REGNUM;
12222 thumb2_insn_r->reg_rec_count = 2;
12223 }
12224 }
12225
12226 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12227 record_buf_mem);
12228 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12229 record_buf);
12230 return ARM_RECORD_SUCCESS;
12231}
12232
12233/* Handler for thumb2 load/store (dual/exclusive) and table branch
12234 instructions. */
12235
12236static int
12237thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12238{
12239 struct regcache *reg_cache = thumb2_insn_r->regcache;
12240
12241 uint32_t reg_rd, reg_rn, offset_imm;
12242 uint32_t reg_dest1, reg_dest2;
12243 uint32_t address, offset_addr;
12244 uint32_t record_buf[8], record_buf_mem[8];
12245 uint32_t op1, op2, op3;
12246
12247 ULONGEST u_regval[2];
12248
12249 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12250 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12251 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12252
12253 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12254 {
12255 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12256 {
12257 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12258 record_buf[0] = reg_dest1;
12259 record_buf[1] = ARM_PS_REGNUM;
12260 thumb2_insn_r->reg_rec_count = 2;
12261 }
12262
12263 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12264 {
12265 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12266 record_buf[2] = reg_dest2;
12267 thumb2_insn_r->reg_rec_count = 3;
12268 }
12269 }
12270 else
12271 {
12272 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12273 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12274
12275 if (0 == op1 && 0 == op2)
12276 {
12277 /* Handle STREX. */
12278 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12279 address = u_regval[0] + (offset_imm * 4);
12280 record_buf_mem[0] = 4;
12281 record_buf_mem[1] = address;
12282 thumb2_insn_r->mem_rec_count = 1;
12283 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12284 record_buf[0] = reg_rd;
12285 thumb2_insn_r->reg_rec_count = 1;
12286 }
12287 else if (1 == op1 && 0 == op2)
12288 {
12289 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12290 record_buf[0] = reg_rd;
12291 thumb2_insn_r->reg_rec_count = 1;
12292 address = u_regval[0];
12293 record_buf_mem[1] = address;
12294
12295 if (4 == op3)
12296 {
12297 /* Handle STREXB. */
12298 record_buf_mem[0] = 1;
12299 thumb2_insn_r->mem_rec_count = 1;
12300 }
12301 else if (5 == op3)
12302 {
12303 /* Handle STREXH. */
12304 record_buf_mem[0] = 2 ;
12305 thumb2_insn_r->mem_rec_count = 1;
12306 }
12307 else if (7 == op3)
12308 {
12309 /* Handle STREXD. */
12310 address = u_regval[0];
12311 record_buf_mem[0] = 4;
12312 record_buf_mem[2] = 4;
12313 record_buf_mem[3] = address + 4;
12314 thumb2_insn_r->mem_rec_count = 2;
12315 }
12316 }
12317 else
12318 {
12319 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12320
12321 if (bit (thumb2_insn_r->arm_insn, 24))
12322 {
12323 if (bit (thumb2_insn_r->arm_insn, 23))
12324 offset_addr = u_regval[0] + (offset_imm * 4);
12325 else
12326 offset_addr = u_regval[0] - (offset_imm * 4);
12327
12328 address = offset_addr;
12329 }
12330 else
12331 address = u_regval[0];
12332
12333 record_buf_mem[0] = 4;
12334 record_buf_mem[1] = address;
12335 record_buf_mem[2] = 4;
12336 record_buf_mem[3] = address + 4;
12337 thumb2_insn_r->mem_rec_count = 2;
12338 record_buf[0] = reg_rn;
12339 thumb2_insn_r->reg_rec_count = 1;
12340 }
12341 }
12342
12343 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12344 record_buf);
12345 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12346 record_buf_mem);
12347 return ARM_RECORD_SUCCESS;
12348}
12349
12350/* Handler for thumb2 data processing (shift register and modified immediate)
12351 instructions. */
12352
12353static int
12354thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12355{
12356 uint32_t reg_rd, op;
12357 uint32_t record_buf[8];
12358
12359 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12360 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12361
12362 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12363 {
12364 record_buf[0] = ARM_PS_REGNUM;
12365 thumb2_insn_r->reg_rec_count = 1;
12366 }
12367 else
12368 {
12369 record_buf[0] = reg_rd;
12370 record_buf[1] = ARM_PS_REGNUM;
12371 thumb2_insn_r->reg_rec_count = 2;
12372 }
12373
12374 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12375 record_buf);
12376 return ARM_RECORD_SUCCESS;
12377}
12378
12379/* Generic handler for thumb2 instructions which effect destination and PS
12380 registers. */
12381
12382static int
12383thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12384{
12385 uint32_t reg_rd;
12386 uint32_t record_buf[8];
12387
12388 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12389
12390 record_buf[0] = reg_rd;
12391 record_buf[1] = ARM_PS_REGNUM;
12392 thumb2_insn_r->reg_rec_count = 2;
12393
12394 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12395 record_buf);
12396 return ARM_RECORD_SUCCESS;
12397}
12398
12399/* Handler for thumb2 branch and miscellaneous control instructions. */
12400
12401static int
12402thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12403{
12404 uint32_t op, op1, op2;
12405 uint32_t record_buf[8];
12406
12407 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12408 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12409 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12410
12411 /* Handle MSR insn. */
12412 if (!(op1 & 0x2) && 0x38 == op)
12413 {
12414 if (!(op2 & 0x3))
12415 {
12416 /* CPSR is going to be changed. */
12417 record_buf[0] = ARM_PS_REGNUM;
12418 thumb2_insn_r->reg_rec_count = 1;
12419 }
12420 else
12421 {
12422 arm_record_unsupported_insn(thumb2_insn_r);
12423 return -1;
12424 }
12425 }
12426 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12427 {
12428 /* BLX. */
12429 record_buf[0] = ARM_PS_REGNUM;
12430 record_buf[1] = ARM_LR_REGNUM;
12431 thumb2_insn_r->reg_rec_count = 2;
12432 }
12433
12434 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12435 record_buf);
12436 return ARM_RECORD_SUCCESS;
12437}
12438
12439/* Handler for thumb2 store single data item instructions. */
12440
12441static int
12442thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12443{
12444 struct regcache *reg_cache = thumb2_insn_r->regcache;
12445
12446 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12447 uint32_t address, offset_addr;
12448 uint32_t record_buf[8], record_buf_mem[8];
12449 uint32_t op1, op2;
12450
12451 ULONGEST u_regval[2];
12452
12453 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12454 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12455 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12456 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12457
12458 if (bit (thumb2_insn_r->arm_insn, 23))
12459 {
12460 /* T2 encoding. */
12461 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12462 offset_addr = u_regval[0] + offset_imm;
12463 address = offset_addr;
12464 }
12465 else
12466 {
12467 /* T3 encoding. */
12468 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12469 {
12470 /* Handle STRB (register). */
12471 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12472 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12473 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12474 offset_addr = u_regval[1] << shift_imm;
12475 address = u_regval[0] + offset_addr;
12476 }
12477 else
12478 {
12479 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12480 if (bit (thumb2_insn_r->arm_insn, 10))
12481 {
12482 if (bit (thumb2_insn_r->arm_insn, 9))
12483 offset_addr = u_regval[0] + offset_imm;
12484 else
12485 offset_addr = u_regval[0] - offset_imm;
12486
12487 address = offset_addr;
12488 }
12489 else
12490 address = u_regval[0];
12491 }
12492 }
12493
12494 switch (op1)
12495 {
12496 /* Store byte instructions. */
12497 case 4:
12498 case 0:
12499 record_buf_mem[0] = 1;
12500 break;
12501 /* Store half word instructions. */
12502 case 1:
12503 case 5:
12504 record_buf_mem[0] = 2;
12505 break;
12506 /* Store word instructions. */
12507 case 2:
12508 case 6:
12509 record_buf_mem[0] = 4;
12510 break;
12511
12512 default:
12513 gdb_assert_not_reached ("no decoding pattern found");
12514 break;
12515 }
12516
12517 record_buf_mem[1] = address;
12518 thumb2_insn_r->mem_rec_count = 1;
12519 record_buf[0] = reg_rn;
12520 thumb2_insn_r->reg_rec_count = 1;
12521
12522 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12523 record_buf);
12524 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12525 record_buf_mem);
12526 return ARM_RECORD_SUCCESS;
12527}
12528
12529/* Handler for thumb2 load memory hints instructions. */
12530
12531static int
12532thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12533{
12534 uint32_t record_buf[8];
12535 uint32_t reg_rt, reg_rn;
12536
12537 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12538 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12539
12540 if (ARM_PC_REGNUM != reg_rt)
12541 {
12542 record_buf[0] = reg_rt;
12543 record_buf[1] = reg_rn;
12544 record_buf[2] = ARM_PS_REGNUM;
12545 thumb2_insn_r->reg_rec_count = 3;
12546
12547 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12548 record_buf);
12549 return ARM_RECORD_SUCCESS;
12550 }
12551
12552 return ARM_RECORD_FAILURE;
12553}
12554
12555/* Handler for thumb2 load word instructions. */
12556
12557static int
12558thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12559{
12560 uint32_t record_buf[8];
12561
12562 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12563 record_buf[1] = ARM_PS_REGNUM;
12564 thumb2_insn_r->reg_rec_count = 2;
12565
12566 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12567 record_buf);
12568 return ARM_RECORD_SUCCESS;
12569}
12570
12571/* Handler for thumb2 long multiply, long multiply accumulate, and
12572 divide instructions. */
12573
12574static int
12575thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12576{
12577 uint32_t opcode1 = 0, opcode2 = 0;
12578 uint32_t record_buf[8];
12579
12580 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12581 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12582
12583 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12584 {
 12585 /* Handle SMULL, UMULL, SMLAL. */
12586 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12587 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12588 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12589 record_buf[2] = ARM_PS_REGNUM;
12590 thumb2_insn_r->reg_rec_count = 3;
12591 }
12592 else if (1 == opcode1 || 3 == opcode2)
12593 {
12594 /* Handle SDIV and UDIV. */
12595 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12596 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12597 record_buf[2] = ARM_PS_REGNUM;
12598 thumb2_insn_r->reg_rec_count = 3;
12599 }
12600 else
12601 return ARM_RECORD_FAILURE;
12602
12603 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12604 record_buf);
12605 return ARM_RECORD_SUCCESS;
12606}
12607
12608/* Record handler for thumb32 coprocessor instructions. */
12609
12610static int
12611thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12612{
12613 if (bit (thumb2_insn_r->arm_insn, 25))
12614 return arm_record_coproc_data_proc (thumb2_insn_r);
12615 else
12616 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12617}
12618
12619/* Record handler for advanced SIMD structure load/store instructions. */
12620
12621static int
12622thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12623{
12624 struct regcache *reg_cache = thumb2_insn_r->regcache;
12625 uint32_t l_bit, a_bit, b_bits;
12626 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12627 uint32_t reg_rn, reg_vd, address, f_elem;
12628 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12629 uint8_t f_ebytes;
12630
12631 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12632 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12633 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12634 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12635 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12636 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12637 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12638 f_elem = 8 / f_ebytes;
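 /* For example, a 16-bit element size (bits 6-7 == 1) gives
 f_ebytes == 2 and f_elem == 4 elements per 64-bit D register. */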
12639
12640 if (!l_bit)
12641 {
12642 ULONGEST u_regval = 0;
12643 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12644 address = u_regval;
12645
12646 if (!a_bit)
12647 {
12648 /* Handle VST1. */
12649 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12650 {
12651 if (b_bits == 0x07)
12652 bf_regs = 1;
12653 else if (b_bits == 0x0a)
12654 bf_regs = 2;
12655 else if (b_bits == 0x06)
12656 bf_regs = 3;
12657 else if (b_bits == 0x02)
12658 bf_regs = 4;
12659 else
12660 bf_regs = 0;
12661
12662 for (index_r = 0; index_r < bf_regs; index_r++)
12663 {
12664 for (index_e = 0; index_e < f_elem; index_e++)
12665 {
12666 record_buf_mem[index_m++] = f_ebytes;
12667 record_buf_mem[index_m++] = address;
12668 address = address + f_ebytes;
12669 thumb2_insn_r->mem_rec_count += 1;
12670 }
12671 }
12672 }
12673 /* Handle VST2. */
12674 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12675 {
12676 if (b_bits == 0x09 || b_bits == 0x08)
12677 bf_regs = 1;
12678 else if (b_bits == 0x03)
12679 bf_regs = 2;
12680 else
12681 bf_regs = 0;
12682
12683 for (index_r = 0; index_r < bf_regs; index_r++)
12684 for (index_e = 0; index_e < f_elem; index_e++)
12685 {
12686 for (loop_t = 0; loop_t < 2; loop_t++)
12687 {
12688 record_buf_mem[index_m++] = f_ebytes;
12689 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12690 thumb2_insn_r->mem_rec_count += 1;
12691 }
12692 address = address + (2 * f_ebytes);
12693 }
12694 }
12695 /* Handle VST3. */
12696 else if ((b_bits & 0x0e) == 0x04)
12697 {
12698 for (index_e = 0; index_e < f_elem; index_e++)
12699 {
12700 for (loop_t = 0; loop_t < 3; loop_t++)
12701 {
12702 record_buf_mem[index_m++] = f_ebytes;
12703 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12704 thumb2_insn_r->mem_rec_count += 1;
12705 }
12706 address = address + (3 * f_ebytes);
12707 }
12708 }
12709 /* Handle VST4. */
12710 else if (!(b_bits & 0x0e))
12711 {
12712 for (index_e = 0; index_e < f_elem; index_e++)
12713 {
12714 for (loop_t = 0; loop_t < 4; loop_t++)
12715 {
12716 record_buf_mem[index_m++] = f_ebytes;
12717 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12718 thumb2_insn_r->mem_rec_count += 1;
12719 }
12720 address = address + (4 * f_ebytes);
12721 }
12722 }
12723 }
12724 else
12725 {
12726 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12727
12728 if (bft_size == 0x00)
12729 f_ebytes = 1;
12730 else if (bft_size == 0x01)
12731 f_ebytes = 2;
12732 else if (bft_size == 0x02)
12733 f_ebytes = 4;
12734 else
12735 f_ebytes = 0;
12736
12737 /* Handle VST1. */
12738 if (!(b_bits & 0x0b) || b_bits == 0x08)
12739 thumb2_insn_r->mem_rec_count = 1;
12740 /* Handle VST2. */
12741 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12742 thumb2_insn_r->mem_rec_count = 2;
12743 /* Handle VST3. */
12744 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12745 thumb2_insn_r->mem_rec_count = 3;
12746 /* Handle VST4. */
12747 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12748 thumb2_insn_r->mem_rec_count = 4;
12749
 12750 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
 12751 {
 12752 record_buf_mem[index_m * 2] = f_ebytes;
 12753 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
 12754 }
12755 }
12756 }
12757 else
12758 {
12759 if (!a_bit)
12760 {
12761 /* Handle VLD1. */
12762 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12763 thumb2_insn_r->reg_rec_count = 1;
12764 /* Handle VLD2. */
12765 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12766 thumb2_insn_r->reg_rec_count = 2;
12767 /* Handle VLD3. */
12768 else if ((b_bits & 0x0e) == 0x04)
12769 thumb2_insn_r->reg_rec_count = 3;
12770 /* Handle VLD4. */
12771 else if (!(b_bits & 0x0e))
12772 thumb2_insn_r->reg_rec_count = 4;
12773 }
12774 else
12775 {
12776 /* Handle VLD1. */
12777 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12778 thumb2_insn_r->reg_rec_count = 1;
12779 /* Handle VLD2. */
12780 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12781 thumb2_insn_r->reg_rec_count = 2;
12782 /* Handle VLD3. */
12783 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12784 thumb2_insn_r->reg_rec_count = 3;
12785 /* Handle VLD4. */
12786 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12787 thumb2_insn_r->reg_rec_count = 4;
12788
12789 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12790 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12791 }
12792 }
12793
12794 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12795 {
12796 record_buf[index_r] = reg_rn;
12797 thumb2_insn_r->reg_rec_count += 1;
12798 }
12799
12800 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12801 record_buf);
12802 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12803 record_buf_mem);
12804 return 0;
12805}
12806
12807/* Decodes thumb2 instruction type and invokes its record handler. */
12808
12809static unsigned int
12810thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12811{
12812 uint32_t op, op1, op2;
12813
12814 op = bit (thumb2_insn_r->arm_insn, 15);
12815 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12816 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
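 /* op1 (bits 27-28) selects the major 32-bit Thumb group, op2
 (bits 20-26) narrows it down, and op (bit 15) distinguishes the
 branch/miscellaneous-control encodings when op1 == 0x02. */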
12817
12818 if (op1 == 0x01)
12819 {
12820 if (!(op2 & 0x64 ))
12821 {
12822 /* Load/store multiple instruction. */
12823 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12824 }
12825 else if (!((op2 & 0x64) ^ 0x04))
12826 {
12827 /* Load/store (dual/exclusive) and table branch instruction. */
12828 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12829 }
12830 else if (!((op2 & 0x20) ^ 0x20))
12831 {
12832 /* Data-processing (shifted register). */
12833 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12834 }
12835 else if (op2 & 0x40)
12836 {
12837 /* Co-processor instructions. */
60cc5e93 12838 return thumb2_record_coproc_insn (thumb2_insn_r);
12839 }
12840 }
12841 else if (op1 == 0x02)
12842 {
12843 if (op)
12844 {
12845 /* Branches and miscellaneous control instructions. */
12846 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12847 }
12848 else if (op2 & 0x20)
12849 {
12850 /* Data-processing (plain binary immediate) instruction. */
12851 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12852 }
12853 else
12854 {
12855 /* Data-processing (modified immediate). */
12856 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12857 }
12858 }
12859 else if (op1 == 0x03)
12860 {
12861 if (!(op2 & 0x71 ))
12862 {
12863 /* Store single data item. */
12864 return thumb2_record_str_single_data (thumb2_insn_r);
12865 }
12866 else if (!((op2 & 0x71) ^ 0x10))
12867 {
 12868 /* Advanced SIMD element or structure load/store instructions. */
1e1b6563 12869 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12870 }
12871 else if (!((op2 & 0x67) ^ 0x01))
12872 {
12873 /* Load byte, memory hints instruction. */
12874 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12875 }
12876 else if (!((op2 & 0x67) ^ 0x03))
12877 {
12878 /* Load halfword, memory hints instruction. */
12879 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12880 }
12881 else if (!((op2 & 0x67) ^ 0x05))
12882 {
12883 /* Load word instruction. */
12884 return thumb2_record_ld_word (thumb2_insn_r);
12885 }
12886 else if (!((op2 & 0x70) ^ 0x20))
12887 {
12888 /* Data-processing (register) instruction. */
12889 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12890 }
12891 else if (!((op2 & 0x78) ^ 0x30))
12892 {
12893 /* Multiply, multiply accumulate, abs diff instruction. */
12894 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12895 }
12896 else if (!((op2 & 0x78) ^ 0x38))
12897 {
12898 /* Long multiply, long multiply accumulate, and divide. */
12899 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12900 }
12901 else if (op2 & 0x40)
12902 {
12903 /* Co-processor instructions. */
60cc5e93 12904 return thumb2_record_coproc_insn (thumb2_insn_r);
12905 }
12906 }
12907
12908 return -1;
12909}
12910
12911/* Extract the arm/thumb/thumb2 insn depending on the size, and return 0
12912 on success and a positive value on failure. */
12913
12914static int
12915extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12916{
12917 gdb_byte buf[insn_size];
12918
12919 memset (&buf[0], 0, insn_size);
12920
12921 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12922 return 1;
12923 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12924 insn_size,
2959fed9 12925 gdbarch_byte_order_for_code (insn_record->gdbarch));
12926 return 0;
12927}
12928
12929typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12930
12931/* Decode an arm/thumb insn depending on its condition codes and opcodes,
12932 and dispatch it to the matching record handler. */
12933
12934static int
12935decode_insn (insn_decode_record *arm_record, record_type_t record_type,
01e57735 12936 uint32_t insn_size)
12937{
12938
 12939 /* Counting bits from 0, bits 25, 26 and 27 decode the type of an arm
 12940 instruction. */
0fa9c223 12941 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12942 {
12943 arm_record_data_proc_misc_ld_str, /* 000. */
12944 arm_record_data_proc_imm, /* 001. */
12945 arm_record_ld_st_imm_offset, /* 010. */
12946 arm_record_ld_st_reg_offset, /* 011. */
12947 arm_record_ld_st_multiple, /* 100. */
12948 arm_record_b_bl, /* 101. */
60cc5e93 12949 arm_record_asimd_vfp_coproc, /* 110. */
72508ac0
PO
12950 arm_record_coproc_data_proc /* 111. */
12951 };
12952
 12953 /* Counting bits from 0, bits 13, 14 and 15 decode the type of a thumb
 12954 instruction. */
0fa9c223 12955 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
 12956 {
12957 thumb_record_shift_add_sub, /* 000. */
12958 thumb_record_add_sub_cmp_mov, /* 001. */
12959 thumb_record_ld_st_reg_offset, /* 010. */
12960 thumb_record_ld_st_imm_offset, /* 011. */
12961 thumb_record_ld_st_stack, /* 100. */
12962 thumb_record_misc, /* 101. */
12963 thumb_record_ldm_stm_swi, /* 110. */
12964 thumb_record_branch /* 111. */
12965 };
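 /* As an illustration of the thumb dispatch: a PUSH insn such as
 0xb530 ("push {r4, r5, lr}") has bits 13-15 == 0x5, so it is
 handled by thumb_record_misc via thumb_handle_insn[5]. */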
12966
12967 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12968 uint32_t insn_id = 0;
12969
12970 if (extract_arm_insn (arm_record, insn_size))
12971 {
12972 if (record_debug)
12973 {
12974 printf_unfiltered (_("Process record: error reading memory at "
12975 "addr %s len = %d.\n"),
12976 paddress (arm_record->gdbarch,
12977 arm_record->this_addr), insn_size);
12978 }
12979 return -1;
12980 }
12981 else if (ARM_RECORD == record_type)
12982 {
12983 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12984 insn_id = bits (arm_record->arm_insn, 25, 27);
12985
12986 if (arm_record->cond == 0xf)
12987 ret = arm_record_extension_space (arm_record);
12988 else
01e57735 12989 {
12990 /* This insn has not fallen into the extension space, so
12991 decode it with the ordinary handlers. */
12992 ret = arm_handle_insn[insn_id] (arm_record);
12993 }
ca92db2d
YQ
12994 if (ret != ARM_RECORD_SUCCESS)
12995 {
12996 arm_record_unsupported_insn (arm_record);
12997 ret = -1;
12998 }
12999 }
13000 else if (THUMB_RECORD == record_type)
13001 {
 13002 /* As thumb does not have condition codes, we set cond to -1. */
13003 arm_record->cond = -1;
13004 insn_id = bits (arm_record->arm_insn, 13, 15);
13005 ret = thumb_handle_insn[insn_id] (arm_record);
13006 if (ret != ARM_RECORD_SUCCESS)
13007 {
13008 arm_record_unsupported_insn (arm_record);
13009 ret = -1;
13010 }
13011 }
13012 else if (THUMB2_RECORD == record_type)
13013 {
 13014 /* As thumb does not have condition codes, we set cond to -1. */
13015 arm_record->cond = -1;
13016
13017 /* Swap first half of 32bit thumb instruction with second half. */
13018 arm_record->arm_insn
01e57735 13019 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 13020
ca92db2d 13021 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 13022
ca92db2d 13023 if (ret != ARM_RECORD_SUCCESS)
13024 {
13025 arm_record_unsupported_insn (arm_record);
13026 ret = -1;
13027 }
13028 }
13029 else
13030 {
13031 /* Throw assertion. */
13032 gdb_assert_not_reached ("not a valid instruction, could not decode");
13033 }
13034
13035 return ret;
13036}
13037
13038
13039/* Cleans up local record registers and memory allocations. */
13040
13041static void
13042deallocate_reg_mem (insn_decode_record *record)
13043{
13044 xfree (record->arm_regs);
13045 xfree (record->arm_mems);
13046}
13047
13048
01e57735 13049/* Parse the current instruction and record the values of the registers and
 13050 memory that will be changed by the current instruction to "record_arch_list".
13051 Return -1 if something is wrong. */
13052
13053int
13054arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13055 CORE_ADDR insn_addr)
13056{
13057
13058 uint32_t no_of_rec = 0;
13059 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13060 ULONGEST t_bit = 0, insn_id = 0;
13061
13062 ULONGEST u_regval = 0;
13063
13064 insn_decode_record arm_record;
13065
13066 memset (&arm_record, 0, sizeof (insn_decode_record));
13067 arm_record.regcache = regcache;
13068 arm_record.this_addr = insn_addr;
13069 arm_record.gdbarch = gdbarch;
13070
13071
13072 if (record_debug > 1)
13073 {
13074 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 13075 "addr = %s\n",
13076 paddress (gdbarch, arm_record.this_addr));
13077 }
13078
13079 if (extract_arm_insn (&arm_record, 2))
13080 {
13081 if (record_debug)
13082 {
13083 printf_unfiltered (_("Process record: error reading memory at "
13084 "addr %s len = %d.\n"),
13085 paddress (arm_record.gdbarch,
13086 arm_record.this_addr), 2);
13087 }
13088 return -1;
13089 }
13090
13091 /* Check the insn, whether it is thumb or arm one. */
13092
13093 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13094 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13095
13096
13097 if (!(u_regval & t_bit))
13098 {
13099 /* We are decoding arm insn. */
13100 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13101 }
13102 else
13103 {
13104 insn_id = bits (arm_record.arm_insn, 11, 15);
13105 /* is it thumb2 insn? */
13106 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13107 {
13108 ret = decode_insn (&arm_record, THUMB2_RECORD,
13109 THUMB2_INSN_SIZE_BYTES);
13110 }
72508ac0 13111 else
13112 {
13113 /* We are decoding thumb insn. */
13114 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13115 }
13116 }
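 /* The first halfword of a 32-bit Thumb-2 insn always starts with
 0b11101, 0b11110 or 0b11111, i.e. 0x1D, 0x1E or 0x1F in bits 11-15,
 which is what the check above relies on. */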
13117
13118 if (0 == ret)
13119 {
13120 /* Record registers. */
25ea693b 13121 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13122 if (arm_record.arm_regs)
13123 {
13124 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13125 {
13126 if (record_full_arch_list_add_reg
25ea693b 13127 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13128 ret = -1;
13129 }
13130 }
13131 /* Record memories. */
13132 if (arm_record.arm_mems)
13133 {
13134 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13135 {
13136 if (record_full_arch_list_add_mem
13137 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13138 arm_record.arm_mems[no_of_rec].len))
13139 ret = -1;
13140 }
13141 }
72508ac0 13142
25ea693b 13143 if (record_full_arch_list_add_end ())
01e57735 13144 ret = -1;
13145 }
13146
13147
13148 deallocate_reg_mem (&arm_record);
13149
13150 return ret;
13151}
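/* Note: arm_process_record is expected to be installed as the gdbarch
 process_record hook (in arm_gdbarch_init earlier in this file), so it is
 invoked once per executed instruction while "record full" is active. */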