New gdbarch methods breakpoint_kind_from_pc and sw_breakpoint_from_kind
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
CommitLineData
ed9a39eb 1/* Common target dependent code for GDB on ARM systems.
0fd88904 2
618f726f 3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
c906108c 4
c5aa993b 5 This file is part of GDB.
c906108c 6
c5aa993b
JM
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
a9762ec7 9 the Free Software Foundation; either version 3 of the License, or
c5aa993b 10 (at your option) any later version.
c906108c 11
c5aa993b
JM
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
c906108c 16
c5aa993b 17 You should have received a copy of the GNU General Public License
a9762ec7 18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
c906108c 19
0baeab03
PA
20#include "defs.h"
21
0963b4bd 22#include <ctype.h> /* XXX for isupper (). */
34e8f22d 23
c906108c
SS
24#include "frame.h"
25#include "inferior.h"
45741a9c 26#include "infrun.h"
c906108c
SS
27#include "gdbcmd.h"
28#include "gdbcore.h"
0963b4bd 29#include "dis-asm.h" /* For register styles. */
4e052eda 30#include "regcache.h"
54483882 31#include "reggroups.h"
d16aafd8 32#include "doublest.h"
fd0407d6 33#include "value.h"
34e8f22d 34#include "arch-utils.h"
4be87837 35#include "osabi.h"
eb5492fa
DJ
36#include "frame-unwind.h"
37#include "frame-base.h"
38#include "trad-frame.h"
842e1f1e
DJ
39#include "objfiles.h"
40#include "dwarf2-frame.h"
e4c16157 41#include "gdbtypes.h"
29d73ae4 42#include "prologue-value.h"
25f8c692 43#include "remote.h"
123dc839
DJ
44#include "target-descriptions.h"
45#include "user-regs.h"
0e9e9abd 46#include "observer.h"
34e8f22d 47
8689682c 48#include "arch/arm.h"
d9311bfa 49#include "arch/arm-get-next-pcs.h"
34e8f22d 50#include "arm-tdep.h"
26216b98 51#include "gdb/sim-arm.h"
34e8f22d 52
082fc60d
RE
53#include "elf-bfd.h"
54#include "coff/internal.h"
97e03143 55#include "elf/arm.h"
c906108c 56
60c5725c 57#include "vec.h"
26216b98 58
72508ac0 59#include "record.h"
d02ed0bb 60#include "record-full.h"
325fac50 61#include <algorithm>
72508ac0 62
0a69eedb
YQ
63#include "features/arm/arm-with-m.c"
64#include "features/arm/arm-with-m-fpa-layout.c"
65#include "features/arm/arm-with-m-vfp-d16.c"
66#include "features/arm/arm-with-iwmmxt.c"
67#include "features/arm/arm-with-vfpv2.c"
68#include "features/arm/arm-with-vfpv3.c"
69#include "features/arm/arm-with-neon.c"
9779414d 70
6529d2dd
AC
71static int arm_debug;
72
082fc60d
RE
73/* Macros for setting and testing a bit in a minimal symbol that marks
74 it as Thumb function. The MSB of the minimal symbol's "info" field
f594e5e9 75 is used for this purpose.
082fc60d
RE
76
77 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
f594e5e9 78 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
082fc60d 79
0963b4bd 80#define MSYMBOL_SET_SPECIAL(msym) \
b887350f 81 MSYMBOL_TARGET_FLAG_1 (msym) = 1
082fc60d
RE
82
83#define MSYMBOL_IS_SPECIAL(msym) \
b887350f 84 MSYMBOL_TARGET_FLAG_1 (msym)
082fc60d 85
60c5725c
DJ
86/* Per-objfile data used for mapping symbols. */
87static const struct objfile_data *arm_objfile_data_key;
88
89struct arm_mapping_symbol
90{
91 bfd_vma value;
92 char type;
93};
94typedef struct arm_mapping_symbol arm_mapping_symbol_s;
95DEF_VEC_O(arm_mapping_symbol_s);
96
97struct arm_per_objfile
98{
99 VEC(arm_mapping_symbol_s) **section_maps;
100};
101
afd7eef0
RE
102/* The list of available "set arm ..." and "show arm ..." commands. */
103static struct cmd_list_element *setarmcmdlist = NULL;
104static struct cmd_list_element *showarmcmdlist = NULL;
105
fd50bc42
RE
106/* The type of floating-point to use. Keep this in sync with enum
107 arm_float_model, and the help string in _initialize_arm_tdep. */
40478521 108static const char *const fp_model_strings[] =
fd50bc42
RE
109{
110 "auto",
111 "softfpa",
112 "fpa",
113 "softvfp",
28e97307
DJ
114 "vfp",
115 NULL
fd50bc42
RE
116};
117
118/* A variable that can be configured by the user. */
119static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
120static const char *current_fp_model = "auto";
121
28e97307 122/* The ABI to use. Keep this in sync with arm_abi_kind. */
40478521 123static const char *const arm_abi_strings[] =
28e97307
DJ
124{
125 "auto",
126 "APCS",
127 "AAPCS",
128 NULL
129};
130
131/* A variable that can be configured by the user. */
132static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
133static const char *arm_abi_string = "auto";
134
0428b8f5 135/* The execution mode to assume. */
40478521 136static const char *const arm_mode_strings[] =
0428b8f5
DJ
137 {
138 "auto",
139 "arm",
68770265
MGD
140 "thumb",
141 NULL
0428b8f5
DJ
142 };
143
144static const char *arm_fallback_mode_string = "auto";
145static const char *arm_force_mode_string = "auto";
146
18819fa6
UW
147/* Internal override of the execution mode. -1 means no override,
148 0 means override to ARM mode, 1 means override to Thumb mode.
149 The effect is the same as if arm_force_mode has been set by the
150 user (except the internal override has precedence over a user's
151 arm_force_mode override). */
152static int arm_override_mode = -1;
153
94c30b78 154/* Number of different reg name sets (options). */
afd7eef0 155static int num_disassembly_options;
bc90b915 156
f32bf4a4
YQ
157/* The standard register names, and all the valid aliases for them. Note
158 that `fp', `sp' and `pc' are not added in this alias list, because they
159 have been added as builtin user registers in
160 std-regs.c:_initialize_frame_reg. */
123dc839
DJ
161static const struct
162{
163 const char *name;
164 int regnum;
165} arm_register_aliases[] = {
166 /* Basic register numbers. */
167 { "r0", 0 },
168 { "r1", 1 },
169 { "r2", 2 },
170 { "r3", 3 },
171 { "r4", 4 },
172 { "r5", 5 },
173 { "r6", 6 },
174 { "r7", 7 },
175 { "r8", 8 },
176 { "r9", 9 },
177 { "r10", 10 },
178 { "r11", 11 },
179 { "r12", 12 },
180 { "r13", 13 },
181 { "r14", 14 },
182 { "r15", 15 },
183 /* Synonyms (argument and variable registers). */
184 { "a1", 0 },
185 { "a2", 1 },
186 { "a3", 2 },
187 { "a4", 3 },
188 { "v1", 4 },
189 { "v2", 5 },
190 { "v3", 6 },
191 { "v4", 7 },
192 { "v5", 8 },
193 { "v6", 9 },
194 { "v7", 10 },
195 { "v8", 11 },
196 /* Other platform-specific names for r9. */
197 { "sb", 9 },
198 { "tr", 9 },
199 /* Special names. */
200 { "ip", 12 },
123dc839 201 { "lr", 14 },
123dc839
DJ
202 /* Names used by GCC (not listed in the ARM EABI). */
203 { "sl", 10 },
123dc839
DJ
204 /* A special name from the older ATPCS. */
205 { "wr", 7 },
206};
bc90b915 207
123dc839 208static const char *const arm_register_names[] =
da59e081
JM
209{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
210 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
211 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
212 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
213 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
214 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
94c30b78 215 "fps", "cpsr" }; /* 24 25 */
ed9a39eb 216
afd7eef0
RE
217/* Valid register name styles. */
218static const char **valid_disassembly_styles;
ed9a39eb 219
afd7eef0
RE
220/* Disassembly style to use. Default to "std" register names. */
221static const char *disassembly_style;
96baa820 222
ed9a39eb 223/* This is used to keep the bfd arch_info in sync with the disassembly
afd7eef0
RE
224 style. */
225static void set_disassembly_style_sfunc(char *, int,
ed9a39eb 226 struct cmd_list_element *);
afd7eef0 227static void set_disassembly_style (void);
ed9a39eb 228
b508a996 229static void convert_from_extended (const struct floatformat *, const void *,
be8626e0 230 void *, int);
b508a996 231static void convert_to_extended (const struct floatformat *, void *,
be8626e0 232 const void *, int);
ed9a39eb 233
05d1431c
PA
234static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, gdb_byte *buf);
58d6951d
DJ
237static void arm_neon_quad_write (struct gdbarch *gdbarch,
238 struct regcache *regcache,
239 int regnum, const gdb_byte *buf);
240
e7cf25a8 241static CORE_ADDR
553cb527 242 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
e7cf25a8
YQ
243
244
d9311bfa
AT
245/* get_next_pcs operations. */
246static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
247 arm_get_next_pcs_read_memory_unsigned_integer,
248 arm_get_next_pcs_syscall_next_pc,
249 arm_get_next_pcs_addr_bits_remove,
ed443b61
YQ
250 arm_get_next_pcs_is_thumb,
251 NULL,
d9311bfa
AT
252};
253
9b8d791a 254struct arm_prologue_cache
c3b4394c 255{
eb5492fa
DJ
256 /* The stack pointer at the time this frame was created; i.e. the
257 caller's stack pointer when this function was called. It is used
258 to identify this frame. */
259 CORE_ADDR prev_sp;
260
4be43953
DJ
261 /* The frame base for this frame is just prev_sp - frame size.
262 FRAMESIZE is the distance from the frame pointer to the
263 initial stack pointer. */
eb5492fa 264
c3b4394c 265 int framesize;
eb5492fa
DJ
266
267 /* The register used to hold the frame pointer for this frame. */
c3b4394c 268 int framereg;
eb5492fa
DJ
269
270 /* Saved register offsets. */
271 struct trad_frame_saved_reg *saved_regs;
c3b4394c 272};
ed9a39eb 273
0d39a070
DJ
274static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
275 CORE_ADDR prologue_start,
276 CORE_ADDR prologue_end,
277 struct arm_prologue_cache *cache);
278
cca44b1b
JB
279/* Architecture version for displaced stepping. This effects the behaviour of
280 certain instructions, and really should not be hard-wired. */
281
282#define DISPLACED_STEPPING_ARCH_VERSION 5
283
94c30b78 284/* Set to true if the 32-bit mode is in use. */
c906108c
SS
285
286int arm_apcs_32 = 1;
287
9779414d
DJ
288/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
289
478fd957 290int
9779414d
DJ
291arm_psr_thumb_bit (struct gdbarch *gdbarch)
292{
293 if (gdbarch_tdep (gdbarch)->is_m)
294 return XPSR_T;
295 else
296 return CPSR_T;
297}
298
d0e59a68
AT
299/* Determine if the processor is currently executing in Thumb mode. */
300
301int
302arm_is_thumb (struct regcache *regcache)
303{
304 ULONGEST cpsr;
305 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
306
307 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
308
309 return (cpsr & t_bit) != 0;
310}
311
b39cc962
DJ
312/* Determine if FRAME is executing in Thumb mode. */
313
25b41d01 314int
b39cc962
DJ
315arm_frame_is_thumb (struct frame_info *frame)
316{
317 CORE_ADDR cpsr;
9779414d 318 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
b39cc962
DJ
319
320 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
321 directly (from a signal frame or dummy frame) or by interpreting
322 the saved LR (from a prologue or DWARF frame). So consult it and
323 trust the unwinders. */
324 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
325
9779414d 326 return (cpsr & t_bit) != 0;
b39cc962
DJ
327}
328
60c5725c
DJ
329/* Callback for VEC_lower_bound. */
330
331static inline int
332arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
333 const struct arm_mapping_symbol *rhs)
334{
335 return lhs->value < rhs->value;
336}
337
f9d67f43
DJ
338/* Search for the mapping symbol covering MEMADDR. If one is found,
339 return its type. Otherwise, return 0. If START is non-NULL,
340 set *START to the location of the mapping symbol. */
c906108c 341
f9d67f43
DJ
342static char
343arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
c906108c 344{
60c5725c 345 struct obj_section *sec;
0428b8f5 346
60c5725c
DJ
347 /* If there are mapping symbols, consult them. */
348 sec = find_pc_section (memaddr);
349 if (sec != NULL)
350 {
351 struct arm_per_objfile *data;
352 VEC(arm_mapping_symbol_s) *map;
aded6f54
PA
353 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
354 0 };
60c5725c
DJ
355 unsigned int idx;
356
9a3c8263
SM
357 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
358 arm_objfile_data_key);
60c5725c
DJ
359 if (data != NULL)
360 {
361 map = data->section_maps[sec->the_bfd_section->index];
362 if (!VEC_empty (arm_mapping_symbol_s, map))
363 {
364 struct arm_mapping_symbol *map_sym;
365
366 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
367 arm_compare_mapping_symbols);
368
369 /* VEC_lower_bound finds the earliest ordered insertion
370 point. If the following symbol starts at this exact
371 address, we use that; otherwise, the preceding
372 mapping symbol covers this address. */
373 if (idx < VEC_length (arm_mapping_symbol_s, map))
374 {
375 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
376 if (map_sym->value == map_key.value)
f9d67f43
DJ
377 {
378 if (start)
379 *start = map_sym->value + obj_section_addr (sec);
380 return map_sym->type;
381 }
60c5725c
DJ
382 }
383
384 if (idx > 0)
385 {
386 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
f9d67f43
DJ
387 if (start)
388 *start = map_sym->value + obj_section_addr (sec);
389 return map_sym->type;
60c5725c
DJ
390 }
391 }
392 }
393 }
394
f9d67f43
DJ
395 return 0;
396}
397
398/* Determine if the program counter specified in MEMADDR is in a Thumb
399 function. This function should be called for addresses unrelated to
400 any executing frame; otherwise, prefer arm_frame_is_thumb. */
401
e3039479 402int
9779414d 403arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
f9d67f43 404{
7cbd4a93 405 struct bound_minimal_symbol sym;
f9d67f43 406 char type;
a42244db
YQ
407 struct displaced_step_closure* dsc
408 = get_displaced_step_closure_by_addr(memaddr);
409
410 /* If checking the mode of displaced instruction in copy area, the mode
411 should be determined by instruction on the original address. */
412 if (dsc)
413 {
414 if (debug_displaced)
415 fprintf_unfiltered (gdb_stdlog,
416 "displaced: check mode of %.8lx instead of %.8lx\n",
417 (unsigned long) dsc->insn_addr,
418 (unsigned long) memaddr);
419 memaddr = dsc->insn_addr;
420 }
f9d67f43
DJ
421
422 /* If bit 0 of the address is set, assume this is a Thumb address. */
423 if (IS_THUMB_ADDR (memaddr))
424 return 1;
425
18819fa6
UW
426 /* Respect internal mode override if active. */
427 if (arm_override_mode != -1)
428 return arm_override_mode;
429
f9d67f43
DJ
430 /* If the user wants to override the symbol table, let him. */
431 if (strcmp (arm_force_mode_string, "arm") == 0)
432 return 0;
433 if (strcmp (arm_force_mode_string, "thumb") == 0)
434 return 1;
435
9779414d
DJ
436 /* ARM v6-M and v7-M are always in Thumb mode. */
437 if (gdbarch_tdep (gdbarch)->is_m)
438 return 1;
439
f9d67f43
DJ
440 /* If there are mapping symbols, consult them. */
441 type = arm_find_mapping_symbol (memaddr, NULL);
442 if (type)
443 return type == 't';
444
ed9a39eb 445 /* Thumb functions have a "special" bit set in minimal symbols. */
c906108c 446 sym = lookup_minimal_symbol_by_pc (memaddr);
7cbd4a93
TT
447 if (sym.minsym)
448 return (MSYMBOL_IS_SPECIAL (sym.minsym));
0428b8f5
DJ
449
450 /* If the user wants to override the fallback mode, let them. */
451 if (strcmp (arm_fallback_mode_string, "arm") == 0)
452 return 0;
453 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
454 return 1;
455
456 /* If we couldn't find any symbol, but we're talking to a running
457 target, then trust the current value of $cpsr. This lets
458 "display/i $pc" always show the correct mode (though if there is
459 a symbol table we will not reach here, so it still may not be
18819fa6 460 displayed in the mode it will be executed). */
0428b8f5 461 if (target_has_registers)
18819fa6 462 return arm_frame_is_thumb (get_current_frame ());
0428b8f5
DJ
463
464 /* Otherwise we're out of luck; we assume ARM. */
465 return 0;
c906108c
SS
466}
467
ca90e760
FH
468/* Determine if the address specified equals any of these magic return
469 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
470 architectures.
471
472 From ARMv6-M Reference Manual B1.5.8
473 Table B1-5 Exception return behavior
474
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
479
480 From ARMv7-M Reference Manual B1.5.8
481 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
482
483 EXC_RETURN Return To Return Stack
484 0xFFFFFFF1 Handler mode Main
485 0xFFFFFFF9 Thread mode Main
486 0xFFFFFFFD Thread mode Process
487
488 Table B1-9 EXC_RETURN definition of exception return behavior, with
489 FP
490
491 EXC_RETURN Return To Return Stack Frame Type
492 0xFFFFFFE1 Handler mode Main Extended
493 0xFFFFFFE9 Thread mode Main Extended
494 0xFFFFFFED Thread mode Process Extended
495 0xFFFFFFF1 Handler mode Main Basic
496 0xFFFFFFF9 Thread mode Main Basic
497 0xFFFFFFFD Thread mode Process Basic
498
499 For more details see "B1.5.8 Exception return behavior"
500 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
501
502static int
503arm_m_addr_is_magic (CORE_ADDR addr)
504{
505 switch (addr)
506 {
507 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
508 the exception return behavior. */
509 case 0xffffffe1:
510 case 0xffffffe9:
511 case 0xffffffed:
512 case 0xfffffff1:
513 case 0xfffffff9:
514 case 0xfffffffd:
515 /* Address is magic. */
516 return 1;
517
518 default:
519 /* Address is not magic. */
520 return 0;
521 }
522}
523
181c1381 524/* Remove useless bits from addresses in a running program. */
34e8f22d 525static CORE_ADDR
24568a2c 526arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
c906108c 527{
2ae28aa9
YQ
528 /* On M-profile devices, do not strip the low bit from EXC_RETURN
529 (the magic exception return address). */
530 if (gdbarch_tdep (gdbarch)->is_m
ca90e760 531 && arm_m_addr_is_magic (val))
2ae28aa9
YQ
532 return val;
533
a3a2ee65 534 if (arm_apcs_32)
dd6be234 535 return UNMAKE_THUMB_ADDR (val);
c906108c 536 else
a3a2ee65 537 return (val & 0x03fffffc);
c906108c
SS
538}
539
0d39a070 540/* Return 1 if PC is the start of a compiler helper function which
e0634ccf
UW
541 can be safely ignored during prologue skipping. IS_THUMB is true
542 if the function is known to be a Thumb function due to the way it
543 is being called. */
0d39a070 544static int
e0634ccf 545skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
0d39a070 546{
e0634ccf 547 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7cbd4a93 548 struct bound_minimal_symbol msym;
0d39a070
DJ
549
550 msym = lookup_minimal_symbol_by_pc (pc);
7cbd4a93 551 if (msym.minsym != NULL
77e371c0 552 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
efd66ac6 553 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
e0634ccf 554 {
efd66ac6 555 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
0d39a070 556
e0634ccf
UW
557 /* The GNU linker's Thumb call stub to foo is named
558 __foo_from_thumb. */
559 if (strstr (name, "_from_thumb") != NULL)
560 name += 2;
0d39a070 561
e0634ccf
UW
562 /* On soft-float targets, __truncdfsf2 is called to convert promoted
563 arguments to their argument types in non-prototyped
564 functions. */
61012eef 565 if (startswith (name, "__truncdfsf2"))
e0634ccf 566 return 1;
61012eef 567 if (startswith (name, "__aeabi_d2f"))
e0634ccf 568 return 1;
0d39a070 569
e0634ccf 570 /* Internal functions related to thread-local storage. */
61012eef 571 if (startswith (name, "__tls_get_addr"))
e0634ccf 572 return 1;
61012eef 573 if (startswith (name, "__aeabi_read_tp"))
e0634ccf
UW
574 return 1;
575 }
576 else
577 {
578 /* If we run against a stripped glibc, we may be unable to identify
579 special functions by name. Check for one important case,
580 __aeabi_read_tp, by comparing the *code* against the default
581 implementation (this is hand-written ARM assembler in glibc). */
582
583 if (!is_thumb
584 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
585 == 0xe3e00a0f /* mov r0, #0xffff0fff */
586 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
587 == 0xe240f01f) /* sub pc, r0, #31 */
588 return 1;
589 }
ec3d575a 590
0d39a070
DJ
591 return 0;
592}
593
621c6d5b
YQ
594/* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
595 the first 16-bit of instruction, and INSN2 is the second 16-bit of
596 instruction. */
597#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
598 ((bits ((insn1), 0, 3) << 12) \
599 | (bits ((insn1), 10, 10) << 11) \
600 | (bits ((insn2), 12, 14) << 8) \
601 | bits ((insn2), 0, 7))
602
603/* Extract the immediate from instruction movw/movt of encoding A. INSN is
604 the 32-bit instruction. */
605#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
606 ((bits ((insn), 16, 19) << 12) \
607 | bits ((insn), 0, 11))
608
ec3d575a
UW
609/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
610
611static unsigned int
612thumb_expand_immediate (unsigned int imm)
613{
614 unsigned int count = imm >> 7;
615
616 if (count < 8)
617 switch (count / 2)
618 {
619 case 0:
620 return imm & 0xff;
621 case 1:
622 return (imm & 0xff) | ((imm & 0xff) << 16);
623 case 2:
624 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
625 case 3:
626 return (imm & 0xff) | ((imm & 0xff) << 8)
627 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
628 }
629
630 return (0x80 | (imm & 0x7f)) << (32 - count);
631}
632
540314bd
YQ
633/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
634 epilogue, 0 otherwise. */
635
636static int
637thumb_instruction_restores_sp (unsigned short insn)
638{
639 return (insn == 0x46bd /* mov sp, r7 */
640 || (insn & 0xff80) == 0xb000 /* add sp, imm */
641 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
642}
643
29d73ae4
DJ
644/* Analyze a Thumb prologue, looking for a recognizable stack frame
645 and frame pointer. Scan until we encounter a store that could
0d39a070
DJ
646 clobber the stack frame unexpectedly, or an unknown instruction.
647 Return the last address which is definitely safe to skip for an
648 initial breakpoint. */
c906108c
SS
649
650static CORE_ADDR
29d73ae4
DJ
651thumb_analyze_prologue (struct gdbarch *gdbarch,
652 CORE_ADDR start, CORE_ADDR limit,
653 struct arm_prologue_cache *cache)
c906108c 654{
0d39a070 655 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 656 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
29d73ae4
DJ
657 int i;
658 pv_t regs[16];
659 struct pv_area *stack;
660 struct cleanup *back_to;
661 CORE_ADDR offset;
ec3d575a 662 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 663
29d73ae4
DJ
664 for (i = 0; i < 16; i++)
665 regs[i] = pv_register (i, 0);
55f960e1 666 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
29d73ae4
DJ
667 back_to = make_cleanup_free_pv_area (stack);
668
29d73ae4 669 while (start < limit)
c906108c 670 {
29d73ae4
DJ
671 unsigned short insn;
672
e17a4113 673 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 674
94c30b78 675 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 676 {
29d73ae4
DJ
677 int regno;
678 int mask;
4be43953
DJ
679
680 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
681 break;
29d73ae4
DJ
682
683 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
684 whether to save LR (R14). */
685 mask = (insn & 0xff) | ((insn & 0x100) << 6);
686
687 /* Calculate offsets of saved R0-R7 and LR. */
688 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
689 if (mask & (1 << regno))
690 {
29d73ae4
DJ
691 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
692 -4);
693 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
694 }
da59e081 695 }
1db01f22 696 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
da59e081 697 {
29d73ae4 698 offset = (insn & 0x7f) << 2; /* get scaled offset */
1db01f22
YQ
699 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
700 -offset);
da59e081 701 }
808f7ab1
YQ
702 else if (thumb_instruction_restores_sp (insn))
703 {
704 /* Don't scan past the epilogue. */
705 break;
706 }
0d39a070
DJ
707 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
708 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
709 (insn & 0xff) << 2);
710 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
711 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
712 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
713 bits (insn, 6, 8));
714 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
715 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
716 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
717 bits (insn, 0, 7));
718 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
719 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
720 && pv_is_constant (regs[bits (insn, 3, 5)]))
721 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
722 regs[bits (insn, 6, 8)]);
723 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
724 && pv_is_constant (regs[bits (insn, 3, 6)]))
725 {
726 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
727 int rm = bits (insn, 3, 6);
728 regs[rd] = pv_add (regs[rd], regs[rm]);
729 }
29d73ae4 730 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 731 {
29d73ae4
DJ
732 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
733 int src_reg = (insn & 0x78) >> 3;
734 regs[dst_reg] = regs[src_reg];
da59e081 735 }
29d73ae4 736 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 737 {
29d73ae4
DJ
738 /* Handle stores to the stack. Normally pushes are used,
739 but with GCC -mtpcs-frame, there may be other stores
740 in the prologue to create the frame. */
741 int regno = (insn >> 8) & 0x7;
742 pv_t addr;
743
744 offset = (insn & 0xff) << 2;
745 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
746
747 if (pv_area_store_would_trash (stack, addr))
748 break;
749
750 pv_area_store (stack, addr, 4, regs[regno]);
da59e081 751 }
0d39a070
DJ
752 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
753 {
754 int rd = bits (insn, 0, 2);
755 int rn = bits (insn, 3, 5);
756 pv_t addr;
757
758 offset = bits (insn, 6, 10) << 2;
759 addr = pv_add_constant (regs[rn], offset);
760
761 if (pv_area_store_would_trash (stack, addr))
762 break;
763
764 pv_area_store (stack, addr, 4, regs[rd]);
765 }
766 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
767 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
768 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
769 /* Ignore stores of argument registers to the stack. */
770 ;
771 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
772 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
773 /* Ignore block loads from the stack, potentially copying
774 parameters from memory. */
775 ;
776 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
777 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
778 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
779 /* Similarly ignore single loads from the stack. */
780 ;
781 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
782 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
783 /* Skip register copies, i.e. saves to another register
784 instead of the stack. */
785 ;
786 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
787 /* Recognize constant loads; even with small stacks these are necessary
788 on Thumb. */
789 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
790 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
791 {
792 /* Constant pool loads, for the same reason. */
793 unsigned int constant;
794 CORE_ADDR loc;
795
796 loc = start + 4 + bits (insn, 0, 7) * 4;
797 constant = read_memory_unsigned_integer (loc, 4, byte_order);
798 regs[bits (insn, 8, 10)] = pv_constant (constant);
799 }
db24da6d 800 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 801 {
0d39a070
DJ
802 unsigned short inst2;
803
804 inst2 = read_memory_unsigned_integer (start + 2, 2,
805 byte_order_for_code);
806
807 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
808 {
809 /* BL, BLX. Allow some special function calls when
810 skipping the prologue; GCC generates these before
811 storing arguments to the stack. */
812 CORE_ADDR nextpc;
813 int j1, j2, imm1, imm2;
814
815 imm1 = sbits (insn, 0, 10);
816 imm2 = bits (inst2, 0, 10);
817 j1 = bit (inst2, 13);
818 j2 = bit (inst2, 11);
819
820 offset = ((imm1 << 12) + (imm2 << 1));
821 offset ^= ((!j2) << 22) | ((!j1) << 23);
822
823 nextpc = start + 4 + offset;
824 /* For BLX make sure to clear the low bits. */
825 if (bit (inst2, 12) == 0)
826 nextpc = nextpc & 0xfffffffc;
827
e0634ccf
UW
828 if (!skip_prologue_function (gdbarch, nextpc,
829 bit (inst2, 12) != 0))
0d39a070
DJ
830 break;
831 }
ec3d575a 832
0963b4bd
MS
833 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
834 { registers } */
ec3d575a
UW
835 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
836 {
837 pv_t addr = regs[bits (insn, 0, 3)];
838 int regno;
839
840 if (pv_area_store_would_trash (stack, addr))
841 break;
842
843 /* Calculate offsets of saved registers. */
844 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
845 if (inst2 & (1 << regno))
846 {
847 addr = pv_add_constant (addr, -4);
848 pv_area_store (stack, addr, 4, regs[regno]);
849 }
850
851 if (insn & 0x0020)
852 regs[bits (insn, 0, 3)] = addr;
853 }
854
0963b4bd
MS
855 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
856 [Rn, #+/-imm]{!} */
ec3d575a
UW
857 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
858 {
859 int regno1 = bits (inst2, 12, 15);
860 int regno2 = bits (inst2, 8, 11);
861 pv_t addr = regs[bits (insn, 0, 3)];
862
863 offset = inst2 & 0xff;
864 if (insn & 0x0080)
865 addr = pv_add_constant (addr, offset);
866 else
867 addr = pv_add_constant (addr, -offset);
868
869 if (pv_area_store_would_trash (stack, addr))
870 break;
871
872 pv_area_store (stack, addr, 4, regs[regno1]);
873 pv_area_store (stack, pv_add_constant (addr, 4),
874 4, regs[regno2]);
875
876 if (insn & 0x0020)
877 regs[bits (insn, 0, 3)] = addr;
878 }
879
880 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
881 && (inst2 & 0x0c00) == 0x0c00
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 {
884 int regno = bits (inst2, 12, 15);
885 pv_t addr = regs[bits (insn, 0, 3)];
886
887 offset = inst2 & 0xff;
888 if (inst2 & 0x0200)
889 addr = pv_add_constant (addr, offset);
890 else
891 addr = pv_add_constant (addr, -offset);
892
893 if (pv_area_store_would_trash (stack, addr))
894 break;
895
896 pv_area_store (stack, addr, 4, regs[regno]);
897
898 if (inst2 & 0x0100)
899 regs[bits (insn, 0, 3)] = addr;
900 }
901
902 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
903 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
904 {
905 int regno = bits (inst2, 12, 15);
906 pv_t addr;
907
908 offset = inst2 & 0xfff;
909 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
910
911 if (pv_area_store_would_trash (stack, addr))
912 break;
913
914 pv_area_store (stack, addr, 4, regs[regno]);
915 }
916
917 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 918 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 919 /* Ignore stores of argument registers to the stack. */
0d39a070 920 ;
ec3d575a
UW
921
922 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
923 && (inst2 & 0x0d00) == 0x0c00
0d39a070 924 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 925 /* Ignore stores of argument registers to the stack. */
0d39a070 926 ;
ec3d575a 927
0963b4bd
MS
928 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
929 { registers } */
ec3d575a
UW
930 && (inst2 & 0x8000) == 0x0000
931 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
932 /* Ignore block loads from the stack, potentially copying
933 parameters from memory. */
0d39a070 934 ;
ec3d575a 935
0963b4bd
MS
936 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
937 [Rn, #+/-imm] */
0d39a070 938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 939 /* Similarly ignore dual loads from the stack. */
0d39a070 940 ;
ec3d575a
UW
941
942 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
943 && (inst2 & 0x0d00) == 0x0c00
0d39a070 944 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 945 /* Similarly ignore single loads from the stack. */
0d39a070 946 ;
ec3d575a
UW
947
948 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 949 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 950 /* Similarly ignore single loads from the stack. */
0d39a070 951 ;
ec3d575a
UW
952
953 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
954 && (inst2 & 0x8000) == 0x0000)
955 {
956 unsigned int imm = ((bits (insn, 10, 10) << 11)
957 | (bits (inst2, 12, 14) << 8)
958 | bits (inst2, 0, 7));
959
960 regs[bits (inst2, 8, 11)]
961 = pv_add_constant (regs[bits (insn, 0, 3)],
962 thumb_expand_immediate (imm));
963 }
964
965 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
0d39a070 967 {
ec3d575a
UW
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
974 }
975
976 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
977 && (inst2 & 0x8000) == 0x0000)
978 {
979 unsigned int imm = ((bits (insn, 10, 10) << 11)
980 | (bits (inst2, 12, 14) << 8)
981 | bits (inst2, 0, 7));
982
983 regs[bits (inst2, 8, 11)]
984 = pv_add_constant (regs[bits (insn, 0, 3)],
985 - (CORE_ADDR) thumb_expand_immediate (imm));
986 }
987
988 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
989 && (inst2 & 0x8000) == 0x0000)
990 {
991 unsigned int imm = ((bits (insn, 10, 10) << 11)
992 | (bits (inst2, 12, 14) << 8)
993 | bits (inst2, 0, 7));
994
995 regs[bits (inst2, 8, 11)]
996 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
997 }
998
999 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1000 {
1001 unsigned int imm = ((bits (insn, 10, 10) << 11)
1002 | (bits (inst2, 12, 14) << 8)
1003 | bits (inst2, 0, 7));
1004
1005 regs[bits (inst2, 8, 11)]
1006 = pv_constant (thumb_expand_immediate (imm));
1007 }
1008
1009 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1010 {
621c6d5b
YQ
1011 unsigned int imm
1012 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
ec3d575a
UW
1013
1014 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1015 }
1016
1017 else if (insn == 0xea5f /* mov.w Rd,Rm */
1018 && (inst2 & 0xf0f0) == 0)
1019 {
1020 int dst_reg = (inst2 & 0x0f00) >> 8;
1021 int src_reg = inst2 & 0xf;
1022 regs[dst_reg] = regs[src_reg];
1023 }
1024
1025 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1026 {
1027 /* Constant pool loads. */
1028 unsigned int constant;
1029 CORE_ADDR loc;
1030
cac395ea 1031 offset = bits (inst2, 0, 11);
ec3d575a
UW
1032 if (insn & 0x0080)
1033 loc = start + 4 + offset;
1034 else
1035 loc = start + 4 - offset;
1036
1037 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1038 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1039 }
1040
1041 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1042 {
1043 /* Constant pool loads. */
1044 unsigned int constant;
1045 CORE_ADDR loc;
1046
cac395ea 1047 offset = bits (inst2, 0, 7) << 2;
ec3d575a
UW
1048 if (insn & 0x0080)
1049 loc = start + 4 + offset;
1050 else
1051 loc = start + 4 - offset;
1052
1053 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1054 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1055
1056 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1057 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1058 }
1059
1060 else if (thumb2_instruction_changes_pc (insn, inst2))
1061 {
1062 /* Don't scan past anything that might change control flow. */
0d39a070
DJ
1063 break;
1064 }
ec3d575a
UW
1065 else
1066 {
1067 /* The optimizer might shove anything into the prologue,
1068 so we just skip what we don't recognize. */
1069 unrecognized_pc = start;
1070 }
0d39a070
DJ
1071
1072 start += 2;
1073 }
ec3d575a 1074 else if (thumb_instruction_changes_pc (insn))
3d74b771 1075 {
ec3d575a 1076 /* Don't scan past anything that might change control flow. */
da3c6d4a 1077 break;
3d74b771 1078 }
ec3d575a
UW
1079 else
1080 {
1081 /* The optimizer might shove anything into the prologue,
1082 so we just skip what we don't recognize. */
1083 unrecognized_pc = start;
1084 }
29d73ae4
DJ
1085
1086 start += 2;
c906108c
SS
1087 }
1088
0d39a070
DJ
1089 if (arm_debug)
1090 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1091 paddress (gdbarch, start));
1092
ec3d575a
UW
1093 if (unrecognized_pc == 0)
1094 unrecognized_pc = start;
1095
29d73ae4
DJ
1096 if (cache == NULL)
1097 {
1098 do_cleanups (back_to);
ec3d575a 1099 return unrecognized_pc;
29d73ae4
DJ
1100 }
1101
29d73ae4
DJ
1102 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1103 {
1104 /* Frame pointer is fp. Frame size is constant. */
1105 cache->framereg = ARM_FP_REGNUM;
1106 cache->framesize = -regs[ARM_FP_REGNUM].k;
1107 }
1108 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1109 {
1110 /* Frame pointer is r7. Frame size is constant. */
1111 cache->framereg = THUMB_FP_REGNUM;
1112 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1113 }
72a2e3dc 1114 else
29d73ae4
DJ
1115 {
1116 /* Try the stack pointer... this is a bit desperate. */
1117 cache->framereg = ARM_SP_REGNUM;
1118 cache->framesize = -regs[ARM_SP_REGNUM].k;
1119 }
29d73ae4
DJ
1120
1121 for (i = 0; i < 16; i++)
1122 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1123 cache->saved_regs[i].addr = offset;
1124
1125 do_cleanups (back_to);
ec3d575a 1126 return unrecognized_pc;
c906108c
SS
1127}
1128
621c6d5b
YQ
1129
1130/* Try to analyze the instructions starting from PC, which load symbol
1131 __stack_chk_guard. Return the address of instruction after loading this
1132 symbol, set the dest register number to *BASEREG, and set the size of
1133 instructions for loading symbol in OFFSET. Return 0 if instructions are
1134 not recognized. */
1135
1136static CORE_ADDR
1137arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1138 unsigned int *destreg, int *offset)
1139{
1140 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1141 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1142 unsigned int low, high, address;
1143
1144 address = 0;
1145 if (is_thumb)
1146 {
1147 unsigned short insn1
1148 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1149
1150 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1151 {
1152 *destreg = bits (insn1, 8, 10);
1153 *offset = 2;
6ae274b7
YQ
1154 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1155 address = read_memory_unsigned_integer (address, 4,
1156 byte_order_for_code);
621c6d5b
YQ
1157 }
1158 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1159 {
1160 unsigned short insn2
1161 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1162
1163 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1164
1165 insn1
1166 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1167 insn2
1168 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1169
1170 /* movt Rd, #const */
1171 if ((insn1 & 0xfbc0) == 0xf2c0)
1172 {
1173 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1174 *destreg = bits (insn2, 8, 11);
1175 *offset = 8;
1176 address = (high << 16 | low);
1177 }
1178 }
1179 }
1180 else
1181 {
2e9e421f
UW
1182 unsigned int insn
1183 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1184
6ae274b7 1185 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
2e9e421f 1186 {
6ae274b7
YQ
1187 address = bits (insn, 0, 11) + pc + 8;
1188 address = read_memory_unsigned_integer (address, 4,
1189 byte_order_for_code);
1190
2e9e421f
UW
1191 *destreg = bits (insn, 12, 15);
1192 *offset = 4;
1193 }
1194 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1195 {
1196 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1197
1198 insn
1199 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1200
1201 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1202 {
1203 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1204 *destreg = bits (insn, 12, 15);
1205 *offset = 8;
1206 address = (high << 16 | low);
1207 }
1208 }
621c6d5b
YQ
1209 }
1210
1211 return address;
1212}
1213
1214/* Try to skip a sequence of instructions used for stack protector. If PC
0963b4bd
MS
1215 points to the first instruction of this sequence, return the address of
1216 first instruction after this sequence, otherwise, return original PC.
621c6d5b
YQ
1217
1218 On arm, this sequence of instructions is composed of mainly three steps,
1219 Step 1: load symbol __stack_chk_guard,
1220 Step 2: load from address of __stack_chk_guard,
1221 Step 3: store it to somewhere else.
1222
1223 Usually, instructions on step 2 and step 3 are the same on various ARM
1224 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1225 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1226 instructions in step 1 vary from different ARM architectures. On ARMv7,
1227 they are,
1228
1229 movw Rn, #:lower16:__stack_chk_guard
1230 movt Rn, #:upper16:__stack_chk_guard
1231
1232 On ARMv5t, it is,
1233
1234 ldr Rn, .Label
1235 ....
1236 .Lable:
1237 .word __stack_chk_guard
1238
1239 Since ldr/str is a very popular instruction, we can't use them as
1240 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1241 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1242 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1243
1244static CORE_ADDR
1245arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1246{
1247 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
22e048c9 1248 unsigned int basereg;
7cbd4a93 1249 struct bound_minimal_symbol stack_chk_guard;
621c6d5b
YQ
1250 int offset;
1251 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1252 CORE_ADDR addr;
1253
1254 /* Try to parse the instructions in Step 1. */
1255 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1256 &basereg, &offset);
1257 if (!addr)
1258 return pc;
1259
1260 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
6041179a
JB
1261 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1262 Otherwise, this sequence cannot be for stack protector. */
1263 if (stack_chk_guard.minsym == NULL
61012eef 1264 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
621c6d5b
YQ
1265 return pc;
1266
1267 if (is_thumb)
1268 {
1269 unsigned int destreg;
1270 unsigned short insn
1271 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1272
1273 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1274 if ((insn & 0xf800) != 0x6800)
1275 return pc;
1276 if (bits (insn, 3, 5) != basereg)
1277 return pc;
1278 destreg = bits (insn, 0, 2);
1279
1280 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1281 byte_order_for_code);
1282 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1283 if ((insn & 0xf800) != 0x6000)
1284 return pc;
1285 if (destreg != bits (insn, 0, 2))
1286 return pc;
1287 }
1288 else
1289 {
1290 unsigned int destreg;
1291 unsigned int insn
1292 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1293
1294 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1295 if ((insn & 0x0e500000) != 0x04100000)
1296 return pc;
1297 if (bits (insn, 16, 19) != basereg)
1298 return pc;
1299 destreg = bits (insn, 12, 15);
1300 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1301 insn = read_memory_unsigned_integer (pc + offset + 4,
1302 4, byte_order_for_code);
1303 if ((insn & 0x0e500000) != 0x04000000)
1304 return pc;
1305 if (bits (insn, 12, 15) != destreg)
1306 return pc;
1307 }
1308 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1309 on arm. */
1310 if (is_thumb)
1311 return pc + offset + 4;
1312 else
1313 return pc + offset + 8;
1314}
1315
da3c6d4a
MS
1316/* Advance the PC across any function entry prologue instructions to
1317 reach some "real" code.
34e8f22d
RE
1318
1319 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1320 prologue:
c906108c 1321
c5aa993b
JM
1322 mov ip, sp
1323 [stmfd sp!, {a1,a2,a3,a4}]
1324 stmfd sp!, {...,fp,ip,lr,pc}
ed9a39eb
JM
1325 [stfe f7, [sp, #-12]!]
1326 [stfe f6, [sp, #-12]!]
1327 [stfe f5, [sp, #-12]!]
1328 [stfe f4, [sp, #-12]!]
0963b4bd 1329 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1330
34e8f22d 1331static CORE_ADDR
6093d2eb 1332arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1333{
a89fea3c 1334 CORE_ADDR func_addr, limit_pc;
c906108c 1335
a89fea3c
JL
1336 /* See if we can determine the end of the prologue via the symbol table.
1337 If so, then return either PC, or the PC after the prologue, whichever
1338 is greater. */
1339 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1340 {
d80b854b
UW
1341 CORE_ADDR post_prologue_pc
1342 = skip_prologue_using_sal (gdbarch, func_addr);
43f3e411 1343 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
0d39a070 1344
621c6d5b
YQ
1345 if (post_prologue_pc)
1346 post_prologue_pc
1347 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1348
1349
0d39a070
DJ
1350 /* GCC always emits a line note before the prologue and another
1351 one after, even if the two are at the same address or on the
1352 same line. Take advantage of this so that we do not need to
1353 know every instruction that might appear in the prologue. We
1354 will have producer information for most binaries; if it is
1355 missing (e.g. for -gstabs), assuming the GNU tools. */
1356 if (post_prologue_pc
43f3e411
DE
1357 && (cust == NULL
1358 || COMPUNIT_PRODUCER (cust) == NULL
61012eef
GB
1359 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1360 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
0d39a070
DJ
1361 return post_prologue_pc;
1362
a89fea3c 1363 if (post_prologue_pc != 0)
0d39a070
DJ
1364 {
1365 CORE_ADDR analyzed_limit;
1366
1367 /* For non-GCC compilers, make sure the entire line is an
1368 acceptable prologue; GDB will round this function's
1369 return value up to the end of the following line so we
1370 can not skip just part of a line (and we do not want to).
1371
1372 RealView does not treat the prologue specially, but does
1373 associate prologue code with the opening brace; so this
1374 lets us skip the first line if we think it is the opening
1375 brace. */
9779414d 1376 if (arm_pc_is_thumb (gdbarch, func_addr))
0d39a070
DJ
1377 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1378 post_prologue_pc, NULL);
1379 else
1380 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1381 post_prologue_pc, NULL);
1382
1383 if (analyzed_limit != post_prologue_pc)
1384 return func_addr;
1385
1386 return post_prologue_pc;
1387 }
c906108c
SS
1388 }
1389
a89fea3c
JL
1390 /* Can't determine prologue from the symbol table, need to examine
1391 instructions. */
c906108c 1392
a89fea3c
JL
1393 /* Find an upper limit on the function prologue using the debug
1394 information. If the debug information could not be used to provide
1395 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1396 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1397 limit_pc = skip_prologue_using_sal (gdbarch, pc);
a89fea3c
JL
1398 if (limit_pc == 0)
1399 limit_pc = pc + 64; /* Magic. */
1400
c906108c 1401
29d73ae4 1402 /* Check if this is Thumb code. */
9779414d 1403 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1404 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
21daaaaf
YQ
1405 else
1406 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
c906108c 1407}
94c30b78 1408
c5aa993b 1409/* *INDENT-OFF* */
c906108c
SS
1410/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1411 This function decodes a Thumb function prologue to determine:
1412 1) the size of the stack frame
1413 2) which registers are saved on it
1414 3) the offsets of saved regs
1415 4) the offset from the stack pointer to the frame pointer
c906108c 1416
da59e081
JM
1417 A typical Thumb function prologue would create this stack frame
1418 (offsets relative to FP)
c906108c
SS
1419 old SP -> 24 stack parameters
1420 20 LR
1421 16 R7
1422 R7 -> 0 local variables (16 bytes)
1423 SP -> -12 additional stack space (12 bytes)
1424 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1425 12 bytes. The frame register is R7.
da59e081 1426
da3c6d4a
MS
1427 The comments for thumb_skip_prolog() describe the algorithm we use
1428 to detect the end of the prolog. */
c5aa993b
JM
1429/* *INDENT-ON* */
1430
c906108c 1431static void
be8626e0 1432thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1433 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1434{
1435 CORE_ADDR prologue_start;
1436 CORE_ADDR prologue_end;
c906108c 1437
b39cc962
DJ
1438 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1439 &prologue_end))
c906108c 1440 {
ec3d575a
UW
1441 /* See comment in arm_scan_prologue for an explanation of
1442 this heuristics. */
1443 if (prologue_end > prologue_start + 64)
1444 {
1445 prologue_end = prologue_start + 64;
1446 }
c906108c
SS
1447 }
1448 else
f7060f85
DJ
1449 /* We're in the boondocks: we have no idea where the start of the
1450 function is. */
1451 return;
c906108c 1452
325fac50 1453 prologue_end = std::min (prologue_end, prev_pc);
c906108c 1454
be8626e0 1455 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1456}
1457
f303bc3e
YQ
1458/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1459 otherwise. */
1460
1461static int
1462arm_instruction_restores_sp (unsigned int insn)
1463{
1464 if (bits (insn, 28, 31) != INST_NV)
1465 {
1466 if ((insn & 0x0df0f000) == 0x0080d000
1467 /* ADD SP (register or immediate). */
1468 || (insn & 0x0df0f000) == 0x0040d000
1469 /* SUB SP (register or immediate). */
1470 || (insn & 0x0ffffff0) == 0x01a0d000
1471 /* MOV SP. */
1472 || (insn & 0x0fff0000) == 0x08bd0000
1473 /* POP (LDMIA). */
1474 || (insn & 0x0fff0000) == 0x049d0000)
1475 /* POP of a single register. */
1476 return 1;
1477 }
1478
1479 return 0;
1480}
1481
0d39a070
DJ
1482/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1483 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1484 fill it in. Return the first address not recognized as a prologue
1485 instruction.
eb5492fa 1486
0d39a070
DJ
1487 We recognize all the instructions typically found in ARM prologues,
1488 plus harmless instructions which can be skipped (either for analysis
1489 purposes, or a more restrictive set that can be skipped when finding
1490 the end of the prologue). */
1491
1492static CORE_ADDR
1493arm_analyze_prologue (struct gdbarch *gdbarch,
1494 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1495 struct arm_prologue_cache *cache)
1496{
0d39a070
DJ
1497 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1498 int regno;
1499 CORE_ADDR offset, current_pc;
1500 pv_t regs[ARM_FPS_REGNUM];
1501 struct pv_area *stack;
1502 struct cleanup *back_to;
0d39a070
DJ
1503 CORE_ADDR unrecognized_pc = 0;
1504
1505 /* Search the prologue looking for instructions that set up the
96baa820 1506 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1507
96baa820
JM
1508 Be careful, however, and if it doesn't look like a prologue,
1509 don't try to scan it. If, for instance, a frameless function
1510 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1511 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1512 and other operations that rely on a knowledge of the stack
0d39a070 1513 traceback. */
d4473757 1514
4be43953
DJ
1515 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1516 regs[regno] = pv_register (regno, 0);
55f960e1 1517 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1518 back_to = make_cleanup_free_pv_area (stack);
1519
94c30b78
MS
1520 for (current_pc = prologue_start;
1521 current_pc < prologue_end;
f43845b3 1522 current_pc += 4)
96baa820 1523 {
e17a4113
UW
1524 unsigned int insn
1525 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1526
94c30b78 1527 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1528 {
4be43953 1529 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1530 continue;
1531 }
0d39a070
DJ
1532 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1533 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1534 {
1535 unsigned imm = insn & 0xff; /* immediate value */
1536 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1537 int rd = bits (insn, 12, 15);
28cd8767 1538 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1539 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1540 continue;
1541 }
0d39a070
DJ
1542 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1543 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1544 {
1545 unsigned imm = insn & 0xff; /* immediate value */
1546 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1547 int rd = bits (insn, 12, 15);
28cd8767 1548 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1549 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1550 continue;
1551 }
0963b4bd
MS
1552 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1553 [sp, #-4]! */
f43845b3 1554 {
4be43953
DJ
1555 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1556 break;
1557 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1558 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1559 regs[bits (insn, 12, 15)]);
f43845b3
MS
1560 continue;
1561 }
1562 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1563 /* stmfd sp!, {..., fp, ip, lr, pc}
1564 or
1565 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1566 {
d4473757 1567 int mask = insn & 0xffff;
ed9a39eb 1568
4be43953
DJ
1569 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1570 break;
1571
94c30b78 1572 /* Calculate offsets of saved registers. */
34e8f22d 1573 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1574 if (mask & (1 << regno))
1575 {
0963b4bd
MS
1576 regs[ARM_SP_REGNUM]
1577 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1578 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1579 }
1580 }
0d39a070
DJ
1581 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1582 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1583 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1584 {
1585 /* No need to add this to saved_regs -- it's just an arg reg. */
1586 continue;
1587 }
0d39a070
DJ
1588 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1589 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1590 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1591 {
1592 /* No need to add this to saved_regs -- it's just an arg reg. */
1593 continue;
1594 }
0963b4bd
MS
1595 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1596 { registers } */
0d39a070
DJ
1597 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1598 {
1599 /* No need to add this to saved_regs -- it's just arg regs. */
1600 continue;
1601 }
d4473757
KB
1602 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1603 {
94c30b78
MS
1604 unsigned imm = insn & 0xff; /* immediate value */
1605 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1606 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1607 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1608 }
1609 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1610 {
94c30b78
MS
1611 unsigned imm = insn & 0xff; /* immediate value */
1612 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1613 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1614 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1615 }
0963b4bd
MS
1616 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1617 [sp, -#c]! */
2af46ca0 1618 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1619 {
4be43953
DJ
1620 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1621 break;
1622
1623 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1624 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1625 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1626 }
0963b4bd
MS
1627 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1628 [sp!] */
2af46ca0 1629 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1630 {
1631 int n_saved_fp_regs;
1632 unsigned int fp_start_reg, fp_bound_reg;
1633
4be43953
DJ
1634 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1635 break;
1636
94c30b78 1637 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1638 {
d4473757
KB
1639 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1640 n_saved_fp_regs = 3;
1641 else
1642 n_saved_fp_regs = 1;
96baa820 1643 }
d4473757 1644 else
96baa820 1645 {
d4473757
KB
1646 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1647 n_saved_fp_regs = 2;
1648 else
1649 n_saved_fp_regs = 4;
96baa820 1650 }
d4473757 1651
34e8f22d 1652 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1653 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1654 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1655 {
4be43953
DJ
1656 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1657 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1658 regs[fp_start_reg++]);
96baa820 1659 }
c906108c 1660 }
0d39a070
DJ
1661 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1662 {
1663 /* Allow some special function calls when skipping the
1664 prologue; GCC generates these before storing arguments to
1665 the stack. */
1666 CORE_ADDR dest = BranchDest (current_pc, insn);
1667
e0634ccf 1668 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1669 continue;
1670 else
1671 break;
1672 }
d4473757 1673 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1674 break; /* Condition not true, exit early. */
0d39a070
DJ
1675 else if (arm_instruction_changes_pc (insn))
1676 /* Don't scan past anything that might change control flow. */
1677 break;
f303bc3e
YQ
1678 else if (arm_instruction_restores_sp (insn))
1679 {
1680 /* Don't scan past the epilogue. */
1681 break;
1682 }
d19f7eee
UW
1683 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1684 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1685 /* Ignore block loads from the stack, potentially copying
1686 parameters from memory. */
1687 continue;
1688 else if ((insn & 0xfc500000) == 0xe4100000
1689 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1690 /* Similarly ignore single loads from the stack. */
1691 continue;
0d39a070
DJ
1692 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1693 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1694 register instead of the stack. */
d4473757 1695 continue;
0d39a070
DJ
1696 else
1697 {
21daaaaf
YQ
 1698 /* The optimizer might shove anything into the prologue.  If we
 1699 are building up a cache (cache != NULL) from scanning the prologue,
 1700 we just skip what we don't recognize and scan further, to make the
 1701 cache as complete as possible.  However, if we are merely skipping
 1702 the prologue, we stop immediately at the first unrecognized
 1703 instruction. */
0d39a070 1704 unrecognized_pc = current_pc;
21daaaaf
YQ
1705 if (cache != NULL)
1706 continue;
1707 else
1708 break;
0d39a070 1709 }
c906108c
SS
1710 }
1711
0d39a070
DJ
1712 if (unrecognized_pc == 0)
1713 unrecognized_pc = current_pc;
1714
0d39a070
DJ
1715 if (cache)
1716 {
4072f920
YQ
1717 int framereg, framesize;
1718
1719 /* The frame size is just the distance from the frame register
1720 to the original stack pointer. */
1721 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1722 {
1723 /* Frame pointer is fp. */
1724 framereg = ARM_FP_REGNUM;
1725 framesize = -regs[ARM_FP_REGNUM].k;
1726 }
1727 else
1728 {
1729 /* Try the stack pointer... this is a bit desperate. */
1730 framereg = ARM_SP_REGNUM;
1731 framesize = -regs[ARM_SP_REGNUM].k;
1732 }
1733
0d39a070
DJ
1734 cache->framereg = framereg;
1735 cache->framesize = framesize;
1736
1737 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1738 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1739 cache->saved_regs[regno].addr = offset;
1740 }
1741
1742 if (arm_debug)
1743 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1744 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1745
1746 do_cleanups (back_to);
0d39a070
DJ
1747 return unrecognized_pc;
1748}
1749
1750static void
1751arm_scan_prologue (struct frame_info *this_frame,
1752 struct arm_prologue_cache *cache)
1753{
1754 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1755 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1756 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1757 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1758 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1759
1760 /* Assume there is no frame until proven otherwise. */
1761 cache->framereg = ARM_SP_REGNUM;
1762 cache->framesize = 0;
1763
1764 /* Check for Thumb prologue. */
1765 if (arm_frame_is_thumb (this_frame))
1766 {
1767 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1768 return;
1769 }
1770
1771 /* Find the function prologue. If we can't find the function in
1772 the symbol table, peek in the stack frame to find the PC. */
1773 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1774 &prologue_end))
1775 {
1776 /* One way to find the end of the prologue (which works well
1777 for unoptimized code) is to do the following:
1778
1779 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1780
1781 if (sal.line == 0)
1782 prologue_end = prev_pc;
1783 else if (sal.end < prologue_end)
1784 prologue_end = sal.end;
1785
1786 This mechanism is very accurate so long as the optimizer
1787 doesn't move any instructions from the function body into the
1788 prologue. If this happens, sal.end will be the last
1789 instruction in the first hunk of prologue code just before
1790 the first instruction that the scheduler has moved from
1791 the body to the prologue.
1792
1793 In order to make sure that we scan all of the prologue
1794 instructions, we use a slightly less accurate mechanism which
1795 may scan more than necessary. To help compensate for this
1796 lack of accuracy, the prologue scanning loop below contains
1797 several clauses which'll cause the loop to terminate early if
1798 an implausible prologue instruction is encountered.
1799
1800 The expression
1801
1802 prologue_start + 64
1803
1804 is a suitable endpoint since it accounts for the largest
1805 possible prologue plus up to five instructions inserted by
1806 the scheduler. */
1807
1808 if (prologue_end > prologue_start + 64)
1809 {
1810 prologue_end = prologue_start + 64; /* See above. */
1811 }
1812 }
1813 else
1814 {
1815 /* We have no symbol information. Our only option is to assume this
1816 function has a standard stack frame and the normal frame register.
1817 Then, we can find the value of our frame pointer on entrance to
1818 the callee (or at the present moment if this is the innermost frame).
1819 The value stored there should be the address of the stmfd + 8. */
1820 CORE_ADDR frame_loc;
1821 LONGEST return_value;
1822
1823 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1824 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1825 return;
1826 else
1827 {
1828 prologue_start = gdbarch_addr_bits_remove
1829 (gdbarch, return_value) - 8;
1830 prologue_end = prologue_start + 64; /* See above. */
1831 }
1832 }
1833
1834 if (prev_pc < prologue_end)
1835 prologue_end = prev_pc;
1836
1837 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1838}
1839
eb5492fa 1840static struct arm_prologue_cache *
a262aec2 1841arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1842{
eb5492fa
DJ
1843 int reg;
1844 struct arm_prologue_cache *cache;
1845 CORE_ADDR unwound_fp;
c5aa993b 1846
35d5d4ee 1847 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1848 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1849
a262aec2 1850 arm_scan_prologue (this_frame, cache);
848cfffb 1851
a262aec2 1852 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1853 if (unwound_fp == 0)
1854 return cache;
c906108c 1855
4be43953 1856 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1857
eb5492fa
DJ
1858 /* Calculate actual addresses of saved registers using offsets
1859 determined by arm_scan_prologue. */
a262aec2 1860 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1861 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1862 cache->saved_regs[reg].addr += cache->prev_sp;
1863
1864 return cache;
c906108c
SS
1865}
1866
c1ee9414
LM
1867/* Implementation of the stop_reason hook for arm_prologue frames. */
1868
1869static enum unwind_stop_reason
1870arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1871 void **this_cache)
1872{
1873 struct arm_prologue_cache *cache;
1874 CORE_ADDR pc;
1875
1876 if (*this_cache == NULL)
1877 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1878 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1879
1880 /* This is meant to halt the backtrace at "_start". */
1881 pc = get_frame_pc (this_frame);
1882 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1883 return UNWIND_OUTERMOST;
1884
1885 /* If we've hit a wall, stop. */
1886 if (cache->prev_sp == 0)
1887 return UNWIND_OUTERMOST;
1888
1889 return UNWIND_NO_REASON;
1890}
1891
eb5492fa
DJ
1892/* Our frame ID for a normal frame is the current function's starting PC
1893 and the caller's SP when we were called. */
c906108c 1894
148754e5 1895static void
a262aec2 1896arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1897 void **this_cache,
1898 struct frame_id *this_id)
c906108c 1899{
eb5492fa
DJ
1900 struct arm_prologue_cache *cache;
1901 struct frame_id id;
2c404490 1902 CORE_ADDR pc, func;
f079148d 1903
eb5492fa 1904 if (*this_cache == NULL)
a262aec2 1905 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1906 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1907
0e9e9abd
UW
1908 /* Use function start address as part of the frame ID. If we cannot
1909 identify the start address (due to missing symbol information),
1910 fall back to just using the current PC. */
c1ee9414 1911 pc = get_frame_pc (this_frame);
2c404490 1912 func = get_frame_func (this_frame);
0e9e9abd
UW
1913 if (!func)
1914 func = pc;
1915
eb5492fa 1916 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1917 *this_id = id;
c906108c
SS
1918}
1919
a262aec2
DJ
1920static struct value *
1921arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1922 void **this_cache,
a262aec2 1923 int prev_regnum)
24de872b 1924{
24568a2c 1925 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1926 struct arm_prologue_cache *cache;
1927
eb5492fa 1928 if (*this_cache == NULL)
a262aec2 1929 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1930 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1931
eb5492fa 1932 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1933 instead. The prologue may save PC, but it will point into this
1934 frame's prologue, not the next frame's resume location. Also
1935 strip the saved T bit. A valid LR may have the low bit set, but
1936 a valid PC never does. */
eb5492fa 1937 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1938 {
1939 CORE_ADDR lr;
1940
1941 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1942 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1943 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1944 }
24de872b 1945
eb5492fa 1946 /* SP is generally not saved to the stack, but this frame is
a262aec2 1947 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1948 The value was already reconstructed into PREV_SP. */
1949 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1950 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1951
b39cc962
DJ
1952 /* The CPSR may have been changed by the call instruction and by the
1953 called function. The only bit we can reconstruct is the T bit,
1954 by checking the low bit of LR as of the call. This is a reliable
1955 indicator of Thumb-ness except for some ARM v4T pre-interworking
1956 Thumb code, which could get away with a clear low bit as long as
1957 the called function did not use bx. Guess that all other
1958 bits are unchanged; the condition flags are presumably lost,
1959 but the processor status is likely valid. */
1960 if (prev_regnum == ARM_PS_REGNUM)
1961 {
1962 CORE_ADDR lr, cpsr;
9779414d 1963 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1964
1965 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1966 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1967 if (IS_THUMB_ADDR (lr))
9779414d 1968 cpsr |= t_bit;
b39cc962 1969 else
9779414d 1970 cpsr &= ~t_bit;
b39cc962
DJ
1971 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1972 }
1973
a262aec2
DJ
1974 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1975 prev_regnum);
eb5492fa
DJ
1976}
1977
1978struct frame_unwind arm_prologue_unwind = {
1979 NORMAL_FRAME,
c1ee9414 1980 arm_prologue_unwind_stop_reason,
eb5492fa 1981 arm_prologue_this_id,
a262aec2
DJ
1982 arm_prologue_prev_register,
1983 NULL,
1984 default_frame_sniffer
eb5492fa
DJ
1985};
1986
0e9e9abd
UW
1987/* Maintain a list of ARM exception table entries per objfile, similar to the
1988 list of mapping symbols. We only cache entries for standard ARM-defined
1989 personality routines; the cache will contain only the frame unwinding
1990 instructions associated with the entry (not the descriptors). */
1991
1992static const struct objfile_data *arm_exidx_data_key;
1993
1994struct arm_exidx_entry
1995{
1996 bfd_vma addr;
1997 gdb_byte *entry;
1998};
1999typedef struct arm_exidx_entry arm_exidx_entry_s;
2000DEF_VEC_O(arm_exidx_entry_s);
2001
2002struct arm_exidx_data
2003{
2004 VEC(arm_exidx_entry_s) **section_maps;
2005};
2006
2007static void
2008arm_exidx_data_free (struct objfile *objfile, void *arg)
2009{
9a3c8263 2010 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
2011 unsigned int i;
2012
2013 for (i = 0; i < objfile->obfd->section_count; i++)
2014 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2015}
2016
2017static inline int
2018arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2019 const struct arm_exidx_entry *rhs)
2020{
2021 return lhs->addr < rhs->addr;
2022}
2023
2024static struct obj_section *
2025arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2026{
2027 struct obj_section *osect;
2028
2029 ALL_OBJFILE_OSECTIONS (objfile, osect)
2030 if (bfd_get_section_flags (objfile->obfd,
2031 osect->the_bfd_section) & SEC_ALLOC)
2032 {
2033 bfd_vma start, size;
2034 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2035 size = bfd_get_section_size (osect->the_bfd_section);
2036
2037 if (start <= vma && vma < start + size)
2038 return osect;
2039 }
2040
2041 return NULL;
2042}
2043
2044/* Parse contents of exception table and exception index sections
2045 of OBJFILE, and fill in the exception table entry cache.
2046
2047 For each entry that refers to a standard ARM-defined personality
2048 routine, extract the frame unwinding instructions (from either
2049 the index or the table section). The unwinding instructions
2050 are normalized by:
2051 - extracting them from the rest of the table data
2052 - converting to host endianness
2053 - appending the implicit 0xb0 ("Finish") code
2054
2055 The extracted and normalized instructions are stored for later
2056 retrieval by the arm_find_exidx_entry routine. */
2057
2058static void
2059arm_exidx_new_objfile (struct objfile *objfile)
2060{
3bb47e8b 2061 struct cleanup *cleanups;
0e9e9abd
UW
2062 struct arm_exidx_data *data;
2063 asection *exidx, *extab;
2064 bfd_vma exidx_vma = 0, extab_vma = 0;
2065 bfd_size_type exidx_size = 0, extab_size = 0;
2066 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2067 LONGEST i;
2068
2069 /* If we've already touched this file, do nothing. */
2070 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2071 return;
3bb47e8b 2072 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2073
2074 /* Read contents of exception table and index. */
a5eda10c 2075 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2076 if (exidx)
2077 {
2078 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2079 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2080 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2081 make_cleanup (xfree, exidx_data);
2082
2083 if (!bfd_get_section_contents (objfile->obfd, exidx,
2084 exidx_data, 0, exidx_size))
2085 {
2086 do_cleanups (cleanups);
2087 return;
2088 }
2089 }
2090
2091 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2092 if (extab)
2093 {
2094 extab_vma = bfd_section_vma (objfile->obfd, extab);
2095 extab_size = bfd_get_section_size (extab);
224c3ddb 2096 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2097 make_cleanup (xfree, extab_data);
2098
2099 if (!bfd_get_section_contents (objfile->obfd, extab,
2100 extab_data, 0, extab_size))
2101 {
2102 do_cleanups (cleanups);
2103 return;
2104 }
2105 }
2106
2107 /* Allocate exception table data structure. */
2108 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2109 set_objfile_data (objfile, arm_exidx_data_key, data);
2110 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2111 objfile->obfd->section_count,
2112 VEC(arm_exidx_entry_s) *);
2113
2114 /* Fill in exception table. */
2115 for (i = 0; i < exidx_size / 8; i++)
2116 {
2117 struct arm_exidx_entry new_exidx_entry;
2118 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2119 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2120 bfd_vma addr = 0, word = 0;
2121 int n_bytes = 0, n_words = 0;
2122 struct obj_section *sec;
2123 gdb_byte *entry = NULL;
2124
2125 /* Extract address of start of function. */
2126 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2127 idx += exidx_vma + i * 8;
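      /* The first word of an index entry is a prel31 offset: a 31-bit
	 signed value relative to the word's own address.  The XOR and
	 subtract above sign-extend bit 30 (e.g. 0x7ffffff8 becomes -8),
	 so adding the entry's address, exidx_vma + i * 8, produces the
	 absolute address of the function start.  */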
2128
2129 /* Find section containing function and compute section offset. */
2130 sec = arm_obj_section_from_vma (objfile, idx);
2131 if (sec == NULL)
2132 continue;
2133 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2134
2135 /* Determine address of exception table entry. */
2136 if (val == 1)
2137 {
2138 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2139 }
2140 else if ((val & 0xff000000) == 0x80000000)
2141 {
2142 /* Exception table entry embedded in .ARM.exidx
2143 -- must be short form. */
2144 word = val;
2145 n_bytes = 3;
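	  /* For example, the common index word 0x80a8b0b0 selects
	     compact personality routine 0; its three low-order bytes
	     0xa8, 0xb0, 0xb0 are the unwind opcodes (pop {r4, lr};
	     finish; finish), which are copied out below starting from
	     the most significant of the three bytes.  */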
2146 }
2147 else if (!(val & 0x80000000))
2148 {
2149 /* Exception table entry in .ARM.extab. */
2150 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2151 addr += exidx_vma + i * 8 + 4;
2152
2153 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2154 {
2155 word = bfd_h_get_32 (objfile->obfd,
2156 extab_data + addr - extab_vma);
2157 addr += 4;
2158
2159 if ((word & 0xff000000) == 0x80000000)
2160 {
2161 /* Short form. */
2162 n_bytes = 3;
2163 }
2164 else if ((word & 0xff000000) == 0x81000000
2165 || (word & 0xff000000) == 0x82000000)
2166 {
2167 /* Long form. */
2168 n_bytes = 2;
2169 n_words = ((word >> 16) & 0xff);
2170 }
2171 else if (!(word & 0x80000000))
2172 {
2173 bfd_vma pers;
2174 struct obj_section *pers_sec;
2175 int gnu_personality = 0;
2176
2177 /* Custom personality routine. */
2178 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2179 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2180
2181 /* Check whether we've got one of the variants of the
2182 GNU personality routines. */
2183 pers_sec = arm_obj_section_from_vma (objfile, pers);
2184 if (pers_sec)
2185 {
2186 static const char *personality[] =
2187 {
2188 "__gcc_personality_v0",
2189 "__gxx_personality_v0",
2190 "__gcj_personality_v0",
2191 "__gnu_objc_personality_v0",
2192 NULL
2193 };
2194
2195 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2196 int k;
2197
2198 for (k = 0; personality[k]; k++)
2199 if (lookup_minimal_symbol_by_pc_name
2200 (pc, personality[k], objfile))
2201 {
2202 gnu_personality = 1;
2203 break;
2204 }
2205 }
2206
2207 /* If so, the next word contains a word count in the high
2208 byte, followed by the same unwind instructions as the
2209 pre-defined forms. */
2210 if (gnu_personality
2211 && addr + 4 <= extab_vma + extab_size)
2212 {
2213 word = bfd_h_get_32 (objfile->obfd,
2214 extab_data + addr - extab_vma);
2215 addr += 4;
2216 n_bytes = 3;
2217 n_words = ((word >> 24) & 0xff);
2218 }
2219 }
2220 }
2221 }
2222
2223 /* Sanity check address. */
2224 if (n_words)
2225 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2226 n_words = n_bytes = 0;
2227
2228 /* The unwind instructions reside in WORD (only the N_BYTES least
2229 significant bytes are valid), followed by N_WORDS words in the
2230 extab section starting at ADDR. */
2231 if (n_bytes || n_words)
2232 {
224c3ddb
SM
2233 gdb_byte *p = entry
2234 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2235 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2236
2237 while (n_bytes--)
2238 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2239
2240 while (n_words--)
2241 {
2242 word = bfd_h_get_32 (objfile->obfd,
2243 extab_data + addr - extab_vma);
2244 addr += 4;
2245
2246 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2247 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2248 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2249 *p++ = (gdb_byte) (word & 0xff);
2250 }
2251
2252 /* Implied "Finish" to terminate the list. */
2253 *p++ = 0xb0;
2254 }
2255
2256 /* Push entry onto vector. They are guaranteed to always
2257 appear in order of increasing addresses. */
2258 new_exidx_entry.addr = idx;
2259 new_exidx_entry.entry = entry;
2260 VEC_safe_push (arm_exidx_entry_s,
2261 data->section_maps[sec->the_bfd_section->index],
2262 &new_exidx_entry);
2263 }
2264
2265 do_cleanups (cleanups);
2266}
2267
2268/* Search for the exception table entry covering MEMADDR. If one is found,
2269 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2270 set *START to the start of the region covered by this entry. */
2271
2272static gdb_byte *
2273arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2274{
2275 struct obj_section *sec;
2276
2277 sec = find_pc_section (memaddr);
2278 if (sec != NULL)
2279 {
2280 struct arm_exidx_data *data;
2281 VEC(arm_exidx_entry_s) *map;
2282 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2283 unsigned int idx;
2284
9a3c8263
SM
2285 data = ((struct arm_exidx_data *)
2286 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2287 if (data != NULL)
2288 {
2289 map = data->section_maps[sec->the_bfd_section->index];
2290 if (!VEC_empty (arm_exidx_entry_s, map))
2291 {
2292 struct arm_exidx_entry *map_sym;
2293
2294 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2295 arm_compare_exidx_entries);
2296
2297 /* VEC_lower_bound finds the earliest ordered insertion
2298 point. If the following symbol starts at this exact
2299 address, we use that; otherwise, the preceding
2300 exception table entry covers this address. */
2301 if (idx < VEC_length (arm_exidx_entry_s, map))
2302 {
2303 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2304 if (map_sym->addr == map_key.addr)
2305 {
2306 if (start)
2307 *start = map_sym->addr + obj_section_addr (sec);
2308 return map_sym->entry;
2309 }
2310 }
2311
2312 if (idx > 0)
2313 {
2314 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2315 if (start)
2316 *start = map_sym->addr + obj_section_addr (sec);
2317 return map_sym->entry;
2318 }
2319 }
2320 }
2321 }
2322
2323 return NULL;
2324}
2325
2326/* Given the current frame THIS_FRAME, and its associated frame unwinding
2327 instruction list from the ARM exception table entry ENTRY, allocate and
2328 return a prologue cache structure describing how to unwind this frame.
2329
2330 Return NULL if the unwinding instruction list contains a "spare",
2331 "reserved" or "refuse to unwind" instruction as defined in section
2332 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2333 for the ARM Architecture" document. */
2334
2335static struct arm_prologue_cache *
2336arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2337{
2338 CORE_ADDR vsp = 0;
2339 int vsp_valid = 0;
2340
2341 struct arm_prologue_cache *cache;
2342 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2343 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2344
2345 for (;;)
2346 {
2347 gdb_byte insn;
2348
2349 /* Whenever we reload SP, we actually have to retrieve its
2350 actual value in the current frame. */
2351 if (!vsp_valid)
2352 {
2353 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2354 {
2355 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2356 vsp = get_frame_register_unsigned (this_frame, reg);
2357 }
2358 else
2359 {
2360 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2361 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2362 }
2363
2364 vsp_valid = 1;
2365 }
2366
2367 /* Decode next unwind instruction. */
2368 insn = *entry++;
2369
2370 if ((insn & 0xc0) == 0)
2371 {
2372 int offset = insn & 0x3f;
2373 vsp += (offset << 2) + 4;
2374 }
2375 else if ((insn & 0xc0) == 0x40)
2376 {
2377 int offset = insn & 0x3f;
2378 vsp -= (offset << 2) + 4;
2379 }
2380 else if ((insn & 0xf0) == 0x80)
2381 {
2382 int mask = ((insn & 0xf) << 8) | *entry++;
2383 int i;
2384
2385 /* The special case of an all-zero mask identifies
2386 "Refuse to unwind". We return NULL to fall back
2387 to the prologue analyzer. */
2388 if (mask == 0)
2389 return NULL;
2390
2391 /* Pop registers r4..r15 under mask. */
2392 for (i = 0; i < 12; i++)
2393 if (mask & (1 << i))
2394 {
2395 cache->saved_regs[4 + i].addr = vsp;
2396 vsp += 4;
2397 }
2398
2399 /* Special-case popping SP -- we need to reload vsp. */
2400 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2401 vsp_valid = 0;
2402 }
2403 else if ((insn & 0xf0) == 0x90)
2404 {
2405 int reg = insn & 0xf;
2406
2407 /* Reserved cases. */
2408 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2409 return NULL;
2410
2411 /* Set SP from another register and mark VSP for reload. */
2412 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2413 vsp_valid = 0;
2414 }
2415 else if ((insn & 0xf0) == 0xa0)
2416 {
2417 int count = insn & 0x7;
2418 int pop_lr = (insn & 0x8) != 0;
2419 int i;
2420
2421 /* Pop r4..r[4+count]. */
2422 for (i = 0; i <= count; i++)
2423 {
2424 cache->saved_regs[4 + i].addr = vsp;
2425 vsp += 4;
2426 }
2427
2428 /* If indicated by flag, pop LR as well. */
2429 if (pop_lr)
2430 {
2431 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2432 vsp += 4;
2433 }
2434 }
2435 else if (insn == 0xb0)
2436 {
2437 /* We could only have updated PC by popping into it; if so, it
 2438 will show up as an address. Otherwise, copy LR into PC. */
2439 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2440 cache->saved_regs[ARM_PC_REGNUM]
2441 = cache->saved_regs[ARM_LR_REGNUM];
2442
2443 /* We're done. */
2444 break;
2445 }
2446 else if (insn == 0xb1)
2447 {
2448 int mask = *entry++;
2449 int i;
2450
2451 /* All-zero mask and mask >= 16 is "spare". */
2452 if (mask == 0 || mask >= 16)
2453 return NULL;
2454
2455 /* Pop r0..r3 under mask. */
2456 for (i = 0; i < 4; i++)
2457 if (mask & (1 << i))
2458 {
2459 cache->saved_regs[i].addr = vsp;
2460 vsp += 4;
2461 }
2462 }
2463 else if (insn == 0xb2)
2464 {
2465 ULONGEST offset = 0;
2466 unsigned shift = 0;
2467
2468 do
2469 {
2470 offset |= (*entry & 0x7f) << shift;
2471 shift += 7;
2472 }
2473 while (*entry++ & 0x80);
2474
2475 vsp += 0x204 + (offset << 2);
2476 }
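	  /* The operand is a ULEB128 count, and the adjustment is
	     0x204 + 4 * offset.  A single operand byte of 0x00 therefore
	     adds 0x204 (516) bytes, while the two-byte sequence
	     0x80 0x01 encodes offset 128 and adds 0x404 bytes to vsp.  */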
2477 else if (insn == 0xb3)
2478 {
2479 int start = *entry >> 4;
2480 int count = (*entry++) & 0xf;
2481 int i;
2482
2483 /* Only registers D0..D15 are valid here. */
2484 if (start + count >= 16)
2485 return NULL;
2486
2487 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2488 for (i = 0; i <= count; i++)
2489 {
2490 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2491 vsp += 8;
2492 }
2493
2494 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2495 vsp += 4;
2496 }
2497 else if ((insn & 0xf8) == 0xb8)
2498 {
2499 int count = insn & 0x7;
2500 int i;
2501
2502 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2503 for (i = 0; i <= count; i++)
2504 {
2505 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2506 vsp += 8;
2507 }
2508
2509 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2510 vsp += 4;
2511 }
2512 else if (insn == 0xc6)
2513 {
2514 int start = *entry >> 4;
2515 int count = (*entry++) & 0xf;
2516 int i;
2517
2518 /* Only registers WR0..WR15 are valid. */
2519 if (start + count >= 16)
2520 return NULL;
2521
2522 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2523 for (i = 0; i <= count; i++)
2524 {
2525 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2526 vsp += 8;
2527 }
2528 }
2529 else if (insn == 0xc7)
2530 {
2531 int mask = *entry++;
2532 int i;
2533
2534 /* All-zero mask and mask >= 16 is "spare". */
2535 if (mask == 0 || mask >= 16)
2536 return NULL;
2537
2538 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2539 for (i = 0; i < 4; i++)
2540 if (mask & (1 << i))
2541 {
2542 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2543 vsp += 4;
2544 }
2545 }
2546 else if ((insn & 0xf8) == 0xc0)
2547 {
2548 int count = insn & 0x7;
2549 int i;
2550
2551 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2552 for (i = 0; i <= count; i++)
2553 {
2554 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2555 vsp += 8;
2556 }
2557 }
2558 else if (insn == 0xc8)
2559 {
2560 int start = *entry >> 4;
2561 int count = (*entry++) & 0xf;
2562 int i;
2563
2564 /* Only registers D0..D31 are valid. */
2565 if (start + count >= 16)
2566 return NULL;
2567
2568 /* Pop VFP double-precision registers
2569 D[16+start]..D[16+start+count]. */
2570 for (i = 0; i <= count; i++)
2571 {
2572 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2573 vsp += 8;
2574 }
2575 }
2576 else if (insn == 0xc9)
2577 {
2578 int start = *entry >> 4;
2579 int count = (*entry++) & 0xf;
2580 int i;
2581
2582 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2583 for (i = 0; i <= count; i++)
2584 {
2585 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2586 vsp += 8;
2587 }
2588 }
2589 else if ((insn & 0xf8) == 0xd0)
2590 {
2591 int count = insn & 0x7;
2592 int i;
2593
2594 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2595 for (i = 0; i <= count; i++)
2596 {
2597 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2598 vsp += 8;
2599 }
2600 }
2601 else
2602 {
2603 /* Everything else is "spare". */
2604 return NULL;
2605 }
2606 }
2607
2608 /* If we restore SP from a register, assume this was the frame register.
2609 Otherwise just fall back to SP as frame register. */
2610 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2611 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2612 else
2613 cache->framereg = ARM_SP_REGNUM;
2614
2615 /* Determine offset to previous frame. */
2616 cache->framesize
2617 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2618
2619 /* We already got the previous SP. */
2620 cache->prev_sp = vsp;
2621
2622 return cache;
2623}
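/* As an illustration, a frame built by "push {r7, lr}; add r7, sp, #0"
   can be described by the unwind opcodes 0x97 0x84 0x08 0xb0: "vsp = r7"
   reloads the virtual SP from this frame's r7, "pop under mask 0x408"
   records r7 at vsp and lr at vsp + 4 (advancing vsp by 8), and "finish"
   copies the saved lr into the PC slot -- exactly the steps performed by
   the decoder above.  */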
2624
2625/* Unwinding via ARM exception table entries. Note that the sniffer
2626 already computes a filled-in prologue cache, which is then used
2627 with the same arm_prologue_this_id and arm_prologue_prev_register
2628 routines also used for prologue-parsing based unwinding. */
2629
2630static int
2631arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2632 struct frame_info *this_frame,
2633 void **this_prologue_cache)
2634{
2635 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2636 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2637 CORE_ADDR addr_in_block, exidx_region, func_start;
2638 struct arm_prologue_cache *cache;
2639 gdb_byte *entry;
2640
2641 /* See if we have an ARM exception table entry covering this address. */
2642 addr_in_block = get_frame_address_in_block (this_frame);
2643 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2644 if (!entry)
2645 return 0;
2646
2647 /* The ARM exception table does not describe unwind information
2648 for arbitrary PC values, but is guaranteed to be correct only
2649 at call sites. We have to decide here whether we want to use
2650 ARM exception table information for this frame, or fall back
2651 to using prologue parsing. (Note that if we have DWARF CFI,
2652 this sniffer isn't even called -- CFI is always preferred.)
2653
2654 Before we make this decision, however, we check whether we
2655 actually have *symbol* information for the current frame.
2656 If not, prologue parsing would not work anyway, so we might
2657 as well use the exception table and hope for the best. */
2658 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2659 {
2660 int exc_valid = 0;
2661
2662 /* If the next frame is "normal", we are at a call site in this
2663 frame, so exception information is guaranteed to be valid. */
2664 if (get_next_frame (this_frame)
2665 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2666 exc_valid = 1;
2667
2668 /* We also assume exception information is valid if we're currently
2669 blocked in a system call. The system library is supposed to
d9311bfa
AT
2670 ensure this, so that e.g. pthread cancellation works. */
2671 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2672 {
d9311bfa 2673 LONGEST insn;
416dc9c6 2674
d9311bfa
AT
2675 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2676 byte_order_for_code, &insn)
2677 && (insn & 0xff00) == 0xdf00 /* svc */)
2678 exc_valid = 1;
0e9e9abd 2679 }
d9311bfa
AT
2680 else
2681 {
2682 LONGEST insn;
416dc9c6 2683
d9311bfa
AT
2684 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2685 byte_order_for_code, &insn)
2686 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2687 exc_valid = 1;
2688 }
2689
0e9e9abd
UW
2690 /* Bail out if we don't know that exception information is valid. */
2691 if (!exc_valid)
2692 return 0;
2693
2694 /* The ARM exception index does not mark the *end* of the region
2695 covered by the entry, and some functions will not have any entry.
2696 To correctly recognize the end of the covered region, the linker
2697 should have inserted dummy records with a CANTUNWIND marker.
2698
2699 Unfortunately, current versions of GNU ld do not reliably do
2700 this, and thus we may have found an incorrect entry above.
2701 As a (temporary) sanity check, we only use the entry if it
2702 lies *within* the bounds of the function. Note that this check
2703 might reject perfectly valid entries that just happen to cover
2704 multiple functions; therefore this check ought to be removed
2705 once the linker is fixed. */
2706 if (func_start > exidx_region)
2707 return 0;
2708 }
2709
2710 /* Decode the list of unwinding instructions into a prologue cache.
2711 Note that this may fail due to e.g. a "refuse to unwind" code. */
2712 cache = arm_exidx_fill_cache (this_frame, entry);
2713 if (!cache)
2714 return 0;
2715
2716 *this_prologue_cache = cache;
2717 return 1;
2718}
2719
2720struct frame_unwind arm_exidx_unwind = {
2721 NORMAL_FRAME,
8fbca658 2722 default_frame_unwind_stop_reason,
0e9e9abd
UW
2723 arm_prologue_this_id,
2724 arm_prologue_prev_register,
2725 NULL,
2726 arm_exidx_unwind_sniffer
2727};
2728
779aa56f
YQ
2729static struct arm_prologue_cache *
2730arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2731{
2732 struct arm_prologue_cache *cache;
779aa56f
YQ
2733 int reg;
2734
2735 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2736 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2737
 2738 /* Still rely on the offsets calculated from the prologue scan. */
2739 arm_scan_prologue (this_frame, cache);
2740
 2741 /* Since we are in the epilogue, SP has already been restored. */
2742 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2743
2744 /* Calculate actual addresses of saved registers using offsets
2745 determined by arm_scan_prologue. */
2746 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2747 if (trad_frame_addr_p (cache->saved_regs, reg))
2748 cache->saved_regs[reg].addr += cache->prev_sp;
2749
2750 return cache;
2751}
2752
2753/* Implementation of function hook 'this_id' in
 2754 'struct frame_unwind' for epilogue unwinder. */
2755
2756static void
2757arm_epilogue_frame_this_id (struct frame_info *this_frame,
2758 void **this_cache,
2759 struct frame_id *this_id)
2760{
2761 struct arm_prologue_cache *cache;
2762 CORE_ADDR pc, func;
2763
2764 if (*this_cache == NULL)
2765 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2766 cache = (struct arm_prologue_cache *) *this_cache;
2767
2768 /* Use function start address as part of the frame ID. If we cannot
2769 identify the start address (due to missing symbol information),
2770 fall back to just using the current PC. */
2771 pc = get_frame_pc (this_frame);
2772 func = get_frame_func (this_frame);
fb3f3d25 2773 if (func == 0)
779aa56f
YQ
2774 func = pc;
2775
2776 (*this_id) = frame_id_build (cache->prev_sp, pc);
2777}
2778
2779/* Implementation of function hook 'prev_register' in
 2780 'struct frame_unwind' for epilogue unwinder. */
2781
2782static struct value *
2783arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2784 void **this_cache, int regnum)
2785{
779aa56f
YQ
2786 if (*this_cache == NULL)
2787 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2788
2789 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2790}
2791
2792static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2793 CORE_ADDR pc);
2794static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2795 CORE_ADDR pc);
2796
2797/* Implementation of function hook 'sniffer' in
 2798 'struct frame_unwind' for epilogue unwinder. */
2799
2800static int
2801arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2802 struct frame_info *this_frame,
2803 void **this_prologue_cache)
2804{
2805 if (frame_relative_level (this_frame) == 0)
2806 {
2807 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2808 CORE_ADDR pc = get_frame_pc (this_frame);
2809
2810 if (arm_frame_is_thumb (this_frame))
2811 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2812 else
2813 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2814 }
2815 else
2816 return 0;
2817}
2818
2819/* Frame unwinder from epilogue. */
2820
2821static const struct frame_unwind arm_epilogue_frame_unwind =
2822{
2823 NORMAL_FRAME,
2824 default_frame_unwind_stop_reason,
2825 arm_epilogue_frame_this_id,
2826 arm_epilogue_frame_prev_register,
2827 NULL,
2828 arm_epilogue_frame_sniffer,
2829};
2830
80d8d390
YQ
2831/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2832 trampoline, return the target PC. Otherwise return 0.
2833
2834 void call0a (char c, short s, int i, long l) {}
2835
2836 int main (void)
2837 {
2838 (*pointer_to_call0a) (c, s, i, l);
2839 }
2840
2841 Instead of calling a stub library function _call_via_xx (xx is
2842 the register name), GCC may inline the trampoline in the object
2843 file as below (register r2 has the address of call0a).
2844
2845 .global main
2846 .type main, %function
2847 ...
2848 bl .L1
2849 ...
2850 .size main, .-main
2851
2852 .L1:
2853 bx r2
2854
2855 The trampoline 'bx r2' doesn't belong to main. */
2856
2857static CORE_ADDR
2858arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2859{
 2860 /* The heuristic for recognizing such a trampoline is that FRAME is
 2861 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2862 if (arm_frame_is_thumb (frame))
2863 {
2864 gdb_byte buf[2];
2865
2866 if (target_read_memory (pc, buf, 2) == 0)
2867 {
2868 struct gdbarch *gdbarch = get_frame_arch (frame);
2869 enum bfd_endian byte_order_for_code
2870 = gdbarch_byte_order_for_code (gdbarch);
2871 uint16_t insn
2872 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2873
2874 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2875 {
2876 CORE_ADDR dest
2877 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2878
2879 /* Clear the LSB so that gdb core sets step-resume
2880 breakpoint at the right address. */
2881 return UNMAKE_THUMB_ADDR (dest);
2882 }
2883 }
2884 }
2885
2886 return 0;
2887}
2888
909cf6ea 2889static struct arm_prologue_cache *
a262aec2 2890arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2891{
909cf6ea 2892 struct arm_prologue_cache *cache;
909cf6ea 2893
35d5d4ee 2894 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2895 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2896
a262aec2 2897 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2898
2899 return cache;
2900}
2901
 2902/* Our frame ID for a stub frame is the current SP and the PC. */
2903
2904static void
a262aec2 2905arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2906 void **this_cache,
2907 struct frame_id *this_id)
2908{
2909 struct arm_prologue_cache *cache;
2910
2911 if (*this_cache == NULL)
a262aec2 2912 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2913 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2914
a262aec2 2915 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2916}
2917
a262aec2
DJ
2918static int
2919arm_stub_unwind_sniffer (const struct frame_unwind *self,
2920 struct frame_info *this_frame,
2921 void **this_prologue_cache)
909cf6ea 2922{
93d42b30 2923 CORE_ADDR addr_in_block;
948f8e3d 2924 gdb_byte dummy[4];
18d18ac8
YQ
2925 CORE_ADDR pc, start_addr;
2926 const char *name;
909cf6ea 2927
a262aec2 2928 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2929 pc = get_frame_pc (this_frame);
3e5d3a5a 2930 if (in_plt_section (addr_in_block)
fc36e839
DE
 2931 /* We also use the stub unwinder if the target memory is unreadable
2932 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2933 || target_read_memory (pc, dummy, 4) != 0)
2934 return 1;
2935
2936 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2937 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2938 return 1;
909cf6ea 2939
a262aec2 2940 return 0;
909cf6ea
DJ
2941}
2942
a262aec2
DJ
2943struct frame_unwind arm_stub_unwind = {
2944 NORMAL_FRAME,
8fbca658 2945 default_frame_unwind_stop_reason,
a262aec2
DJ
2946 arm_stub_this_id,
2947 arm_prologue_prev_register,
2948 NULL,
2949 arm_stub_unwind_sniffer
2950};
2951
2ae28aa9
YQ
2952/* Put here the code to store, into CACHE->saved_regs, the addresses
2953 of the saved registers of frame described by THIS_FRAME. CACHE is
2954 returned. */
2955
2956static struct arm_prologue_cache *
2957arm_m_exception_cache (struct frame_info *this_frame)
2958{
2959 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2960 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2961 struct arm_prologue_cache *cache;
2962 CORE_ADDR unwound_sp;
2963 LONGEST xpsr;
2964
2965 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2966 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2967
2968 unwound_sp = get_frame_register_unsigned (this_frame,
2969 ARM_SP_REGNUM);
2970
2971 /* The hardware saves eight 32-bit words, comprising xPSR,
2972 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2973 "B1.5.6 Exception entry behavior" in
2974 "ARMv7-M Architecture Reference Manual". */
2975 cache->saved_regs[0].addr = unwound_sp;
2976 cache->saved_regs[1].addr = unwound_sp + 4;
2977 cache->saved_regs[2].addr = unwound_sp + 8;
2978 cache->saved_regs[3].addr = unwound_sp + 12;
2979 cache->saved_regs[12].addr = unwound_sp + 16;
2980 cache->saved_regs[14].addr = unwound_sp + 20;
2981 cache->saved_regs[15].addr = unwound_sp + 24;
2982 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2983
2984 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2985 aligner between the top of the 32-byte stack frame and the
2986 previous context's stack pointer. */
2987 cache->prev_sp = unwound_sp + 32;
2988 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2989 && (xpsr & (1 << 9)) != 0)
2990 cache->prev_sp += 4;
2991
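  /* Worked example: if the exception frame was pushed at SP == 0x2000ffc0,
     the saved xPSR is read from 0x2000ffdc; with bit 9 clear the previous
     context's SP was 0x2000ffe0, and with bit 9 set (the frame was
     realigned on entry) it was 0x2000ffe4.  */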
2992 return cache;
2993}
2994
2995/* Implementation of function hook 'this_id' in
 2996 'struct frame_unwind'. */
2997
2998static void
2999arm_m_exception_this_id (struct frame_info *this_frame,
3000 void **this_cache,
3001 struct frame_id *this_id)
3002{
3003 struct arm_prologue_cache *cache;
3004
3005 if (*this_cache == NULL)
3006 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3007 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3008
 3009 /* Our frame ID for an M-profile exception frame is the previous context's SP and the PC. */
3010 *this_id = frame_id_build (cache->prev_sp,
3011 get_frame_pc (this_frame));
3012}
3013
3014/* Implementation of function hook 'prev_register' in
 3015 'struct frame_unwind'. */
3016
3017static struct value *
3018arm_m_exception_prev_register (struct frame_info *this_frame,
3019 void **this_cache,
3020 int prev_regnum)
3021{
2ae28aa9
YQ
3022 struct arm_prologue_cache *cache;
3023
3024 if (*this_cache == NULL)
3025 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3026 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3027
3028 /* The value was already reconstructed into PREV_SP. */
3029 if (prev_regnum == ARM_SP_REGNUM)
3030 return frame_unwind_got_constant (this_frame, prev_regnum,
3031 cache->prev_sp);
3032
3033 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3034 prev_regnum);
3035}
3036
3037/* Implementation of function hook 'sniffer' in
 3038 'struct frame_unwind'. */
3039
3040static int
3041arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3042 struct frame_info *this_frame,
3043 void **this_prologue_cache)
3044{
3045 CORE_ADDR this_pc = get_frame_pc (this_frame);
3046
3047 /* No need to check is_m; this sniffer is only registered for
3048 M-profile architectures. */
3049
ca90e760
FH
3050 /* Check if exception frame returns to a magic PC value. */
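  /* On v7-M these magic values are EXC_RETURN codes such as 0xfffffff1,
     0xfffffff9 or 0xfffffffd, which the processor loads into the PC on
     exception return.  */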
3051 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3052}
3053
3054/* Frame unwinder for M-profile exceptions. */
3055
3056struct frame_unwind arm_m_exception_unwind =
3057{
3058 SIGTRAMP_FRAME,
3059 default_frame_unwind_stop_reason,
3060 arm_m_exception_this_id,
3061 arm_m_exception_prev_register,
3062 NULL,
3063 arm_m_exception_unwind_sniffer
3064};
3065
24de872b 3066static CORE_ADDR
a262aec2 3067arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3068{
3069 struct arm_prologue_cache *cache;
3070
eb5492fa 3071 if (*this_cache == NULL)
a262aec2 3072 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3073 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3074
4be43953 3075 return cache->prev_sp - cache->framesize;
24de872b
DJ
3076}
3077
eb5492fa
DJ
3078struct frame_base arm_normal_base = {
3079 &arm_prologue_unwind,
3080 arm_normal_frame_base,
3081 arm_normal_frame_base,
3082 arm_normal_frame_base
3083};
3084
a262aec2 3085/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3086 dummy frame. The frame ID's base needs to match the TOS value
3087 saved by save_dummy_frame_tos() and returned from
3088 arm_push_dummy_call, and the PC needs to match the dummy frame's
3089 breakpoint. */
c906108c 3090
eb5492fa 3091static struct frame_id
a262aec2 3092arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3093{
0963b4bd
MS
3094 return frame_id_build (get_frame_register_unsigned (this_frame,
3095 ARM_SP_REGNUM),
a262aec2 3096 get_frame_pc (this_frame));
eb5492fa 3097}
c3b4394c 3098
eb5492fa
DJ
3099/* Given THIS_FRAME, find the previous frame's resume PC (which will
3100 be used to construct the previous frame's ID, after looking up the
3101 containing function). */
c3b4394c 3102
eb5492fa
DJ
3103static CORE_ADDR
3104arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3105{
3106 CORE_ADDR pc;
3107 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3108 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3109}
3110
3111static CORE_ADDR
3112arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3113{
3114 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3115}
3116
b39cc962
DJ
3117static struct value *
3118arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3119 int regnum)
3120{
24568a2c 3121 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3122 CORE_ADDR lr, cpsr;
9779414d 3123 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3124
3125 switch (regnum)
3126 {
3127 case ARM_PC_REGNUM:
3128 /* The PC is normally copied from the return column, which
3129 describes saves of LR. However, that version may have an
3130 extra bit set to indicate Thumb state. The bit is not
3131 part of the PC. */
3132 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3133 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3134 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3135
3136 case ARM_PS_REGNUM:
3137 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3138 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3139 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3140 if (IS_THUMB_ADDR (lr))
9779414d 3141 cpsr |= t_bit;
b39cc962 3142 else
9779414d 3143 cpsr &= ~t_bit;
ca38c58e 3144 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3145
3146 default:
3147 internal_error (__FILE__, __LINE__,
3148 _("Unexpected register %d"), regnum);
3149 }
3150}
3151
3152static void
3153arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3154 struct dwarf2_frame_state_reg *reg,
3155 struct frame_info *this_frame)
3156{
3157 switch (regnum)
3158 {
3159 case ARM_PC_REGNUM:
3160 case ARM_PS_REGNUM:
3161 reg->how = DWARF2_FRAME_REG_FN;
3162 reg->loc.fn = arm_dwarf2_prev_register;
3163 break;
3164 case ARM_SP_REGNUM:
3165 reg->how = DWARF2_FRAME_REG_CFA;
3166 break;
3167 }
3168}
3169
c9cf6e20 3170/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3171
3172static int
c9cf6e20 3173thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3174{
3175 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3176 unsigned int insn, insn2;
3177 int found_return = 0, found_stack_adjust = 0;
3178 CORE_ADDR func_start, func_end;
3179 CORE_ADDR scan_pc;
3180 gdb_byte buf[4];
3181
3182 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3183 return 0;
3184
3185 /* The epilogue is a sequence of instructions along the following lines:
3186
3187 - add stack frame size to SP or FP
3188 - [if frame pointer used] restore SP from FP
3189 - restore registers from SP [may include PC]
3190 - a return-type instruction [if PC wasn't already restored]
3191
3192 In a first pass, we scan forward from the current PC and verify the
3193 instructions we find as compatible with this sequence, ending in a
3194 return instruction.
3195
3196 However, this is not sufficient to distinguish indirect function calls
3197 within a function from indirect tail calls in the epilogue in some cases.
3198 Therefore, if we didn't already find any SP-changing instruction during
3199 forward scan, we add a backward scanning heuristic to ensure we actually
3200 are in the epilogue. */
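     /* For example, if PC is at the "pop" of the common epilogue
	"add sp, #16; pop {r4, r7, pc}" (0xb004 followed by 0xbd90), the
	forward scan sees the pop writing PC and sets found_return, while
	the backward scan accepts the preceding "add sp" as the stack
	adjustment.  */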
3201
3202 scan_pc = pc;
3203 while (scan_pc < func_end && !found_return)
3204 {
3205 if (target_read_memory (scan_pc, buf, 2))
3206 break;
3207
3208 scan_pc += 2;
3209 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3210
3211 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3212 found_return = 1;
3213 else if (insn == 0x46f7) /* mov pc, lr */
3214 found_return = 1;
540314bd 3215 else if (thumb_instruction_restores_sp (insn))
4024ca99 3216 {
b7576e5c 3217 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3218 found_return = 1;
3219 }
db24da6d 3220 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3221 {
3222 if (target_read_memory (scan_pc, buf, 2))
3223 break;
3224
3225 scan_pc += 2;
3226 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3227
3228 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3229 {
4024ca99
UW
3230 if (insn2 & 0x8000) /* <registers> include PC. */
3231 found_return = 1;
3232 }
3233 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3234 && (insn2 & 0x0fff) == 0x0b04)
3235 {
4024ca99
UW
3236 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3237 found_return = 1;
3238 }
3239 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3240 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3241 ;
4024ca99
UW
3242 else
3243 break;
3244 }
3245 else
3246 break;
3247 }
3248
3249 if (!found_return)
3250 return 0;
3251
3252 /* Since any instruction in the epilogue sequence, with the possible
3253 exception of return itself, updates the stack pointer, we need to
3254 scan backwards for at most one instruction. Try either a 16-bit or
3255 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3256 too much about false positives. */
4024ca99 3257
6b65d1b6
YQ
3258 if (pc - 4 < func_start)
3259 return 0;
3260 if (target_read_memory (pc - 4, buf, 4))
3261 return 0;
4024ca99 3262
6b65d1b6
YQ
3263 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3264 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3265
3266 if (thumb_instruction_restores_sp (insn2))
3267 found_stack_adjust = 1;
3268 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3269 found_stack_adjust = 1;
3270 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3271 && (insn2 & 0x0fff) == 0x0b04)
3272 found_stack_adjust = 1;
3273 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3274 && (insn2 & 0x0e00) == 0x0a00)
3275 found_stack_adjust = 1;
4024ca99
UW
3276
3277 return found_stack_adjust;
3278}
3279
4024ca99 3280static int
c58b006a 3281arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3282{
3283 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3284 unsigned int insn;
f303bc3e 3285 int found_return;
4024ca99
UW
3286 CORE_ADDR func_start, func_end;
3287
4024ca99
UW
3288 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3289 return 0;
3290
3291 /* We are in the epilogue if the previous instruction was a stack
3292 adjustment and the next instruction is a possible return (bx, mov
3293 pc, or pop). We could have to scan backwards to find the stack
3294 adjustment, or forwards to find the return, but this is a decent
3295 approximation. First scan forwards. */
3296
3297 found_return = 0;
3298 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3299 if (bits (insn, 28, 31) != INST_NV)
3300 {
3301 if ((insn & 0x0ffffff0) == 0x012fff10)
3302 /* BX. */
3303 found_return = 1;
3304 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3305 /* MOV PC. */
3306 found_return = 1;
3307 else if ((insn & 0x0fff0000) == 0x08bd0000
3308 && (insn & 0x0000c000) != 0)
3309 /* POP (LDMIA), including PC or LR. */
3310 found_return = 1;
3311 }
3312
3313 if (!found_return)
3314 return 0;
3315
3316 /* Scan backwards. This is just a heuristic, so do not worry about
3317 false positives from mode changes. */
3318
3319 if (pc < func_start + 4)
3320 return 0;
3321
3322 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3323 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3324 return 1;
3325
3326 return 0;
3327}
3328
c58b006a
YQ
3329/* Implement the stack_frame_destroyed_p gdbarch method. */
3330
3331static int
3332arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3333{
3334 if (arm_pc_is_thumb (gdbarch, pc))
3335 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3336 else
3337 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3338}
4024ca99 3339
2dd604e7
RE
3340/* When arguments must be pushed onto the stack, they go on in reverse
3341 order. The code below implements a FILO (stack) to do this. */
3342
3343struct stack_item
3344{
3345 int len;
3346 struct stack_item *prev;
7c543f7b 3347 gdb_byte *data;
2dd604e7
RE
3348};
3349
3350static struct stack_item *
df3b6708 3351push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3352{
3353 struct stack_item *si;
8d749320 3354 si = XNEW (struct stack_item);
7c543f7b 3355 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3356 si->len = len;
3357 si->prev = prev;
3358 memcpy (si->data, contents, len);
3359 return si;
3360}
3361
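/* Pop the topmost item SI from the stack list, freeing its storage,
   and return the new top of the list.  */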
3362static struct stack_item *
3363pop_stack_item (struct stack_item *si)
3364{
3365 struct stack_item *dead = si;
3366 si = si->prev;
3367 xfree (dead->data);
3368 xfree (dead);
3369 return si;
3370}
3371
2af48f68
PB
3372
3373/* Return the alignment (in bytes) of the given type. */
3374
3375static int
3376arm_type_align (struct type *t)
3377{
3378 int n;
3379 int align;
3380 int falign;
3381
3382 t = check_typedef (t);
3383 switch (TYPE_CODE (t))
3384 {
3385 default:
3386 /* Should never happen. */
3387 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3388 return 4;
3389
3390 case TYPE_CODE_PTR:
3391 case TYPE_CODE_ENUM:
3392 case TYPE_CODE_INT:
3393 case TYPE_CODE_FLT:
3394 case TYPE_CODE_SET:
3395 case TYPE_CODE_RANGE:
2af48f68
PB
3396 case TYPE_CODE_REF:
3397 case TYPE_CODE_CHAR:
3398 case TYPE_CODE_BOOL:
3399 return TYPE_LENGTH (t);
3400
3401 case TYPE_CODE_ARRAY:
c4312b19
YQ
3402 if (TYPE_VECTOR (t))
3403 {
 3404	      /* Use the natural alignment for vector types (the same as for
 3405	         scalar types), but cap the alignment at 64 bits (8 bytes).  */
3406 if (TYPE_LENGTH (t) > 8)
3407 return 8;
3408 else
3409 return TYPE_LENGTH (t);
3410 }
3411 else
3412 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3413 case TYPE_CODE_COMPLEX:
2af48f68
PB
3414 return arm_type_align (TYPE_TARGET_TYPE (t));
3415
3416 case TYPE_CODE_STRUCT:
3417 case TYPE_CODE_UNION:
3418 align = 1;
3419 for (n = 0; n < TYPE_NFIELDS (t); n++)
3420 {
3421 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3422 if (falign > align)
3423 align = falign;
3424 }
3425 return align;
3426 }
3427}
3428
90445bd3
DJ
3429/* Possible base types for a candidate for passing and returning in
3430 VFP registers. */
3431
3432enum arm_vfp_cprc_base_type
3433{
3434 VFP_CPRC_UNKNOWN,
3435 VFP_CPRC_SINGLE,
3436 VFP_CPRC_DOUBLE,
3437 VFP_CPRC_VEC64,
3438 VFP_CPRC_VEC128
3439};
3440
3441/* The length of one element of base type B. */
3442
3443static unsigned
3444arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3445{
3446 switch (b)
3447 {
3448 case VFP_CPRC_SINGLE:
3449 return 4;
3450 case VFP_CPRC_DOUBLE:
3451 return 8;
3452 case VFP_CPRC_VEC64:
3453 return 8;
3454 case VFP_CPRC_VEC128:
3455 return 16;
3456 default:
3457 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3458 (int) b);
3459 }
3460}
3461
3462/* The character ('s', 'd' or 'q') for the type of VFP register used
3463 for passing base type B. */
3464
3465static int
3466arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3467{
3468 switch (b)
3469 {
3470 case VFP_CPRC_SINGLE:
3471 return 's';
3472 case VFP_CPRC_DOUBLE:
3473 return 'd';
3474 case VFP_CPRC_VEC64:
3475 return 'd';
3476 case VFP_CPRC_VEC128:
3477 return 'q';
3478 default:
3479 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3480 (int) b);
3481 }
3482}
3483
3484/* Determine whether T may be part of a candidate for passing and
3485 returning in VFP registers, ignoring the limit on the total number
3486 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3487 classification of the first valid component found; if it is not
3488 VFP_CPRC_UNKNOWN, all components must have the same classification
3489 as *BASE_TYPE. If it is found that T contains a type not permitted
3490 for passing and returning in VFP registers, a type differently
3491 classified from *BASE_TYPE, or two types differently classified
3492 from each other, return -1, otherwise return the total number of
3493 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3494 array). Vector types are not currently supported, matching the
3495 generic AAPCS support. */
90445bd3
DJ
3496
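/* For example, under these rules "struct { float x, y; }" classifies as
   two elements of base type VFP_CPRC_SINGLE, whereas
   "struct { float x; double y; }" mixes base types and is rejected
   (arm_vfp_cprc_sub_candidate returns -1 for it).  */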
3497static int
3498arm_vfp_cprc_sub_candidate (struct type *t,
3499 enum arm_vfp_cprc_base_type *base_type)
3500{
3501 t = check_typedef (t);
3502 switch (TYPE_CODE (t))
3503 {
3504 case TYPE_CODE_FLT:
3505 switch (TYPE_LENGTH (t))
3506 {
3507 case 4:
3508 if (*base_type == VFP_CPRC_UNKNOWN)
3509 *base_type = VFP_CPRC_SINGLE;
3510 else if (*base_type != VFP_CPRC_SINGLE)
3511 return -1;
3512 return 1;
3513
3514 case 8:
3515 if (*base_type == VFP_CPRC_UNKNOWN)
3516 *base_type = VFP_CPRC_DOUBLE;
3517 else if (*base_type != VFP_CPRC_DOUBLE)
3518 return -1;
3519 return 1;
3520
3521 default:
3522 return -1;
3523 }
3524 break;
3525
817e0957
YQ
3526 case TYPE_CODE_COMPLEX:
3527 /* Arguments of complex T where T is one of the types float or
3528 double get treated as if they are implemented as:
3529
3530 struct complexT
3531 {
3532 T real;
3533 T imag;
5f52445b
YQ
3534 };
3535
3536 */
817e0957
YQ
3537 switch (TYPE_LENGTH (t))
3538 {
3539 case 8:
3540 if (*base_type == VFP_CPRC_UNKNOWN)
3541 *base_type = VFP_CPRC_SINGLE;
3542 else if (*base_type != VFP_CPRC_SINGLE)
3543 return -1;
3544 return 2;
3545
3546 case 16:
3547 if (*base_type == VFP_CPRC_UNKNOWN)
3548 *base_type = VFP_CPRC_DOUBLE;
3549 else if (*base_type != VFP_CPRC_DOUBLE)
3550 return -1;
3551 return 2;
3552
3553 default:
3554 return -1;
3555 }
3556 break;
3557
90445bd3
DJ
3558 case TYPE_CODE_ARRAY:
3559 {
c4312b19 3560 if (TYPE_VECTOR (t))
90445bd3 3561 {
c4312b19
YQ
3562 /* A 64-bit or 128-bit containerized vector type are VFP
3563 CPRCs. */
3564 switch (TYPE_LENGTH (t))
3565 {
3566 case 8:
3567 if (*base_type == VFP_CPRC_UNKNOWN)
3568 *base_type = VFP_CPRC_VEC64;
3569 return 1;
3570 case 16:
3571 if (*base_type == VFP_CPRC_UNKNOWN)
3572 *base_type = VFP_CPRC_VEC128;
3573 return 1;
3574 default:
3575 return -1;
3576 }
3577 }
3578 else
3579 {
3580 int count;
3581 unsigned unitlen;
3582
3583 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3584 base_type);
3585 if (count == -1)
3586 return -1;
3587 if (TYPE_LENGTH (t) == 0)
3588 {
3589 gdb_assert (count == 0);
3590 return 0;
3591 }
3592 else if (count == 0)
3593 return -1;
3594 unitlen = arm_vfp_cprc_unit_length (*base_type);
3595 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3596 return TYPE_LENGTH (t) / unitlen;
90445bd3 3597 }
90445bd3
DJ
3598 }
3599 break;
3600
3601 case TYPE_CODE_STRUCT:
3602 {
3603 int count = 0;
3604 unsigned unitlen;
3605 int i;
3606 for (i = 0; i < TYPE_NFIELDS (t); i++)
3607 {
1040b979
YQ
3608 int sub_count = 0;
3609
3610 if (!field_is_static (&TYPE_FIELD (t, i)))
3611 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3612 base_type);
90445bd3
DJ
3613 if (sub_count == -1)
3614 return -1;
3615 count += sub_count;
3616 }
3617 if (TYPE_LENGTH (t) == 0)
3618 {
3619 gdb_assert (count == 0);
3620 return 0;
3621 }
3622 else if (count == 0)
3623 return -1;
3624 unitlen = arm_vfp_cprc_unit_length (*base_type);
3625 if (TYPE_LENGTH (t) != unitlen * count)
3626 return -1;
3627 return count;
3628 }
3629
3630 case TYPE_CODE_UNION:
3631 {
3632 int count = 0;
3633 unsigned unitlen;
3634 int i;
3635 for (i = 0; i < TYPE_NFIELDS (t); i++)
3636 {
3637 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3638 base_type);
3639 if (sub_count == -1)
3640 return -1;
3641 count = (count > sub_count ? count : sub_count);
3642 }
3643 if (TYPE_LENGTH (t) == 0)
3644 {
3645 gdb_assert (count == 0);
3646 return 0;
3647 }
3648 else if (count == 0)
3649 return -1;
3650 unitlen = arm_vfp_cprc_unit_length (*base_type);
3651 if (TYPE_LENGTH (t) != unitlen * count)
3652 return -1;
3653 return count;
3654 }
3655
3656 default:
3657 break;
3658 }
3659
3660 return -1;
3661}
3662
3663/* Determine whether T is a VFP co-processor register candidate (CPRC)
3664 if passed to or returned from a non-variadic function with the VFP
3665 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3666 *BASE_TYPE to the base type for T and *COUNT to the number of
3667 elements of that base type before returning. */
3668
3669static int
3670arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3671 int *count)
3672{
3673 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3674 int c = arm_vfp_cprc_sub_candidate (t, &b);
3675 if (c <= 0 || c > 4)
3676 return 0;
3677 *base_type = b;
3678 *count = c;
3679 return 1;
3680}
3681
3682/* Return 1 if the VFP ABI should be used for passing arguments to and
3683 returning values from a function of type FUNC_TYPE, 0
3684 otherwise. */
3685
3686static int
3687arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3688{
3689 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3690 /* Variadic functions always use the base ABI. Assume that functions
3691 without debug info are not variadic. */
3692 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3693 return 0;
3694 /* The VFP ABI is only supported as a variant of AAPCS. */
3695 if (tdep->arm_abi != ARM_ABI_AAPCS)
3696 return 0;
3697 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3698}
3699
 3700/* We currently only support passing parameters in integer registers (which
 3701   conforms with GCC's default model) and VFP argument passing following
3702 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3703 we should probably support some of them based on the selected ABI. */
3704
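/* Implement the push_dummy_call gdbarch method: set LR to the return
   breakpoint address, then assign each argument either to VFP registers
   (when the VFP ABI applies and the argument is a CPRC), to the core
   argument registers r0-r3, or to the stack, and finally push the
   accumulated stack items and update SP.  */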
3705static CORE_ADDR
7d9b040b 3706arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3707 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3708 struct value **args, CORE_ADDR sp, int struct_return,
3709 CORE_ADDR struct_addr)
2dd604e7 3710{
e17a4113 3711 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3712 int argnum;
3713 int argreg;
3714 int nstack;
3715 struct stack_item *si = NULL;
90445bd3
DJ
3716 int use_vfp_abi;
3717 struct type *ftype;
3718 unsigned vfp_regs_free = (1 << 16) - 1;
3719
3720 /* Determine the type of this function and whether the VFP ABI
3721 applies. */
3722 ftype = check_typedef (value_type (function));
3723 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3724 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3725 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3726
6a65450a
AC
3727 /* Set the return address. For the ARM, the return breakpoint is
3728 always at BP_ADDR. */
9779414d 3729 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3730 bp_addr |= 1;
6a65450a 3731 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3732
3733 /* Walk through the list of args and determine how large a temporary
3734 stack is required. Need to take care here as structs may be
7a9dd1b2 3735 passed on the stack, and we have to push them. */
2dd604e7
RE
3736 nstack = 0;
3737
3738 argreg = ARM_A1_REGNUM;
3739 nstack = 0;
3740
2dd604e7
RE
3741 /* The struct_return pointer occupies the first parameter
3742 passing register. */
3743 if (struct_return)
3744 {
3745 if (arm_debug)
5af949e3 3746 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3747 gdbarch_register_name (gdbarch, argreg),
5af949e3 3748 paddress (gdbarch, struct_addr));
2dd604e7
RE
3749 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3750 argreg++;
3751 }
3752
3753 for (argnum = 0; argnum < nargs; argnum++)
3754 {
3755 int len;
3756 struct type *arg_type;
3757 struct type *target_type;
3758 enum type_code typecode;
8c6363cf 3759 const bfd_byte *val;
2af48f68 3760 int align;
90445bd3
DJ
3761 enum arm_vfp_cprc_base_type vfp_base_type;
3762 int vfp_base_count;
3763 int may_use_core_reg = 1;
2dd604e7 3764
df407dfe 3765 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3766 len = TYPE_LENGTH (arg_type);
3767 target_type = TYPE_TARGET_TYPE (arg_type);
3768 typecode = TYPE_CODE (arg_type);
8c6363cf 3769 val = value_contents (args[argnum]);
2dd604e7 3770
2af48f68
PB
3771 align = arm_type_align (arg_type);
3772 /* Round alignment up to a whole number of words. */
3773 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3774 /* Different ABIs have different maximum alignments. */
3775 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3776 {
3777 /* The APCS ABI only requires word alignment. */
3778 align = INT_REGISTER_SIZE;
3779 }
3780 else
3781 {
3782 /* The AAPCS requires at most doubleword alignment. */
3783 if (align > INT_REGISTER_SIZE * 2)
3784 align = INT_REGISTER_SIZE * 2;
3785 }
3786
90445bd3
DJ
3787 if (use_vfp_abi
3788 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3789 &vfp_base_count))
3790 {
3791 int regno;
3792 int unit_length;
3793 int shift;
3794 unsigned mask;
3795
3796 /* Because this is a CPRC it cannot go in a core register or
3797 cause a core register to be skipped for alignment.
3798 Either it goes in VFP registers and the rest of this loop
3799 iteration is skipped for this argument, or it goes on the
3800 stack (and the stack alignment code is correct for this
3801 case). */
3802 may_use_core_reg = 0;
3803
3804 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3805 shift = unit_length / 4;
3806 mask = (1 << (shift * vfp_base_count)) - 1;
3807 for (regno = 0; regno < 16; regno += shift)
3808 if (((vfp_regs_free >> regno) & mask) == mask)
3809 break;
3810
3811 if (regno < 16)
3812 {
3813 int reg_char;
3814 int reg_scaled;
3815 int i;
3816
3817 vfp_regs_free &= ~(mask << regno);
3818 reg_scaled = regno / shift;
3819 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3820 for (i = 0; i < vfp_base_count; i++)
3821 {
3822 char name_buf[4];
3823 int regnum;
58d6951d
DJ
3824 if (reg_char == 'q')
3825 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3826 val + i * unit_length);
58d6951d
DJ
3827 else
3828 {
8c042590
PM
3829 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3830 reg_char, reg_scaled + i);
58d6951d
DJ
3831 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3832 strlen (name_buf));
3833 regcache_cooked_write (regcache, regnum,
3834 val + i * unit_length);
3835 }
90445bd3
DJ
3836 }
3837 continue;
3838 }
3839 else
3840 {
3841 /* This CPRC could not go in VFP registers, so all VFP
3842 registers are now marked as used. */
3843 vfp_regs_free = 0;
3844 }
3845 }
3846
2af48f68
PB
 3847	 /* Push stack padding for doubleword alignment.  */
3848 if (nstack & (align - 1))
3849 {
3850 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3851 nstack += INT_REGISTER_SIZE;
3852 }
3853
3854 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3855 if (may_use_core_reg
3856 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3857 && align > INT_REGISTER_SIZE
3858 && argreg & 1)
3859 argreg++;
3860
2dd604e7
RE
3861 /* If the argument is a pointer to a function, and it is a
3862 Thumb function, create a LOCAL copy of the value and set
3863 the THUMB bit in it. */
3864 if (TYPE_CODE_PTR == typecode
3865 && target_type != NULL
f96b8fa0 3866 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3867 {
e17a4113 3868 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3869 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3870 {
224c3ddb 3871 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3872 store_unsigned_integer (copy, len, byte_order,
e17a4113 3873 MAKE_THUMB_ADDR (regval));
8c6363cf 3874 val = copy;
2dd604e7
RE
3875 }
3876 }
3877
3878 /* Copy the argument to general registers or the stack in
3879 register-sized pieces. Large arguments are split between
3880 registers and stack. */
3881 while (len > 0)
3882 {
f0c9063c 3883 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3884 CORE_ADDR regval
3885 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3886
90445bd3 3887 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3888 {
3889 /* The argument is being passed in a general purpose
3890 register. */
e17a4113 3891 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3892 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3893 if (arm_debug)
3894 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3895 argnum,
3896 gdbarch_register_name
2af46ca0 3897 (gdbarch, argreg),
f0c9063c 3898 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3899 regcache_cooked_write_unsigned (regcache, argreg, regval);
3900 argreg++;
3901 }
3902 else
3903 {
ef9bd0b8
YQ
3904 gdb_byte buf[INT_REGISTER_SIZE];
3905
3906 memset (buf, 0, sizeof (buf));
3907 store_unsigned_integer (buf, partial_len, byte_order, regval);
3908
2dd604e7
RE
3909 /* Push the arguments onto the stack. */
3910 if (arm_debug)
3911 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3912 argnum, nstack);
ef9bd0b8 3913 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3914 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3915 }
3916
3917 len -= partial_len;
3918 val += partial_len;
3919 }
3920 }
3921 /* If we have an odd number of words to push, then decrement the stack
 3922	 by one word now, so the first stack argument will be dword aligned.  */
3923 if (nstack & 4)
3924 sp -= 4;
3925
3926 while (si)
3927 {
3928 sp -= si->len;
3929 write_memory (sp, si->data, si->len);
3930 si = pop_stack_item (si);
3931 }
3932
 3933	 /* Finally, update the SP register.  */
3934 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3935
3936 return sp;
3937}
3938
f53f0d0b
PB
3939
3940/* Always align the frame to an 8-byte boundary. This is required on
3941 some platforms and harmless on the rest. */
3942
3943static CORE_ADDR
3944arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3945{
3946 /* Align the stack to eight bytes. */
3947 return sp & ~ (CORE_ADDR) 7;
3948}
3949
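/* Print the FPU exception flags (IVO, DVZ, OFL, UFL, INX) that are set
   in FLAGS to FILE.  */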
c906108c 3950static void
12b27276 3951print_fpu_flags (struct ui_file *file, int flags)
c906108c 3952{
c5aa993b 3953 if (flags & (1 << 0))
12b27276 3954 fputs_filtered ("IVO ", file);
c5aa993b 3955 if (flags & (1 << 1))
12b27276 3956 fputs_filtered ("DVZ ", file);
c5aa993b 3957 if (flags & (1 << 2))
12b27276 3958 fputs_filtered ("OFL ", file);
c5aa993b 3959 if (flags & (1 << 3))
12b27276 3960 fputs_filtered ("UFL ", file);
c5aa993b 3961 if (flags & (1 << 4))
12b27276
WN
3962 fputs_filtered ("INX ", file);
3963 fputc_filtered ('\n', file);
c906108c
SS
3964}
3965
5e74b15c
RE
3966/* Print interesting information about the floating point processor
3967 (if present) or emulator. */
34e8f22d 3968static void
d855c300 3969arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3970 struct frame_info *frame, const char *args)
c906108c 3971{
9c9acae0 3972 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3973 int type;
3974
3975 type = (status >> 24) & 127;
edefbb7c 3976 if (status & (1 << 31))
12b27276 3977 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3978 else
12b27276 3979 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3980 /* i18n: [floating point unit] mask */
12b27276
WN
3981 fputs_filtered (_("mask: "), file);
3982 print_fpu_flags (file, status >> 16);
edefbb7c 3983 /* i18n: [floating point unit] flags */
12b27276
WN
3984 fputs_filtered (_("flags: "), file);
3985 print_fpu_flags (file, status);
c906108c
SS
3986}
3987
27067745
UW
3988/* Construct the ARM extended floating point type. */
3989static struct type *
3990arm_ext_type (struct gdbarch *gdbarch)
3991{
3992 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3993
3994 if (!tdep->arm_ext_type)
3995 tdep->arm_ext_type
e9bb382b 3996 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3997 floatformats_arm_ext);
3998
3999 return tdep->arm_ext_type;
4000}
4001
58d6951d
DJ
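/* Return (constructing and caching it in TDEP on first use) a union type
   describing the different views of a NEON double-precision D register:
   as vectors of 8-, 16- and 32-bit integers, a 64-bit integer, a pair of
   floats, or a double.  */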
4002static struct type *
4003arm_neon_double_type (struct gdbarch *gdbarch)
4004{
4005 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4006
4007 if (tdep->neon_double_type == NULL)
4008 {
4009 struct type *t, *elem;
4010
4011 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4012 TYPE_CODE_UNION);
4013 elem = builtin_type (gdbarch)->builtin_uint8;
4014 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4015 elem = builtin_type (gdbarch)->builtin_uint16;
4016 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4017 elem = builtin_type (gdbarch)->builtin_uint32;
4018 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4019 elem = builtin_type (gdbarch)->builtin_uint64;
4020 append_composite_type_field (t, "u64", elem);
4021 elem = builtin_type (gdbarch)->builtin_float;
4022 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4023 elem = builtin_type (gdbarch)->builtin_double;
4024 append_composite_type_field (t, "f64", elem);
4025
4026 TYPE_VECTOR (t) = 1;
4027 TYPE_NAME (t) = "neon_d";
4028 tdep->neon_double_type = t;
4029 }
4030
4031 return tdep->neon_double_type;
4032}
4033
4034/* FIXME: The vector types are not correctly ordered on big-endian
4035 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4036 bits of d0 - regardless of what unit size is being held in d0. So
4037 the offset of the first uint8 in d0 is 7, but the offset of the
4038 first float is 4. This code works as-is for little-endian
4039 targets. */
4040
4041static struct type *
4042arm_neon_quad_type (struct gdbarch *gdbarch)
4043{
4044 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4045
4046 if (tdep->neon_quad_type == NULL)
4047 {
4048 struct type *t, *elem;
4049
4050 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4051 TYPE_CODE_UNION);
4052 elem = builtin_type (gdbarch)->builtin_uint8;
4053 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4054 elem = builtin_type (gdbarch)->builtin_uint16;
4055 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4056 elem = builtin_type (gdbarch)->builtin_uint32;
4057 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4058 elem = builtin_type (gdbarch)->builtin_uint64;
4059 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4060 elem = builtin_type (gdbarch)->builtin_float;
4061 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4062 elem = builtin_type (gdbarch)->builtin_double;
4063 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4064
4065 TYPE_VECTOR (t) = 1;
4066 TYPE_NAME (t) = "neon_q";
4067 tdep->neon_quad_type = t;
4068 }
4069
4070 return tdep->neon_quad_type;
4071}
4072
34e8f22d
RE
4073/* Return the GDB type object for the "standard" data type of data in
4074 register N. */
4075
4076static struct type *
7a5ea0d4 4077arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4078{
58d6951d
DJ
4079 int num_regs = gdbarch_num_regs (gdbarch);
4080
4081 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4082 && regnum >= num_regs && regnum < num_regs + 32)
4083 return builtin_type (gdbarch)->builtin_float;
4084
4085 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4086 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4087 return arm_neon_quad_type (gdbarch);
4088
4089 /* If the target description has register information, we are only
4090 in this function so that we can override the types of
4091 double-precision registers for NEON. */
4092 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4093 {
4094 struct type *t = tdesc_register_type (gdbarch, regnum);
4095
4096 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4097 && TYPE_CODE (t) == TYPE_CODE_FLT
4098 && gdbarch_tdep (gdbarch)->have_neon)
4099 return arm_neon_double_type (gdbarch);
4100 else
4101 return t;
4102 }
4103
34e8f22d 4104 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4105 {
4106 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4107 return builtin_type (gdbarch)->builtin_void;
4108
4109 return arm_ext_type (gdbarch);
4110 }
e4c16157 4111 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4112 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4113 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4114 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4115 else if (regnum >= ARRAY_SIZE (arm_register_names))
4116 /* These registers are only supported on targets which supply
4117 an XML description. */
df4df182 4118 return builtin_type (gdbarch)->builtin_int0;
032758dc 4119 else
df4df182 4120 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4121}
4122
ff6f572f
DJ
4123/* Map a DWARF register REGNUM onto the appropriate GDB register
4124 number. */
4125
4126static int
d3f73121 4127arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4128{
4129 /* Core integer regs. */
4130 if (reg >= 0 && reg <= 15)
4131 return reg;
4132
4133 /* Legacy FPA encoding. These were once used in a way which
4134 overlapped with VFP register numbering, so their use is
4135 discouraged, but GDB doesn't support the ARM toolchain
4136 which used them for VFP. */
4137 if (reg >= 16 && reg <= 23)
4138 return ARM_F0_REGNUM + reg - 16;
4139
4140 /* New assignments for the FPA registers. */
4141 if (reg >= 96 && reg <= 103)
4142 return ARM_F0_REGNUM + reg - 96;
4143
4144 /* WMMX register assignments. */
4145 if (reg >= 104 && reg <= 111)
4146 return ARM_WCGR0_REGNUM + reg - 104;
4147
4148 if (reg >= 112 && reg <= 127)
4149 return ARM_WR0_REGNUM + reg - 112;
4150
4151 if (reg >= 192 && reg <= 199)
4152 return ARM_WC0_REGNUM + reg - 192;
4153
58d6951d
DJ
4154 /* VFP v2 registers. A double precision value is actually
4155 in d1 rather than s2, but the ABI only defines numbering
4156 for the single precision registers. This will "just work"
4157 in GDB for little endian targets (we'll read eight bytes,
4158 starting in s0 and then progressing to s1), but will be
4159 reversed on big endian targets with VFP. This won't
4160 be a problem for the new Neon quad registers; you're supposed
4161 to use DW_OP_piece for those. */
4162 if (reg >= 64 && reg <= 95)
4163 {
4164 char name_buf[4];
4165
8c042590 4166 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4167 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4168 strlen (name_buf));
4169 }
4170
4171 /* VFP v3 / Neon registers. This range is also used for VFP v2
4172 registers, except that it now describes d0 instead of s0. */
4173 if (reg >= 256 && reg <= 287)
4174 {
4175 char name_buf[4];
4176
8c042590 4177 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4178 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4179 strlen (name_buf));
4180 }
4181
ff6f572f
DJ
4182 return -1;
4183}
4184
26216b98
AC
4185/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4186static int
e7faf938 4187arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4188{
4189 int reg = regnum;
e7faf938 4190 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4191
ff6f572f
DJ
4192 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4193 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4194
4195 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4196 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4197
4198 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4199 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4200
26216b98
AC
4201 if (reg < NUM_GREGS)
4202 return SIM_ARM_R0_REGNUM + reg;
4203 reg -= NUM_GREGS;
4204
4205 if (reg < NUM_FREGS)
4206 return SIM_ARM_FP0_REGNUM + reg;
4207 reg -= NUM_FREGS;
4208
4209 if (reg < NUM_SREGS)
4210 return SIM_ARM_FPS_REGNUM + reg;
4211 reg -= NUM_SREGS;
4212
edefbb7c 4213 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4214}
34e8f22d 4215
a37b3cc0
AC
4216/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4217 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
 4218	 It is thought that this is the floating-point register format on
4219 little-endian systems. */
c906108c 4220
ed9a39eb 4221static void
b508a996 4222convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4223 void *dbl, int endianess)
c906108c 4224{
a37b3cc0 4225 DOUBLEST d;
be8626e0
MD
4226
4227 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4228 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4229 else
4230 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4231 ptr, &d);
b508a996 4232 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4233}
4234
34e8f22d 4235static void
be8626e0
MD
4236convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4237 int endianess)
c906108c 4238{
a37b3cc0 4239 DOUBLEST d;
be8626e0 4240
b508a996 4241 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4242 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4243 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4244 else
4245 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4246 &d, dbl);
c906108c 4247}
ed9a39eb 4248
d9311bfa
AT
4249/* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4250 of the appropriate mode (as encoded in the PC value), even if this
4251 differs from what would be expected according to the symbol tables. */
4252
4253void
4254arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4255 struct address_space *aspace,
4256 CORE_ADDR pc)
c906108c 4257{
b7b633e9
TT
4258 scoped_restore save_override_mode
4259 = make_scoped_restore (&arm_override_mode,
4260 (int) IS_THUMB_ADDR (pc));
d9311bfa 4261 pc = gdbarch_addr_bits_remove (gdbarch, pc);
c5aa993b 4262
d9311bfa 4263 insert_single_step_breakpoint (gdbarch, aspace, pc);
d9311bfa 4264}
c5aa993b 4265
d9311bfa
AT
4266/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4267 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4268 NULL if an error occurs. BUF is freed. */
c906108c 4269
d9311bfa
AT
4270static gdb_byte *
4271extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4272 int old_len, int new_len)
4273{
4274 gdb_byte *new_buf;
4275 int bytes_to_read = new_len - old_len;
c906108c 4276
d9311bfa
AT
4277 new_buf = (gdb_byte *) xmalloc (new_len);
4278 memcpy (new_buf + bytes_to_read, buf, old_len);
4279 xfree (buf);
4280 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4281 {
4282 xfree (new_buf);
4283 return NULL;
c906108c 4284 }
d9311bfa 4285 return new_buf;
c906108c
SS
4286}
4287
d9311bfa
AT
4288/* An IT block is at most the 2-byte IT instruction followed by
4289 four 4-byte instructions. The furthest back we must search to
4290 find an IT block that affects the current instruction is thus
4291 2 + 3 * 4 == 14 bytes. */
4292#define MAX_IT_BLOCK_PREFIX 14
177321bd 4293
d9311bfa
AT
4294/* Use a quick scan if there are more than this many bytes of
4295 code. */
4296#define IT_SCAN_THRESHOLD 32
177321bd 4297
d9311bfa
AT
4298/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4299 A breakpoint in an IT block may not be hit, depending on the
4300 condition flags. */
ad527d2e 4301static CORE_ADDR
d9311bfa 4302arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4303{
d9311bfa
AT
4304 gdb_byte *buf;
4305 char map_type;
4306 CORE_ADDR boundary, func_start;
4307 int buf_len;
4308 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4309 int i, any, last_it, last_it_count;
177321bd 4310
d9311bfa
AT
4311 /* If we are using BKPT breakpoints, none of this is necessary. */
4312 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4313 return bpaddr;
177321bd 4314
d9311bfa
AT
4315 /* ARM mode does not have this problem. */
4316 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4317 return bpaddr;
177321bd 4318
d9311bfa
AT
4319 /* We are setting a breakpoint in Thumb code that could potentially
4320 contain an IT block. The first step is to find how much Thumb
4321 code there is; we do not need to read outside of known Thumb
4322 sequences. */
4323 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4324 if (map_type == 0)
4325 /* Thumb-2 code must have mapping symbols to have a chance. */
4326 return bpaddr;
9dca5578 4327
d9311bfa 4328 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4329
d9311bfa
AT
4330 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4331 && func_start > boundary)
4332 boundary = func_start;
9dca5578 4333
d9311bfa
AT
4334 /* Search for a candidate IT instruction. We have to do some fancy
4335 footwork to distinguish a real IT instruction from the second
4336 half of a 32-bit instruction, but there is no need for that if
4337 there's no candidate. */
325fac50 4338 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4339 if (buf_len == 0)
4340 /* No room for an IT instruction. */
4341 return bpaddr;
c906108c 4342
d9311bfa
AT
4343 buf = (gdb_byte *) xmalloc (buf_len);
4344 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4345 return bpaddr;
4346 any = 0;
4347 for (i = 0; i < buf_len; i += 2)
c906108c 4348 {
d9311bfa
AT
4349 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4350 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4351 {
d9311bfa
AT
4352 any = 1;
4353 break;
25b41d01 4354 }
c906108c 4355 }
d9311bfa
AT
4356
4357 if (any == 0)
c906108c 4358 {
d9311bfa
AT
4359 xfree (buf);
4360 return bpaddr;
f9d67f43
DJ
4361 }
4362
4363 /* OK, the code bytes before this instruction contain at least one
4364 halfword which resembles an IT instruction. We know that it's
4365 Thumb code, but there are still two possibilities. Either the
4366 halfword really is an IT instruction, or it is the second half of
4367 a 32-bit Thumb instruction. The only way we can tell is to
4368 scan forwards from a known instruction boundary. */
4369 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4370 {
4371 int definite;
4372
4373 /* There's a lot of code before this instruction. Start with an
4374 optimistic search; it's easy to recognize halfwords that can
4375 not be the start of a 32-bit instruction, and use that to
4376 lock on to the instruction boundaries. */
4377 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4378 if (buf == NULL)
4379 return bpaddr;
4380 buf_len = IT_SCAN_THRESHOLD;
4381
4382 definite = 0;
4383 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4384 {
4385 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4386 if (thumb_insn_size (inst1) == 2)
4387 {
4388 definite = 1;
4389 break;
4390 }
4391 }
4392
4393 /* At this point, if DEFINITE, BUF[I] is the first place we
4394 are sure that we know the instruction boundaries, and it is far
4395 enough from BPADDR that we could not miss an IT instruction
4396 affecting BPADDR. If ! DEFINITE, give up - start from a
4397 known boundary. */
4398 if (! definite)
4399 {
0963b4bd
MS
4400 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4401 bpaddr - boundary);
f9d67f43
DJ
4402 if (buf == NULL)
4403 return bpaddr;
4404 buf_len = bpaddr - boundary;
4405 i = 0;
4406 }
4407 }
4408 else
4409 {
4410 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4411 if (buf == NULL)
4412 return bpaddr;
4413 buf_len = bpaddr - boundary;
4414 i = 0;
4415 }
4416
4417 /* Scan forwards. Find the last IT instruction before BPADDR. */
4418 last_it = -1;
4419 last_it_count = 0;
4420 while (i < buf_len)
4421 {
4422 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4423 last_it_count--;
4424 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4425 {
4426 last_it = i;
4427 if (inst1 & 0x0001)
4428 last_it_count = 4;
4429 else if (inst1 & 0x0002)
4430 last_it_count = 3;
4431 else if (inst1 & 0x0004)
4432 last_it_count = 2;
4433 else
4434 last_it_count = 1;
4435 }
4436 i += thumb_insn_size (inst1);
4437 }
4438
4439 xfree (buf);
4440
4441 if (last_it == -1)
4442 /* There wasn't really an IT instruction after all. */
4443 return bpaddr;
4444
4445 if (last_it_count < 1)
4446 /* It was too far away. */
4447 return bpaddr;
4448
4449 /* This really is a trouble spot. Move the breakpoint to the IT
4450 instruction. */
4451 return bpaddr - buf_len + last_it;
4452}
4453
cca44b1b 4454/* ARM displaced stepping support.
c906108c 4455
cca44b1b 4456 Generally ARM displaced stepping works as follows:
c906108c 4457
cca44b1b 4458 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4459 arm_process_displaced_insn. Depending on the type of instruction, it is
4460 then copied to a scratch location, possibly in a modified form. The
4461 copy_* set of functions performs such modification, as necessary. A
4462 breakpoint is placed after the modified instruction in the scratch space
4463 to return control to GDB. Note in particular that instructions which
4464 modify the PC will no longer do so after modification.
c5aa993b 4465
cca44b1b
JB
4466 2. The instruction is single-stepped, by setting the PC to the scratch
4467 location address, and resuming. Control returns to GDB when the
4468 breakpoint is hit.
c5aa993b 4469
cca44b1b
JB
4470 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4471 function used for the current instruction. This function's job is to
4472 put the CPU/memory state back to what it would have been if the
4473 instruction had been executed unmodified in its original location. */
c5aa993b 4474
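/* As a rough illustration of the scheme above: a PC-relative instruction
   such as "add r1, pc, #4" cannot simply be executed from the scratch
   area, because PC would then read as the scratch address.  The copy_*
   routine therefore rewrites the instruction to use a scratch register
   which has been preloaded with the value the PC would have had at the
   original location, and the matching cleanup_* routine restores the
   scratch register afterwards.  */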
cca44b1b
JB
4475/* NOP instruction (mov r0, r0). */
4476#define ARM_NOP 0xe1a00000
34518530 4477#define THUMB_NOP 0x4600
cca44b1b
JB
4478
4479/* Helper for register reads for displaced stepping. In particular, this
4480 returns the PC as it would be seen by the instruction at its original
4481 location. */
4482
4483ULONGEST
36073a92
YQ
4484displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4485 int regno)
cca44b1b
JB
4486{
4487 ULONGEST ret;
36073a92 4488 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4489
bf9f652a 4490 if (regno == ARM_PC_REGNUM)
cca44b1b 4491 {
4db71c0b
YQ
4492 /* Compute pipeline offset:
4493 - When executing an ARM instruction, PC reads as the address of the
4494 current instruction plus 8.
4495 - When executing a Thumb instruction, PC reads as the address of the
4496 current instruction plus 4. */
4497
36073a92 4498 if (!dsc->is_thumb)
4db71c0b
YQ
4499 from += 8;
4500 else
4501 from += 4;
4502
cca44b1b
JB
4503 if (debug_displaced)
4504 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4505 (unsigned long) from);
4506 return (ULONGEST) from;
cca44b1b 4507 }
c906108c 4508 else
cca44b1b
JB
4509 {
4510 regcache_cooked_read_unsigned (regs, regno, &ret);
4511 if (debug_displaced)
4512 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4513 regno, (unsigned long) ret);
4514 return ret;
4515 }
c906108c
SS
4516}
4517
cca44b1b
JB
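/* Return non-zero if the T bit of the CPSR in REGS is clear, i.e. the
   inferior is currently executing in ARM rather than Thumb state.  */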
4518static int
4519displaced_in_arm_mode (struct regcache *regs)
4520{
4521 ULONGEST ps;
9779414d 4522 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4523
cca44b1b 4524 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4525
9779414d 4526 return (ps & t_bit) == 0;
cca44b1b 4527}
66e810cd 4528
cca44b1b 4529/* Write to the PC as from a branch instruction. */
c906108c 4530
cca44b1b 4531static void
36073a92
YQ
4532branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4533 ULONGEST val)
c906108c 4534{
36073a92 4535 if (!dsc->is_thumb)
cca44b1b
JB
4536 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4537 architecture versions < 6. */
0963b4bd
MS
4538 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4539 val & ~(ULONGEST) 0x3);
cca44b1b 4540 else
0963b4bd
MS
4541 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4542 val & ~(ULONGEST) 0x1);
cca44b1b 4543}
66e810cd 4544
cca44b1b
JB
4545/* Write to the PC as from a branch-exchange instruction. */
4546
4547static void
4548bx_write_pc (struct regcache *regs, ULONGEST val)
4549{
4550 ULONGEST ps;
9779414d 4551 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4552
4553 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4554
4555 if ((val & 1) == 1)
c906108c 4556 {
9779414d 4557 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4558 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4559 }
4560 else if ((val & 2) == 0)
4561 {
9779414d 4562 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4563 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4564 }
4565 else
4566 {
cca44b1b
JB
4567 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4568 mode, align dest to 4 bytes). */
4569 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4570 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4571 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4572 }
4573}
ed9a39eb 4574
cca44b1b 4575/* Write to the PC as if from a load instruction. */
ed9a39eb 4576
34e8f22d 4577static void
36073a92
YQ
4578load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4579 ULONGEST val)
ed9a39eb 4580{
cca44b1b
JB
4581 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4582 bx_write_pc (regs, val);
4583 else
36073a92 4584 branch_write_pc (regs, dsc, val);
cca44b1b 4585}
be8626e0 4586
cca44b1b
JB
4587/* Write to the PC as if from an ALU instruction. */
4588
4589static void
36073a92
YQ
4590alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4591 ULONGEST val)
cca44b1b 4592{
36073a92 4593 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4594 bx_write_pc (regs, val);
4595 else
36073a92 4596 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4597}
4598
4599/* Helper for writing to registers for displaced stepping. Writing to the PC
 4600	 has varying effects depending on the instruction which does the write:
4601 this is controlled by the WRITE_PC argument. */
4602
4603void
4604displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4605 int regno, ULONGEST val, enum pc_write_style write_pc)
4606{
bf9f652a 4607 if (regno == ARM_PC_REGNUM)
08216dd7 4608 {
cca44b1b
JB
4609 if (debug_displaced)
4610 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4611 (unsigned long) val);
4612 switch (write_pc)
08216dd7 4613 {
cca44b1b 4614 case BRANCH_WRITE_PC:
36073a92 4615 branch_write_pc (regs, dsc, val);
08216dd7
RE
4616 break;
4617
cca44b1b
JB
4618 case BX_WRITE_PC:
4619 bx_write_pc (regs, val);
4620 break;
4621
4622 case LOAD_WRITE_PC:
36073a92 4623 load_write_pc (regs, dsc, val);
cca44b1b
JB
4624 break;
4625
4626 case ALU_WRITE_PC:
36073a92 4627 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4628 break;
4629
4630 case CANNOT_WRITE_PC:
4631 warning (_("Instruction wrote to PC in an unexpected way when "
4632 "single-stepping"));
08216dd7
RE
4633 break;
4634
4635 default:
97b9747c
JB
4636 internal_error (__FILE__, __LINE__,
4637 _("Invalid argument to displaced_write_reg"));
08216dd7 4638 }
b508a996 4639
cca44b1b 4640 dsc->wrote_to_pc = 1;
b508a996 4641 }
ed9a39eb 4642 else
b508a996 4643 {
cca44b1b
JB
4644 if (debug_displaced)
4645 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4646 regno, (unsigned long) val);
4647 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4648 }
34e8f22d
RE
4649}
4650
cca44b1b
JB
4651/* This function is used to concisely determine if an instruction INSN
4652 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4653 corresponding fields of BITMASK set to 0b1111. The function
 4654	 returns 1 if any of these fields in INSN reference the PC
4655 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4656
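/* For example, for a data-processing instruction with Rn in bits 16-19
   and Rd in bits 12-15, a BITMASK of 0x000ff000 asks whether either of
   those fields is r15 (the PC).  */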
4657static int
cca44b1b 4658insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4659{
cca44b1b 4660 uint32_t lowbit = 1;
67255d04 4661
cca44b1b
JB
4662 while (bitmask != 0)
4663 {
4664 uint32_t mask;
44e1a9eb 4665
cca44b1b
JB
4666 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4667 ;
67255d04 4668
cca44b1b
JB
4669 if (!lowbit)
4670 break;
67255d04 4671
cca44b1b 4672 mask = lowbit * 0xf;
67255d04 4673
cca44b1b
JB
4674 if ((insn & mask) == mask)
4675 return 1;
4676
4677 bitmask &= ~mask;
67255d04
RE
4678 }
4679
cca44b1b
JB
4680 return 0;
4681}
2af48f68 4682
cca44b1b
JB
4683/* The simplest copy function. Many instructions have the same effect no
4684 matter what address they are executed at: in those cases, use this. */
67255d04 4685
cca44b1b 4686static int
7ff120b4
YQ
4687arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4688 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4689{
4690 if (debug_displaced)
4691 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4692 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4693 iname);
67255d04 4694
cca44b1b 4695 dsc->modinsn[0] = insn;
67255d04 4696
cca44b1b
JB
4697 return 0;
4698}
4699
34518530
YQ
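/* Copy a 32-bit Thumb-2 instruction without any modification; both
   halfwords are recorded in the scratch buffer.  */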
4700static int
4701thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4702 uint16_t insn2, const char *iname,
4703 struct displaced_step_closure *dsc)
4704{
4705 if (debug_displaced)
4706 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4707 "opcode/class '%s' unmodified\n", insn1, insn2,
4708 iname);
4709
4710 dsc->modinsn[0] = insn1;
4711 dsc->modinsn[1] = insn2;
4712 dsc->numinsns = 2;
4713
4714 return 0;
4715}
4716
 4717/* Copy a 16-bit Thumb instruction (plain Thumb or 16-bit Thumb-2) without
 4718   any modification.  */
4719static int
615234c1 4720thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4721 const char *iname,
4722 struct displaced_step_closure *dsc)
4723{
4724 if (debug_displaced)
4725 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4726 "opcode/class '%s' unmodified\n", insn,
4727 iname);
4728
4729 dsc->modinsn[0] = insn;
4730
4731 return 0;
4732}
4733
cca44b1b
JB
4734/* Preload instructions with immediate offset. */
4735
4736static void
6e39997a 4737cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4738 struct regcache *regs, struct displaced_step_closure *dsc)
4739{
4740 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4741 if (!dsc->u.preload.immed)
4742 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4743}
4744
7ff120b4
YQ
4745static void
4746install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4747 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4748{
cca44b1b 4749 ULONGEST rn_val;
cca44b1b
JB
4750 /* Preload instructions:
4751
4752 {pli/pld} [rn, #+/-imm]
4753 ->
4754 {pli/pld} [r0, #+/-imm]. */
4755
36073a92
YQ
4756 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4757 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4758 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4759 dsc->u.preload.immed = 1;
4760
cca44b1b 4761 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4762}
4763
cca44b1b 4764static int
7ff120b4 4765arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4766 struct displaced_step_closure *dsc)
4767{
4768 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4769
7ff120b4
YQ
4770 if (!insn_references_pc (insn, 0x000f0000ul))
4771 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4772
4773 if (debug_displaced)
4774 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4775 (unsigned long) insn);
4776
7ff120b4
YQ
4777 dsc->modinsn[0] = insn & 0xfff0ffff;
4778
4779 install_preload (gdbarch, regs, dsc, rn);
4780
4781 return 0;
4782}
4783
34518530
YQ
4784static int
4785thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4786 struct regcache *regs, struct displaced_step_closure *dsc)
4787{
4788 unsigned int rn = bits (insn1, 0, 3);
4789 unsigned int u_bit = bit (insn1, 7);
4790 int imm12 = bits (insn2, 0, 11);
4791 ULONGEST pc_val;
4792
4793 if (rn != ARM_PC_REGNUM)
4794 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4795
 4796	 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4797 PLD (literal) Encoding T1. */
4798 if (debug_displaced)
4799 fprintf_unfiltered (gdb_stdlog,
4800 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4801 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4802 imm12);
4803
4804 if (!u_bit)
4805 imm12 = -1 * imm12;
4806
4807 /* Rewrite instruction {pli/pld} PC imm12 into:
4808 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4809
4810 {pli/pld} [r0, r1]
4811
4812 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4813
4814 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4815 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4816
4817 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4818
4819 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4820 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4821 dsc->u.preload.immed = 0;
4822
4823 /* {pli/pld} [r0, r1] */
4824 dsc->modinsn[0] = insn1 & 0xfff0;
4825 dsc->modinsn[1] = 0xf001;
4826 dsc->numinsns = 2;
4827
4828 dsc->cleanup = &cleanup_preload;
4829 return 0;
4830}
4831
7ff120b4
YQ
4832/* Preload instructions with register offset. */
4833
4834static void
 4835install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4836 struct displaced_step_closure *dsc, unsigned int rn,
4837 unsigned int rm)
4838{
4839 ULONGEST rn_val, rm_val;
4840
cca44b1b
JB
4841 /* Preload register-offset instructions:
4842
4843 {pli/pld} [rn, rm {, shift}]
4844 ->
4845 {pli/pld} [r0, r1 {, shift}]. */
4846
36073a92
YQ
4847 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4848 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4849 rn_val = displaced_read_reg (regs, dsc, rn);
4850 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4851 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4852 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4853 dsc->u.preload.immed = 0;
4854
cca44b1b 4855 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4856}
4857
4858static int
4859arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4860 struct regcache *regs,
4861 struct displaced_step_closure *dsc)
4862{
4863 unsigned int rn = bits (insn, 16, 19);
4864 unsigned int rm = bits (insn, 0, 3);
4865
4866
4867 if (!insn_references_pc (insn, 0x000f000ful))
4868 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4869
4870 if (debug_displaced)
4871 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4872 (unsigned long) insn);
4873
4874 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4875
7ff120b4 4876 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4877 return 0;
4878}
4879
4880/* Copy/cleanup coprocessor load and store instructions. */
4881
4882static void
6e39997a 4883cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4884 struct regcache *regs,
4885 struct displaced_step_closure *dsc)
4886{
36073a92 4887 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4888
4889 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4890
4891 if (dsc->u.ldst.writeback)
4892 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4893}
4894
7ff120b4
YQ
4895static void
4896install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4897 struct displaced_step_closure *dsc,
4898 int writeback, unsigned int rn)
cca44b1b 4899{
cca44b1b 4900 ULONGEST rn_val;
cca44b1b 4901
cca44b1b
JB
4902 /* Coprocessor load/store instructions:
4903
4904 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4905 ->
4906 {stc/stc2} [r0, #+/-imm].
4907
4908 ldc/ldc2 are handled identically. */
4909
36073a92
YQ
4910 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4911 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4912 /* PC should be 4-byte aligned. */
4913 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4914 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4915
7ff120b4 4916 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4917 dsc->u.ldst.rn = rn;
4918
7ff120b4
YQ
4919 dsc->cleanup = &cleanup_copro_load_store;
4920}
4921
4922static int
4923arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4924 struct regcache *regs,
4925 struct displaced_step_closure *dsc)
4926{
4927 unsigned int rn = bits (insn, 16, 19);
4928
4929 if (!insn_references_pc (insn, 0x000f0000ul))
4930 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4931
4932 if (debug_displaced)
4933 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4934 "load/store insn %.8lx\n", (unsigned long) insn);
4935
cca44b1b
JB
4936 dsc->modinsn[0] = insn & 0xfff0ffff;
4937
7ff120b4 4938 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4939
4940 return 0;
4941}
4942
34518530
YQ
4943static int
4944thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4945 uint16_t insn2, struct regcache *regs,
4946 struct displaced_step_closure *dsc)
4947{
4948 unsigned int rn = bits (insn1, 0, 3);
4949
4950 if (rn != ARM_PC_REGNUM)
4951 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4952 "copro load/store", dsc);
4953
4954 if (debug_displaced)
4955 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4956 "load/store insn %.4x%.4x\n", insn1, insn2);
4957
4958 dsc->modinsn[0] = insn1 & 0xfff0;
4959 dsc->modinsn[1] = insn2;
4960 dsc->numinsns = 2;
4961
 4962	 /* This function is called for copying the LDC/LDC2/VLDR instructions,
 4963	    which do not support writeback, so pass 0.  */
4964 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4965
4966 return 0;
4967}
4968
cca44b1b
JB
4969/* Clean up branch instructions (actually perform the branch, by setting
4970 PC). */
4971
4972static void
6e39997a 4973cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4974 struct displaced_step_closure *dsc)
4975{
36073a92 4976 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4977 int branch_taken = condition_true (dsc->u.branch.cond, status);
4978 enum pc_write_style write_pc = dsc->u.branch.exchange
4979 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4980
4981 if (!branch_taken)
4982 return;
4983
4984 if (dsc->u.branch.link)
4985 {
8c8dba6d
YQ
 4986	 /* The value of LR should be the address of the next insn.  In order
 4987	    not to confuse the logic handling a later `bx lr' insn, if the
 4988	    current insn mode is Thumb, bit 0 of the LR value should be set to 1.  */
4989 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4990
4991 if (dsc->is_thumb)
4992 next_insn_addr |= 0x1;
4993
4994 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4995 CANNOT_WRITE_PC);
cca44b1b
JB
4996 }
4997
bf9f652a 4998 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4999}
5000
5001/* Copy B/BL/BLX instructions with immediate destinations. */
5002
7ff120b4
YQ
5003static void
5004install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5005 struct displaced_step_closure *dsc,
5006 unsigned int cond, int exchange, int link, long offset)
5007{
5008 /* Implement "BL<cond> <label>" as:
5009
5010 Preparation: cond <- instruction condition
5011 Insn: mov r0, r0 (nop)
5012 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5013
5014 B<cond> similar, but don't set r14 in cleanup. */
5015
5016 dsc->u.branch.cond = cond;
5017 dsc->u.branch.link = link;
5018 dsc->u.branch.exchange = exchange;
5019
2b16b2e3
YQ
5020 dsc->u.branch.dest = dsc->insn_addr;
5021 if (link && exchange)
5022 /* For BLX, offset is computed from the Align (PC, 4). */
5023 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5024
7ff120b4 5025 if (dsc->is_thumb)
2b16b2e3 5026 dsc->u.branch.dest += 4 + offset;
7ff120b4 5027 else
2b16b2e3 5028 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
5029
5030 dsc->cleanup = &cleanup_branch;
5031}
cca44b1b 5032static int
7ff120b4
YQ
5033arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5034 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5035{
5036 unsigned int cond = bits (insn, 28, 31);
5037 int exchange = (cond == 0xf);
5038 int link = exchange || bit (insn, 24);
cca44b1b
JB
5039 long offset;
5040
5041 if (debug_displaced)
5042 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5043 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5044 (unsigned long) insn);
cca44b1b
JB
5045 if (exchange)
5046 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5047 then arrange the switch into Thumb mode. */
5048 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5049 else
5050 offset = bits (insn, 0, 23) << 2;
5051
5052 if (bit (offset, 25))
5053 offset = offset | ~0x3ffffff;
5054
cca44b1b
JB
5055 dsc->modinsn[0] = ARM_NOP;
5056
7ff120b4 5057 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5058 return 0;
5059}
5060
34518530
YQ
5061static int
5062thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5063 uint16_t insn2, struct regcache *regs,
5064 struct displaced_step_closure *dsc)
5065{
5066 int link = bit (insn2, 14);
5067 int exchange = link && !bit (insn2, 12);
5068 int cond = INST_AL;
5069 long offset = 0;
5070 int j1 = bit (insn2, 13);
5071 int j2 = bit (insn2, 11);
5072 int s = sbits (insn1, 10, 10);
5073 int i1 = !(j1 ^ bit (insn1, 10));
5074 int i2 = !(j2 ^ bit (insn1, 10));
5075
5076 if (!link && !exchange) /* B */
5077 {
5078 offset = (bits (insn2, 0, 10) << 1);
5079 if (bit (insn2, 12)) /* Encoding T4 */
5080 {
5081 offset |= (bits (insn1, 0, 9) << 12)
5082 | (i2 << 22)
5083 | (i1 << 23)
5084 | (s << 24);
5085 cond = INST_AL;
5086 }
5087 else /* Encoding T3 */
5088 {
5089 offset |= (bits (insn1, 0, 5) << 12)
5090 | (j1 << 18)
5091 | (j2 << 19)
5092 | (s << 20);
5093 cond = bits (insn1, 6, 9);
5094 }
5095 }
5096 else
5097 {
5098 offset = (bits (insn1, 0, 9) << 12);
5099 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5100 offset |= exchange ?
5101 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5102 }
5103
5104 if (debug_displaced)
5105 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5106 "%.4x %.4x with offset %.8lx\n",
5107 link ? (exchange) ? "blx" : "bl" : "b",
5108 insn1, insn2, offset);
5109
5110 dsc->modinsn[0] = THUMB_NOP;
5111
5112 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5113 return 0;
5114}
5115
5116/* Copy B Thumb instructions. */
5117static int
615234c1 5118thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5119 struct displaced_step_closure *dsc)
5120{
5121 unsigned int cond = 0;
5122 int offset = 0;
5123 unsigned short bit_12_15 = bits (insn, 12, 15);
5124 CORE_ADDR from = dsc->insn_addr;
5125
5126 if (bit_12_15 == 0xd)
5127 {
5128 /* offset = SignExtend (imm8:0, 32) */
5129 offset = sbits ((insn << 1), 0, 8);
5130 cond = bits (insn, 8, 11);
5131 }
5132 else if (bit_12_15 == 0xe) /* Encoding T2 */
5133 {
5134 offset = sbits ((insn << 1), 0, 11);
5135 cond = INST_AL;
5136 }
5137
5138 if (debug_displaced)
5139 fprintf_unfiltered (gdb_stdlog,
5140 "displaced: copying b immediate insn %.4x "
5141 "with offset %d\n", insn, offset);
5142
5143 dsc->u.branch.cond = cond;
5144 dsc->u.branch.link = 0;
5145 dsc->u.branch.exchange = 0;
5146 dsc->u.branch.dest = from + 4 + offset;
5147
5148 dsc->modinsn[0] = THUMB_NOP;
5149
5150 dsc->cleanup = &cleanup_branch;
5151
5152 return 0;
5153}
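For the encoding T1 case above, the expression sbits ((insn << 1), 0, 8) is a compact way of computing SignExtend (imm8:'0', 32); a hedged equivalent spelled out step by step (illustrative only, names invented):

#include <stdint.h>

/* Equivalent of "sbits ((insn << 1), 0, 8)" for a Thumb B<cond> (T1).  */
static int
sketch_t1_branch_offset (uint16_t insn)
{
  int offset = (insn & 0xff) << 1;  /* imm8:'0'.  */

  if (offset & 0x100)               /* Bit 8 is the sign bit.  */
    offset |= ~0x1ff;               /* Sign-extend to 32 bits.  */

  return offset;
}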
5154
cca44b1b
JB
5155/* Copy BX/BLX with register-specified destinations. */
5156
7ff120b4
YQ
5157static void
5158install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5159 struct displaced_step_closure *dsc, int link,
5160 unsigned int cond, unsigned int rm)
cca44b1b 5161{
cca44b1b
JB
5162 /* Implement {BX,BLX}<cond> <reg>" as:
5163
5164 Preparation: cond <- instruction condition
5165 Insn: mov r0, r0 (nop)
5166 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5167
5168 Don't set r14 in cleanup for BX. */
5169
36073a92 5170 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5171
5172 dsc->u.branch.cond = cond;
5173 dsc->u.branch.link = link;
cca44b1b 5174
7ff120b4 5175 dsc->u.branch.exchange = 1;
cca44b1b
JB
5176
5177 dsc->cleanup = &cleanup_branch;
7ff120b4 5178}
cca44b1b 5179
7ff120b4
YQ
5180static int
5181arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5182 struct regcache *regs, struct displaced_step_closure *dsc)
5183{
5184 unsigned int cond = bits (insn, 28, 31);
5185 /* BX: x12xxx1x
5186 BLX: x12xxx3x. */
5187 int link = bit (insn, 5);
5188 unsigned int rm = bits (insn, 0, 3);
5189
5190 if (debug_displaced)
5191 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5192 (unsigned long) insn);
5193
5194 dsc->modinsn[0] = ARM_NOP;
5195
5196 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5197 return 0;
5198}
5199
34518530
YQ
5200static int
5201thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5202 struct regcache *regs,
5203 struct displaced_step_closure *dsc)
5204{
5205 int link = bit (insn, 7);
5206 unsigned int rm = bits (insn, 3, 6);
5207
5208 if (debug_displaced)
5209 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5210 (unsigned short) insn);
5211
5212 dsc->modinsn[0] = THUMB_NOP;
5213
5214 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5215
5216 return 0;
5217}
5218
5219
0963b4bd 5220/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5221
5222static void
6e39997a 5223cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5224 struct regcache *regs, struct displaced_step_closure *dsc)
5225{
36073a92 5226 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5227 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5228 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5229 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5230}
5231
5232static int
7ff120b4
YQ
5233arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5234 struct displaced_step_closure *dsc)
cca44b1b
JB
5235{
5236 unsigned int rn = bits (insn, 16, 19);
5237 unsigned int rd = bits (insn, 12, 15);
5238 unsigned int op = bits (insn, 21, 24);
5239 int is_mov = (op == 0xd);
5240 ULONGEST rd_val, rn_val;
cca44b1b
JB
5241
5242 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5243 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5244
5245 if (debug_displaced)
5246 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5247 "%.8lx\n", is_mov ? "move" : "ALU",
5248 (unsigned long) insn);
5249
5250 /* Instruction is of form:
5251
5252 <op><cond> rd, [rn,] #imm
5253
5254 Rewrite as:
5255
5256 Preparation: tmp1, tmp2 <- r0, r1;
5257 r0, r1 <- rd, rn
5258 Insn: <op><cond> r0, r1, #imm
5259 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5260 */
5261
36073a92
YQ
5262 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5263 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5264 rn_val = displaced_read_reg (regs, dsc, rn);
5265 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5266 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5267 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5268 dsc->rd = rd;
5269
5270 if (is_mov)
5271 dsc->modinsn[0] = insn & 0xfff00fff;
5272 else
5273 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5274
5275 dsc->cleanup = &cleanup_alu_imm;
5276
5277 return 0;
5278}
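To make the bit surgery above concrete, here is a worked example on an assumed encoding (not taken from the file): `add r2, pc, #4' assembles to 0xe28f2004; masking with 0xfff00fff clears the Rd and Rn fields, and OR-ing 0x10000 selects r1 as the new Rn, giving 0xe2810004, i.e. `add r0, r1, #4'. cleanup_alu_imm then copies r0 back into r2.

#include <stdint.h>

/* Illustrative only: the register renumbering applied by arm_copy_alu_imm.  */
static uint32_t
sketch_renumber_alu_imm (uint32_t insn, int is_mov)
{
  /* Rd becomes r0; Rn becomes r1 unless the insn is a MOV (no Rn).  */
  return is_mov ? (insn & 0xfff00fff) : ((insn & 0xfff00fff) | 0x10000);
}

/* sketch_renumber_alu_imm (0xe28f2004, 0) == 0xe2810004.  */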
5279
34518530
YQ
5280static int
5281thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5282 uint16_t insn2, struct regcache *regs,
5283 struct displaced_step_closure *dsc)
5284{
5285 unsigned int op = bits (insn1, 5, 8);
5286 unsigned int rn, rm, rd;
5287 ULONGEST rd_val, rn_val;
5288
5289 rn = bits (insn1, 0, 3); /* Rn */
5290 rm = bits (insn2, 0, 3); /* Rm */
5291 rd = bits (insn2, 8, 11); /* Rd */
5292
 5293 /* This routine is only called for the MOV instruction. */
5294 gdb_assert (op == 0x2 && rn == 0xf);
5295
5296 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5297 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5298
5299 if (debug_displaced)
5300 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5301 "ALU", insn1, insn2);
5302
5303 /* Instruction is of form:
5304
5305 <op><cond> rd, [rn,] #imm
5306
5307 Rewrite as:
5308
5309 Preparation: tmp1, tmp2 <- r0, r1;
5310 r0, r1 <- rd, rn
5311 Insn: <op><cond> r0, r1, #imm
5312 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5313 */
5314
5315 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5316 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5317 rn_val = displaced_read_reg (regs, dsc, rn);
5318 rd_val = displaced_read_reg (regs, dsc, rd);
5319 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5320 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5321 dsc->rd = rd;
5322
5323 dsc->modinsn[0] = insn1;
5324 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5325 dsc->numinsns = 2;
5326
5327 dsc->cleanup = &cleanup_alu_imm;
5328
5329 return 0;
5330}
5331
cca44b1b
JB
5332/* Copy/cleanup arithmetic/logic insns with register RHS. */
5333
5334static void
6e39997a 5335cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5336 struct regcache *regs, struct displaced_step_closure *dsc)
5337{
5338 ULONGEST rd_val;
5339 int i;
5340
36073a92 5341 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5342
5343 for (i = 0; i < 3; i++)
5344 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5345
5346 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5347}
5348
7ff120b4
YQ
5349static void
5350install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5351 struct displaced_step_closure *dsc,
5352 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5353{
cca44b1b 5354 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5355
cca44b1b
JB
5356 /* Instruction is of form:
5357
5358 <op><cond> rd, [rn,] rm [, <shift>]
5359
5360 Rewrite as:
5361
5362 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5363 r0, r1, r2 <- rd, rn, rm
ef713951 5364 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5365 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5366 */
5367
36073a92
YQ
5368 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5369 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5370 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5371 rd_val = displaced_read_reg (regs, dsc, rd);
5372 rn_val = displaced_read_reg (regs, dsc, rn);
5373 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5374 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5375 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5376 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5377 dsc->rd = rd;
5378
7ff120b4
YQ
5379 dsc->cleanup = &cleanup_alu_reg;
5380}
5381
5382static int
5383arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5384 struct displaced_step_closure *dsc)
5385{
5386 unsigned int op = bits (insn, 21, 24);
5387 int is_mov = (op == 0xd);
5388
5389 if (!insn_references_pc (insn, 0x000ff00ful))
5390 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5391
5392 if (debug_displaced)
5393 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5394 is_mov ? "move" : "ALU", (unsigned long) insn);
5395
cca44b1b
JB
5396 if (is_mov)
5397 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5398 else
5399 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5400
7ff120b4
YQ
5401 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5402 bits (insn, 0, 3));
cca44b1b
JB
5403 return 0;
5404}
5405
34518530
YQ
5406static int
5407thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5408 struct regcache *regs,
5409 struct displaced_step_closure *dsc)
5410{
ef713951 5411 unsigned rm, rd;
34518530 5412
ef713951
YQ
5413 rm = bits (insn, 3, 6);
5414 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5415
ef713951 5416 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5417 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5418
5419 if (debug_displaced)
ef713951
YQ
5420 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5421 (unsigned short) insn);
34518530 5422
ef713951 5423 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5424
ef713951 5425 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5426
5427 return 0;
5428}
5429
cca44b1b
JB
5430/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5431
5432static void
6e39997a 5433cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5434 struct regcache *regs,
5435 struct displaced_step_closure *dsc)
5436{
36073a92 5437 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5438 int i;
5439
5440 for (i = 0; i < 4; i++)
5441 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5442
5443 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5444}
5445
7ff120b4
YQ
5446static void
5447install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5448 struct displaced_step_closure *dsc,
5449 unsigned int rd, unsigned int rn, unsigned int rm,
5450 unsigned rs)
cca44b1b 5451{
7ff120b4 5452 int i;
cca44b1b 5453 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5454
cca44b1b
JB
5455 /* Instruction is of form:
5456
5457 <op><cond> rd, [rn,] rm, <shift> rs
5458
5459 Rewrite as:
5460
5461 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5462 r0, r1, r2, r3 <- rd, rn, rm, rs
5463 Insn: <op><cond> r0, r1, r2, <shift> r3
5464 Cleanup: tmp5 <- r0
5465 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5466 rd <- tmp5
5467 */
5468
5469 for (i = 0; i < 4; i++)
36073a92 5470 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5471
36073a92
YQ
5472 rd_val = displaced_read_reg (regs, dsc, rd);
5473 rn_val = displaced_read_reg (regs, dsc, rn);
5474 rm_val = displaced_read_reg (regs, dsc, rm);
5475 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5476 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5477 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5478 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5479 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5480 dsc->rd = rd;
7ff120b4
YQ
5481 dsc->cleanup = &cleanup_alu_shifted_reg;
5482}
5483
5484static int
5485arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5486 struct regcache *regs,
5487 struct displaced_step_closure *dsc)
5488{
5489 unsigned int op = bits (insn, 21, 24);
5490 int is_mov = (op == 0xd);
5491 unsigned int rd, rn, rm, rs;
5492
5493 if (!insn_references_pc (insn, 0x000fff0ful))
5494 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5495
5496 if (debug_displaced)
5497 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5498 "%.8lx\n", is_mov ? "move" : "ALU",
5499 (unsigned long) insn);
5500
5501 rn = bits (insn, 16, 19);
5502 rm = bits (insn, 0, 3);
5503 rs = bits (insn, 8, 11);
5504 rd = bits (insn, 12, 15);
cca44b1b
JB
5505
5506 if (is_mov)
5507 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5508 else
5509 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5510
7ff120b4 5511 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5512
5513 return 0;
5514}
5515
5516/* Clean up load instructions. */
5517
5518static void
6e39997a 5519cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5520 struct displaced_step_closure *dsc)
5521{
5522 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5523
36073a92 5524 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5525 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5526 rt_val2 = displaced_read_reg (regs, dsc, 1);
5527 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5528
5529 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5530 if (dsc->u.ldst.xfersize > 4)
5531 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5532 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5533 if (!dsc->u.ldst.immed)
5534 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5535
5536 /* Handle register writeback. */
5537 if (dsc->u.ldst.writeback)
5538 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5539 /* Put result in right place. */
5540 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5541 if (dsc->u.ldst.xfersize == 8)
5542 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5543}
5544
5545/* Clean up store instructions. */
5546
5547static void
6e39997a 5548cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5549 struct displaced_step_closure *dsc)
5550{
36073a92 5551 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5552
5553 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5554 if (dsc->u.ldst.xfersize > 4)
5555 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5556 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5557 if (!dsc->u.ldst.immed)
5558 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5559 if (!dsc->u.ldst.restore_r4)
5560 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5561
5562 /* Writeback. */
5563 if (dsc->u.ldst.writeback)
5564 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5565}
5566
5567/* Copy "extra" load/store instructions. These are halfword/doubleword
 5568 transfers, which have a different encoding from byte/word transfers. */
5569
5570static int
550dc4e2 5571arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5572 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5573{
5574 unsigned int op1 = bits (insn, 20, 24);
5575 unsigned int op2 = bits (insn, 5, 6);
5576 unsigned int rt = bits (insn, 12, 15);
5577 unsigned int rn = bits (insn, 16, 19);
5578 unsigned int rm = bits (insn, 0, 3);
5579 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5580 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5581 int immed = (op1 & 0x4) != 0;
5582 int opcode;
5583 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5584
5585 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5586 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5587
5588 if (debug_displaced)
5589 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5590 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5591 (unsigned long) insn);
5592
5593 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5594
5595 if (opcode < 0)
5596 internal_error (__FILE__, __LINE__,
5597 _("copy_extra_ld_st: instruction decode error"));
5598
36073a92
YQ
5599 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5600 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5601 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5602 if (!immed)
36073a92 5603 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5604
36073a92 5605 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5606 if (bytesize[opcode] == 8)
36073a92
YQ
5607 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5608 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5609 if (!immed)
36073a92 5610 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5611
5612 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5613 if (bytesize[opcode] == 8)
5614 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5615 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5616 if (!immed)
5617 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5618
5619 dsc->rd = rt;
5620 dsc->u.ldst.xfersize = bytesize[opcode];
5621 dsc->u.ldst.rn = rn;
5622 dsc->u.ldst.immed = immed;
5623 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5624 dsc->u.ldst.restore_r4 = 0;
5625
5626 if (immed)
5627 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5628 ->
5629 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5630 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5631 else
5632 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5633 ->
5634 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5635 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5636
5637 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5638
5639 return 0;
5640}
5641
0f6f04ba 5642/* Copy byte/halfword/word loads and stores. */
cca44b1b 5643
7ff120b4 5644static void
0f6f04ba
YQ
5645install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5646 struct displaced_step_closure *dsc, int load,
5647 int immed, int writeback, int size, int usermode,
5648 int rt, int rm, int rn)
cca44b1b 5649{
cca44b1b 5650 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5651
36073a92
YQ
5652 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5653 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5654 if (!immed)
36073a92 5655 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5656 if (!load)
36073a92 5657 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5658
36073a92
YQ
5659 rt_val = displaced_read_reg (regs, dsc, rt);
5660 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5661 if (!immed)
36073a92 5662 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5663
5664 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5665 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5666 if (!immed)
5667 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5668 dsc->rd = rt;
0f6f04ba 5669 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5670 dsc->u.ldst.rn = rn;
5671 dsc->u.ldst.immed = immed;
7ff120b4 5672 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5673
5674 /* To write PC we can do:
5675
494e194e
YQ
5676 Before this sequence of instructions:
5677 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
 5678 r2 is the Rn value got from displaced_read_reg.
5679
5680 Insn1: push {pc} Write address of STR instruction + offset on stack
5681 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5682 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5683 = addr(Insn1) + offset - addr(Insn3) - 8
5684 = offset - 16
5685 Insn4: add r4, r4, #8 r4 = offset - 8
5686 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5687 = from + offset
5688 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5689
5690 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5691 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5692 of this can be found in Section "Saving from r15" in
5693 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5694
7ff120b4
YQ
5695 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5696}
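The arithmetic promised by the six-instruction sequence in the comment above can be checked with a small sketch (invented names; `offset' stands for the architecture-dependent 8 or 12 added when PC is stored):

/* Sketch only: value stored for PC by the rewritten STR sequence.
   SCRATCH is the address Insn1 runs at; each insn is 4 bytes.  */
static unsigned int
sketch_stored_pc (unsigned int from, unsigned int scratch, int offset)
{
  unsigned int r0 = from + 8;           /* Displaced PC value read into r0.  */
  unsigned int r4 = scratch + offset;   /* Insn1/Insn2: push {pc}; pop {r4}.  */

  r4 -= (scratch + 8) + 8;              /* Insn3: sub r4, r4, pc (pc = addr(Insn3) + 8).  */
  r4 += 8;                              /* Insn4: r4 = offset - 8.  */

  return r0 + r4;                       /* Insn5: from + offset, as desired.  */
}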
5697
34518530
YQ
5698
5699static int
5700thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5701 uint16_t insn2, struct regcache *regs,
5702 struct displaced_step_closure *dsc, int size)
5703{
5704 unsigned int u_bit = bit (insn1, 7);
5705 unsigned int rt = bits (insn2, 12, 15);
5706 int imm12 = bits (insn2, 0, 11);
5707 ULONGEST pc_val;
5708
5709 if (debug_displaced)
5710 fprintf_unfiltered (gdb_stdlog,
5711 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5712 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5713 imm12);
5714
5715 if (!u_bit)
5716 imm12 = -1 * imm12;
5717
5718 /* Rewrite instruction LDR Rt imm12 into:
5719
5720 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5721
5722 LDR R0, R2, R3,
5723
5724 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5725
5726
5727 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5728 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5729 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5730
5731 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5732
5733 pc_val = pc_val & 0xfffffffc;
5734
5735 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5736 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5737
5738 dsc->rd = rt;
5739
5740 dsc->u.ldst.xfersize = size;
5741 dsc->u.ldst.immed = 0;
5742 dsc->u.ldst.writeback = 0;
5743 dsc->u.ldst.restore_r4 = 0;
5744
5745 /* LDR R0, R2, R3 */
5746 dsc->modinsn[0] = 0xf852;
5747 dsc->modinsn[1] = 0x3;
5748 dsc->numinsns = 2;
5749
5750 dsc->cleanup = &cleanup_load;
5751
5752 return 0;
5753}
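A hedged illustration of the literal-load rewrite above (assumed operand values, not GDB code): the copy routine puts Align (PC, 4) in r2 and the signed imm12 in r3, so the substituted `ldr r0, [r2, r3]' reads exactly the literal the original PC-relative load would have read.

#include <stdint.h>

/* Effective address seen by the rewritten "ldr r0, [r2, r3]".  */
static uint32_t
sketch_literal_address (uint32_t pc_val, int u_bit, int imm12)
{
  uint32_t base = pc_val & 0xfffffffc;     /* Align (PC, 4).  */

  return base + (u_bit ? imm12 : -imm12);  /* Signed imm12 goes into r3.  */
}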
5754
5755static int
5756thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5757 uint16_t insn2, struct regcache *regs,
5758 struct displaced_step_closure *dsc,
5759 int writeback, int immed)
5760{
5761 unsigned int rt = bits (insn2, 12, 15);
5762 unsigned int rn = bits (insn1, 0, 3);
5763 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5764 /* In LDR (register), there is also a register Rm, which is not allowed to
5765 be PC, so we don't have to check it. */
5766
5767 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5768 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5769 dsc);
5770
5771 if (debug_displaced)
5772 fprintf_unfiltered (gdb_stdlog,
5773 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5774 rt, rn, insn1, insn2);
5775
5776 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5777 0, rt, rm, rn);
5778
5779 dsc->u.ldst.restore_r4 = 0;
5780
5781 if (immed)
5782 /* ldr[b]<cond> rt, [rn, #imm], etc.
5783 ->
5784 ldr[b]<cond> r0, [r2, #imm]. */
5785 {
5786 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5787 dsc->modinsn[1] = insn2 & 0x0fff;
5788 }
5789 else
5790 /* ldr[b]<cond> rt, [rn, rm], etc.
5791 ->
5792 ldr[b]<cond> r0, [r2, r3]. */
5793 {
5794 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5795 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5796 }
5797
5798 dsc->numinsns = 2;
5799
5800 return 0;
5801}
5802
5803
7ff120b4
YQ
5804static int
5805arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5806 struct regcache *regs,
5807 struct displaced_step_closure *dsc,
0f6f04ba 5808 int load, int size, int usermode)
7ff120b4
YQ
5809{
5810 int immed = !bit (insn, 25);
5811 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5812 unsigned int rt = bits (insn, 12, 15);
5813 unsigned int rn = bits (insn, 16, 19);
5814 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5815
5816 if (!insn_references_pc (insn, 0x000ff00ful))
5817 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5818
5819 if (debug_displaced)
5820 fprintf_unfiltered (gdb_stdlog,
5821 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5822 load ? (size == 1 ? "ldrb" : "ldr")
5823 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5824 rt, rn,
5825 (unsigned long) insn);
5826
0f6f04ba
YQ
5827 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5828 usermode, rt, rm, rn);
7ff120b4 5829
bf9f652a 5830 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5831 {
5832 dsc->u.ldst.restore_r4 = 0;
5833
5834 if (immed)
5835 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5836 ->
5837 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5838 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5839 else
5840 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5841 ->
5842 {ldr,str}[b]<cond> r0, [r2, r3]. */
5843 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5844 }
5845 else
5846 {
5847 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5848 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5849 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5850 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5851 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5852 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5853 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5854
5855 /* As above. */
5856 if (immed)
5857 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5858 else
5859 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5860
cca44b1b
JB
5861 dsc->numinsns = 6;
5862 }
5863
5864 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5865
5866 return 0;
5867}
5868
5869/* Cleanup LDM instructions with fully-populated register list. This is an
5870 unfortunate corner case: it's impossible to implement correctly by modifying
5871 the instruction. The issue is as follows: we have an instruction,
5872
5873 ldm rN, {r0-r15}
5874
5875 which we must rewrite to avoid loading PC. A possible solution would be to
5876 do the load in two halves, something like (with suitable cleanup
5877 afterwards):
5878
5879 mov r8, rN
5880 ldm[id][ab] r8!, {r0-r7}
5881 str r7, <temp>
5882 ldm[id][ab] r8, {r7-r14}
5883 <bkpt>
5884
5885 but at present there's no suitable place for <temp>, since the scratch space
5886 is overwritten before the cleanup routine is called. For now, we simply
5887 emulate the instruction. */
5888
5889static void
5890cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5891 struct displaced_step_closure *dsc)
5892{
cca44b1b
JB
5893 int inc = dsc->u.block.increment;
5894 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5895 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5896 uint32_t regmask = dsc->u.block.regmask;
5897 int regno = inc ? 0 : 15;
5898 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5899 int exception_return = dsc->u.block.load && dsc->u.block.user
5900 && (regmask & 0x8000) != 0;
36073a92 5901 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5902 int do_transfer = condition_true (dsc->u.block.cond, status);
5903 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5904
5905 if (!do_transfer)
5906 return;
5907
5908 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5909 sensible we can do here. Complain loudly. */
5910 if (exception_return)
5911 error (_("Cannot single-step exception return"));
5912
5913 /* We don't handle any stores here for now. */
5914 gdb_assert (dsc->u.block.load != 0);
5915
5916 if (debug_displaced)
5917 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5918 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5919 dsc->u.block.increment ? "inc" : "dec",
5920 dsc->u.block.before ? "before" : "after");
5921
5922 while (regmask)
5923 {
5924 uint32_t memword;
5925
5926 if (inc)
bf9f652a 5927 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5928 regno++;
5929 else
5930 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5931 regno--;
5932
5933 xfer_addr += bump_before;
5934
5935 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5936 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5937
5938 xfer_addr += bump_after;
5939
5940 regmask &= ~(1 << regno);
5941 }
5942
5943 if (dsc->u.block.writeback)
5944 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5945 CANNOT_WRITE_PC);
5946}
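The bump_before/bump_after pair above compactly encodes the four block-transfer addressing modes; a hedged restatement (illustrative only, not GDB code):

/* Sketch of the addressing-mode arithmetic used by the LDM emulation:
   ldmia: bump after (+4)   ldmib: bump before (+4)
   ldmda: bump after (-4)   ldmdb: bump before (-4)  */
static void
sketch_block_bumps (int increment, int before,
                    int *bump_before, int *bump_after)
{
  int step = increment ? 4 : -4;

  *bump_before = before ? step : 0;
  *bump_after = before ? 0 : step;
}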
5947
5948/* Clean up an STM which included the PC in the register list. */
5949
5950static void
5951cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5952 struct displaced_step_closure *dsc)
5953{
36073a92 5954 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5955 int store_executed = condition_true (dsc->u.block.cond, status);
5956 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5957 CORE_ADDR stm_insn_addr;
5958 uint32_t pc_val;
5959 long offset;
5960 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5961
5962 /* If condition code fails, there's nothing else to do. */
5963 if (!store_executed)
5964 return;
5965
5966 if (dsc->u.block.increment)
5967 {
5968 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5969
5970 if (dsc->u.block.before)
5971 pc_stored_at += 4;
5972 }
5973 else
5974 {
5975 pc_stored_at = dsc->u.block.xfer_addr;
5976
5977 if (dsc->u.block.before)
5978 pc_stored_at -= 4;
5979 }
5980
5981 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5982 stm_insn_addr = dsc->scratch_base;
5983 offset = pc_val - stm_insn_addr;
5984
5985 if (debug_displaced)
5986 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5987 "STM instruction\n", offset);
5988
5989 /* Rewrite the stored PC to the proper value for the non-displaced original
5990 instruction. */
5991 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5992 dsc->insn_addr + offset);
5993}
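A small sketch of the fix-up above (names invented, not part of the file): because the copied STM runs at the scratch pad, the PC value it stores reveals the implementation-defined store offset, which is then re-applied to the original instruction address.

#include <stdint.h>

/* OBSERVED is the PC value the out-of-line STM actually stored,
   SCRATCH the scratch-pad address it ran at, ORIG the address of
   the original instruction.  */
static uint32_t
sketch_fixed_up_pc (uint32_t observed, uint32_t scratch, uint32_t orig)
{
  long store_offset = (long) observed - (long) scratch;  /* Typically 8 or 12.  */

  return orig + store_offset;  /* What the original STM would have stored.  */
}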
5994
5995/* Clean up an LDM which includes the PC in the register list. We clumped all
5996 the registers in the transferred list into a contiguous range r0...rX (to
5997 avoid loading PC directly and losing control of the debugged program), so we
5998 must undo that here. */
5999
6000static void
6e39997a 6001cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
6002 struct regcache *regs,
6003 struct displaced_step_closure *dsc)
6004{
36073a92 6005 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 6006 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 6007 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
6008 unsigned int regs_loaded = bitcount (mask);
6009 unsigned int num_to_shuffle = regs_loaded, clobbered;
6010
6011 /* The method employed here will fail if the register list is fully populated
6012 (we need to avoid loading PC directly). */
6013 gdb_assert (num_to_shuffle < 16);
6014
6015 if (!load_executed)
6016 return;
6017
6018 clobbered = (1 << num_to_shuffle) - 1;
6019
6020 while (num_to_shuffle > 0)
6021 {
6022 if ((mask & (1 << write_reg)) != 0)
6023 {
6024 unsigned int read_reg = num_to_shuffle - 1;
6025
6026 if (read_reg != write_reg)
6027 {
36073a92 6028 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
6029 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6030 if (debug_displaced)
6031 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6032 "loaded register r%d to r%d\n"), read_reg,
6033 write_reg);
6034 }
6035 else if (debug_displaced)
6036 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6037 "r%d already in the right place\n"),
6038 write_reg);
6039
6040 clobbered &= ~(1 << write_reg);
6041
6042 num_to_shuffle--;
6043 }
6044
6045 write_reg--;
6046 }
6047
6048 /* Restore any registers we scribbled over. */
6049 for (write_reg = 0; clobbered != 0; write_reg++)
6050 {
6051 if ((clobbered & (1 << write_reg)) != 0)
6052 {
6053 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6054 CANNOT_WRITE_PC);
6055 if (debug_displaced)
6056 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6057 "clobbered register r%d\n"), write_reg);
6058 clobbered &= ~(1 << write_reg);
6059 }
6060 }
6061
6062 /* Perform register writeback manually. */
6063 if (dsc->u.block.writeback)
6064 {
6065 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6066
6067 if (dsc->u.block.increment)
6068 new_rn_val += regs_loaded * 4;
6069 else
6070 new_rn_val -= regs_loaded * 4;
6071
6072 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6073 CANNOT_WRITE_PC);
6074 }
6075}
6076
6077/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6078 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6079
6080static int
7ff120b4
YQ
6081arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6082 struct regcache *regs,
6083 struct displaced_step_closure *dsc)
cca44b1b
JB
6084{
6085 int load = bit (insn, 20);
6086 int user = bit (insn, 22);
6087 int increment = bit (insn, 23);
6088 int before = bit (insn, 24);
6089 int writeback = bit (insn, 21);
6090 int rn = bits (insn, 16, 19);
cca44b1b 6091
0963b4bd
MS
6092 /* Block transfers which don't mention PC can be run directly
6093 out-of-line. */
bf9f652a 6094 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6095 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6096
bf9f652a 6097 if (rn == ARM_PC_REGNUM)
cca44b1b 6098 {
0963b4bd
MS
6099 warning (_("displaced: Unpredictable LDM or STM with "
6100 "base register r15"));
7ff120b4 6101 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6102 }
6103
6104 if (debug_displaced)
6105 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6106 "%.8lx\n", (unsigned long) insn);
6107
36073a92 6108 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6109 dsc->u.block.rn = rn;
6110
6111 dsc->u.block.load = load;
6112 dsc->u.block.user = user;
6113 dsc->u.block.increment = increment;
6114 dsc->u.block.before = before;
6115 dsc->u.block.writeback = writeback;
6116 dsc->u.block.cond = bits (insn, 28, 31);
6117
6118 dsc->u.block.regmask = insn & 0xffff;
6119
6120 if (load)
6121 {
6122 if ((insn & 0xffff) == 0xffff)
6123 {
6124 /* LDM with a fully-populated register list. This case is
6125 particularly tricky. Implement for now by fully emulating the
6126 instruction (which might not behave perfectly in all cases, but
6127 these instructions should be rare enough for that not to matter
6128 too much). */
6129 dsc->modinsn[0] = ARM_NOP;
6130
6131 dsc->cleanup = &cleanup_block_load_all;
6132 }
6133 else
6134 {
6135 /* LDM of a list of registers which includes PC. Implement by
6136 rewriting the list of registers to be transferred into a
6137 contiguous chunk r0...rX before doing the transfer, then shuffling
6138 registers into the correct places in the cleanup routine. */
6139 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6140 unsigned int num_in_list = bitcount (regmask), new_regmask;
6141 unsigned int i;
cca44b1b
JB
6142
6143 for (i = 0; i < num_in_list; i++)
36073a92 6144 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6145
6146 /* Writeback makes things complicated. We need to avoid clobbering
6147 the base register with one of the registers in our modified
6148 register list, but just using a different register can't work in
6149 all cases, e.g.:
6150
6151 ldm r14!, {r0-r13,pc}
6152
6153 which would need to be rewritten as:
6154
6155 ldm rN!, {r0-r14}
6156
6157 but that can't work, because there's no free register for N.
6158
6159 Solve this by turning off the writeback bit, and emulating
6160 writeback manually in the cleanup routine. */
6161
6162 if (writeback)
6163 insn &= ~(1 << 21);
6164
6165 new_regmask = (1 << num_in_list) - 1;
6166
6167 if (debug_displaced)
6168 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6169 "{..., pc}: original reg list %.4x, modified "
6170 "list %.4x\n"), rn, writeback ? "!" : "",
6171 (int) insn & 0xffff, new_regmask);
6172
6173 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6174
6175 dsc->cleanup = &cleanup_block_load_pc;
6176 }
6177 }
6178 else
6179 {
6180 /* STM of a list of registers which includes PC. Run the instruction
6181 as-is, but out of line: this will store the wrong value for the PC,
6182 so we must manually fix up the memory in the cleanup routine.
6183 Doing things this way has the advantage that we can auto-detect
6184 the offset of the PC write (which is architecture-dependent) in
6185 the cleanup routine. */
6186 dsc->modinsn[0] = insn;
6187
6188 dsc->cleanup = &cleanup_block_store_pc;
6189 }
6190
6191 return 0;
6192}
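A worked illustration of the register-list rewrite above (operands assumed): `ldmia r2!, {r0, r5, pc}' has regmask 0x8021 with three registers set, so the out-of-line copy uses the contiguous mask 0x0007 ({r0-r2}) with writeback suppressed, and cleanup_block_load_pc afterwards moves the three loaded words into r0, r5 and the PC.

/* Sketch only: contiguous register list substituted for the original.  */
static unsigned int
sketch_contiguous_mask (unsigned int regmask)
{
  unsigned int n = 0;

  while (regmask)            /* Population count, as bitcount () does.  */
    {
      n += regmask & 1;
      regmask >>= 1;
    }

  return (1u << n) - 1;      /* r0 .. r(n-1).  */
}

/* sketch_contiguous_mask (0x8021) == 0x0007.  */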
6193
34518530
YQ
6194static int
6195thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6196 struct regcache *regs,
6197 struct displaced_step_closure *dsc)
cca44b1b 6198{
34518530
YQ
6199 int rn = bits (insn1, 0, 3);
6200 int load = bit (insn1, 4);
6201 int writeback = bit (insn1, 5);
cca44b1b 6202
34518530
YQ
6203 /* Block transfers which don't mention PC can be run directly
6204 out-of-line. */
6205 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6206 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6207
34518530
YQ
6208 if (rn == ARM_PC_REGNUM)
6209 {
6210 warning (_("displaced: Unpredictable LDM or STM with "
6211 "base register r15"));
6212 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6213 "unpredictable ldm/stm", dsc);
6214 }
cca44b1b
JB
6215
6216 if (debug_displaced)
34518530
YQ
6217 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6218 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6219
34518530
YQ
 6220 /* Clear bit 13, since it should always be zero. */
6221 dsc->u.block.regmask = (insn2 & 0xdfff);
6222 dsc->u.block.rn = rn;
cca44b1b 6223
34518530
YQ
6224 dsc->u.block.load = load;
6225 dsc->u.block.user = 0;
6226 dsc->u.block.increment = bit (insn1, 7);
6227 dsc->u.block.before = bit (insn1, 8);
6228 dsc->u.block.writeback = writeback;
6229 dsc->u.block.cond = INST_AL;
6230 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6231
34518530
YQ
6232 if (load)
6233 {
6234 if (dsc->u.block.regmask == 0xffff)
6235 {
6236 /* This branch is impossible to happen. */
6237 gdb_assert (0);
6238 }
6239 else
6240 {
6241 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6242 unsigned int num_in_list = bitcount (regmask), new_regmask;
6243 unsigned int i;
34518530
YQ
6244
6245 for (i = 0; i < num_in_list; i++)
6246 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6247
6248 if (writeback)
6249 insn1 &= ~(1 << 5);
6250
6251 new_regmask = (1 << num_in_list) - 1;
6252
6253 if (debug_displaced)
6254 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6255 "{..., pc}: original reg list %.4x, modified "
6256 "list %.4x\n"), rn, writeback ? "!" : "",
6257 (int) dsc->u.block.regmask, new_regmask);
6258
6259 dsc->modinsn[0] = insn1;
6260 dsc->modinsn[1] = (new_regmask & 0xffff);
6261 dsc->numinsns = 2;
6262
6263 dsc->cleanup = &cleanup_block_load_pc;
6264 }
6265 }
6266 else
6267 {
6268 dsc->modinsn[0] = insn1;
6269 dsc->modinsn[1] = insn2;
6270 dsc->numinsns = 2;
6271 dsc->cleanup = &cleanup_block_store_pc;
6272 }
6273 return 0;
6274}
6275
d9311bfa
AT
6276/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6277 This is used to avoid a dependency on BFD's bfd_endian enum. */
6278
6279ULONGEST
6280arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6281 int byte_order)
6282{
5f2dfcfd
AT
6283 return read_memory_unsigned_integer (memaddr, len,
6284 (enum bfd_endian) byte_order);
d9311bfa
AT
6285}
6286
6287/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6288
6289CORE_ADDR
6290arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6291 CORE_ADDR val)
6292{
6293 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6294}
6295
6296/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6297
e7cf25a8 6298static CORE_ADDR
553cb527 6299arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6300{
d9311bfa
AT
6301 return 0;
6302}
6303
6304/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6305
6306int
6307arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6308{
6309 return arm_is_thumb (self->regcache);
6310}
6311
6312/* single_step() is called just before we want to resume the inferior,
6313 if we want to single-step it but there is no hardware or kernel
 6314 single-step support. We find the targets of the upcoming instructions
 6315 and set breakpoints on them. */
6316
6317int
6318arm_software_single_step (struct frame_info *frame)
6319{
6320 struct regcache *regcache = get_current_regcache ();
6321 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6322 struct address_space *aspace = get_regcache_aspace (regcache);
6323 struct arm_get_next_pcs next_pcs_ctx;
6324 CORE_ADDR pc;
6325 int i;
6326 VEC (CORE_ADDR) *next_pcs = NULL;
6327 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6328
6329 arm_get_next_pcs_ctor (&next_pcs_ctx,
6330 &arm_get_next_pcs_ops,
6331 gdbarch_byte_order (gdbarch),
6332 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6333 0,
d9311bfa
AT
6334 regcache);
6335
4d18591b 6336 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa
AT
6337
6338 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6339 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6340
6341 do_cleanups (old_chain);
6342
6343 return 1;
6344}
6345
34518530
YQ
6346/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6347 for Linux, where some SVC instructions must be treated specially. */
6348
6349static void
6350cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6351 struct displaced_step_closure *dsc)
6352{
6353 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6354
6355 if (debug_displaced)
6356 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6357 "%.8lx\n", (unsigned long) resume_addr);
6358
6359 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6360}
6361
6362
6363/* Common copy routine for the svc instruction. */
6364
6365static int
6366install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6367 struct displaced_step_closure *dsc)
6368{
6369 /* Preparation: none.
6370 Insn: unmodified svc.
6371 Cleanup: pc <- insn_addr + insn_size. */
6372
6373 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6374 instruction. */
6375 dsc->wrote_to_pc = 1;
6376
6377 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6378 if (dsc->u.svc.copy_svc_os)
6379 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6380 else
6381 {
6382 dsc->cleanup = &cleanup_svc;
6383 return 0;
6384 }
34518530
YQ
6385}
6386
6387static int
6388arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6389 struct regcache *regs, struct displaced_step_closure *dsc)
6390{
6391
6392 if (debug_displaced)
6393 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6394 (unsigned long) insn);
6395
6396 dsc->modinsn[0] = insn;
6397
6398 return install_svc (gdbarch, regs, dsc);
6399}
6400
6401static int
6402thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6403 struct regcache *regs, struct displaced_step_closure *dsc)
6404{
6405
6406 if (debug_displaced)
6407 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6408 insn);
bd18283a 6409
34518530
YQ
6410 dsc->modinsn[0] = insn;
6411
6412 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6413}
6414
6415/* Copy undefined instructions. */
6416
6417static int
7ff120b4
YQ
6418arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6419 struct displaced_step_closure *dsc)
cca44b1b
JB
6420{
6421 if (debug_displaced)
0963b4bd
MS
6422 fprintf_unfiltered (gdb_stdlog,
6423 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6424 (unsigned long) insn);
6425
6426 dsc->modinsn[0] = insn;
6427
6428 return 0;
6429}
6430
34518530
YQ
6431static int
6432thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6433 struct displaced_step_closure *dsc)
6434{
6435
6436 if (debug_displaced)
6437 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6438 "%.4x %.4x\n", (unsigned short) insn1,
6439 (unsigned short) insn2);
6440
6441 dsc->modinsn[0] = insn1;
6442 dsc->modinsn[1] = insn2;
6443 dsc->numinsns = 2;
6444
6445 return 0;
6446}
6447
cca44b1b
JB
6448/* Copy unpredictable instructions. */
6449
6450static int
7ff120b4
YQ
6451arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6452 struct displaced_step_closure *dsc)
cca44b1b
JB
6453{
6454 if (debug_displaced)
6455 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6456 "%.8lx\n", (unsigned long) insn);
6457
6458 dsc->modinsn[0] = insn;
6459
6460 return 0;
6461}
6462
6463/* The decode_* functions are instruction decoding helpers. They mostly follow
6464 the presentation in the ARM ARM. */
6465
6466static int
7ff120b4
YQ
6467arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6468 struct regcache *regs,
6469 struct displaced_step_closure *dsc)
cca44b1b
JB
6470{
6471 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6472 unsigned int rn = bits (insn, 16, 19);
6473
6474 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6475 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6476 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6477 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6478 else if ((op1 & 0x60) == 0x20)
7ff120b4 6479 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6480 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6481 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6482 dsc);
cca44b1b 6483 else if ((op1 & 0x77) == 0x41)
7ff120b4 6484 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6485 else if ((op1 & 0x77) == 0x45)
7ff120b4 6486 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6487 else if ((op1 & 0x77) == 0x51)
6488 {
6489 if (rn != 0xf)
7ff120b4 6490 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6491 else
7ff120b4 6492 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6493 }
6494 else if ((op1 & 0x77) == 0x55)
7ff120b4 6495 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6496 else if (op1 == 0x57)
6497 switch (op2)
6498 {
7ff120b4
YQ
6499 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6500 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6501 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6502 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6503 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6504 }
6505 else if ((op1 & 0x63) == 0x43)
7ff120b4 6506 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6507 else if ((op2 & 0x1) == 0x0)
6508 switch (op1 & ~0x80)
6509 {
6510 case 0x61:
7ff120b4 6511 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6512 case 0x65:
7ff120b4 6513 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6514 case 0x71: case 0x75:
6515 /* pld/pldw reg. */
7ff120b4 6516 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6517 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6518 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6519 default:
7ff120b4 6520 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6521 }
6522 else
7ff120b4 6523 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6524}
6525
6526static int
7ff120b4
YQ
6527arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6528 struct regcache *regs,
6529 struct displaced_step_closure *dsc)
cca44b1b
JB
6530{
6531 if (bit (insn, 27) == 0)
7ff120b4 6532 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6533 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6534 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6535 {
6536 case 0x0: case 0x2:
7ff120b4 6537 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6538
6539 case 0x1: case 0x3:
7ff120b4 6540 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6541
6542 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6543 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6544
6545 case 0x8:
6546 switch ((insn & 0xe00000) >> 21)
6547 {
6548 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6549 /* stc/stc2. */
7ff120b4 6550 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6551
6552 case 0x2:
7ff120b4 6553 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6554
6555 default:
7ff120b4 6556 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6557 }
6558
6559 case 0x9:
6560 {
6561 int rn_f = (bits (insn, 16, 19) == 0xf);
6562 switch ((insn & 0xe00000) >> 21)
6563 {
6564 case 0x1: case 0x3:
6565 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6566 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6567 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6568
6569 case 0x2:
7ff120b4 6570 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6571
6572 case 0x4: case 0x5: case 0x6: case 0x7:
6573 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6574 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6575 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6576
6577 default:
7ff120b4 6578 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6579 }
6580 }
6581
6582 case 0xa:
7ff120b4 6583 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6584
6585 case 0xb:
6586 if (bits (insn, 16, 19) == 0xf)
6587 /* ldc/ldc2 lit. */
7ff120b4 6588 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6589 else
7ff120b4 6590 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6591
6592 case 0xc:
6593 if (bit (insn, 4))
7ff120b4 6594 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6595 else
7ff120b4 6596 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6597
6598 case 0xd:
6599 if (bit (insn, 4))
7ff120b4 6600 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6601 else
7ff120b4 6602 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6603
6604 default:
7ff120b4 6605 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6606 }
6607}
6608
6609/* Decode miscellaneous instructions in dp/misc encoding space. */
6610
6611static int
7ff120b4
YQ
6612arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6613 struct regcache *regs,
6614 struct displaced_step_closure *dsc)
cca44b1b
JB
6615{
6616 unsigned int op2 = bits (insn, 4, 6);
6617 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6618
6619 switch (op2)
6620 {
6621 case 0x0:
7ff120b4 6622 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6623
6624 case 0x1:
6625 if (op == 0x1) /* bx. */
7ff120b4 6626 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6627 else if (op == 0x3)
7ff120b4 6628 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6629 else
7ff120b4 6630 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6631
6632 case 0x2:
6633 if (op == 0x1)
6634 /* Not really supported. */
7ff120b4 6635 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6636 else
7ff120b4 6637 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6638
6639 case 0x3:
6640 if (op == 0x1)
7ff120b4 6641 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6642 regs, dsc); /* blx register. */
cca44b1b 6643 else
7ff120b4 6644 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6645
6646 case 0x5:
7ff120b4 6647 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6648
6649 case 0x7:
6650 if (op == 0x1)
7ff120b4 6651 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6652 else if (op == 0x3)
6653 /* Not really supported. */
7ff120b4 6654 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6655
6656 default:
7ff120b4 6657 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6658 }
6659}
6660
6661static int
7ff120b4
YQ
6662arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6663 struct regcache *regs,
6664 struct displaced_step_closure *dsc)
cca44b1b
JB
6665{
6666 if (bit (insn, 25))
6667 switch (bits (insn, 20, 24))
6668 {
6669 case 0x10:
7ff120b4 6670 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6671
6672 case 0x14:
7ff120b4 6673 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6674
6675 case 0x12: case 0x16:
7ff120b4 6676 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6677
6678 default:
7ff120b4 6679 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6680 }
6681 else
6682 {
6683 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6684
6685 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6686 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6687 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6688 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6689 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6690 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6691 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6692 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6693 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6694 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6695 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6696 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6697 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6698 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6699 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6700 dsc);
cca44b1b
JB
6701 }
6702
6703 /* Should be unreachable. */
6704 return 1;
6705}
6706
6707static int
7ff120b4
YQ
6708arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6709 struct regcache *regs,
6710 struct displaced_step_closure *dsc)
cca44b1b
JB
6711{
6712 int a = bit (insn, 25), b = bit (insn, 4);
6713 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6714
6715 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6716 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6717 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6718 else if ((!a && (op1 & 0x17) == 0x02)
6719 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6720 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6721 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6722 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6723 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6724 else if ((!a && (op1 & 0x17) == 0x03)
6725 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6726 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6727 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6728 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6729 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6730 else if ((!a && (op1 & 0x17) == 0x06)
6731 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6732 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6733 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6734 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6735 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6736 else if ((!a && (op1 & 0x17) == 0x07)
6737 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6738 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6739
6740 /* Should be unreachable. */
6741 return 1;
6742}
6743
6744static int
7ff120b4
YQ
6745arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6746 struct displaced_step_closure *dsc)
cca44b1b
JB
6747{
6748 switch (bits (insn, 20, 24))
6749 {
6750 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6751 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6752
6753 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6754 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6755
6756 case 0x08: case 0x09: case 0x0a: case 0x0b:
6757 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6758 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6759 "decode/pack/unpack/saturate/reverse", dsc);
6760
6761 case 0x18:
6762 if (bits (insn, 5, 7) == 0) /* op2. */
6763 {
6764 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6765 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6766 else
7ff120b4 6767 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6768 }
6769 else
7ff120b4 6770 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6771
6772 case 0x1a: case 0x1b:
6773 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6774 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6775 else
7ff120b4 6776 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6777
6778 case 0x1c: case 0x1d:
6779 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6780 {
6781 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6782 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6783 else
7ff120b4 6784 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6785 }
6786 else
7ff120b4 6787 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6788
6789 case 0x1e: case 0x1f:
6790 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6791 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6792 else
7ff120b4 6793 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6794 }
6795
6796 /* Should be unreachable. */
6797 return 1;
6798}
6799
6800static int
615234c1 6801arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4
YQ
6802 struct regcache *regs,
6803 struct displaced_step_closure *dsc)
cca44b1b
JB
6804{
6805 if (bit (insn, 25))
7ff120b4 6806 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6807 else
7ff120b4 6808 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6809}
6810
6811static int
7ff120b4
YQ
6812arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6813 struct regcache *regs,
6814 struct displaced_step_closure *dsc)
cca44b1b
JB
6815{
6816 unsigned int opcode = bits (insn, 20, 24);
6817
6818 switch (opcode)
6819 {
6820 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6821 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6822
6823 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6824 case 0x12: case 0x16:
7ff120b4 6825 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6826
6827 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6828 case 0x13: case 0x17:
7ff120b4 6829 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6830
6831 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6832 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6833 /* Note: no writeback for these instructions. Bit 25 will always be
6834 zero though (via caller), so the following works OK. */
7ff120b4 6835 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6836 }
6837
6838 /* Should be unreachable. */
6839 return 1;
6840}
6841
34518530
YQ
6842/* Decode shifted register instructions. */
6843
6844static int
6845thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6846 uint16_t insn2, struct regcache *regs,
6847 struct displaced_step_closure *dsc)
6848{
6849 /* PC is only allowed to be used in the MOV instruction. */
6850
6851 unsigned int op = bits (insn1, 5, 8);
6852 unsigned int rn = bits (insn1, 0, 3);
6853
6854 if (op == 0x2 && rn == 0xf) /* MOV */
6855 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6856 else
6857 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6858 "dp (shift reg)", dsc);
6859}
6860
6861
6862/* Decode extension register load/store. Exactly the same as
6863 arm_decode_ext_reg_ld_st. */
6864
6865static int
6866thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6867 uint16_t insn2, struct regcache *regs,
6868 struct displaced_step_closure *dsc)
6869{
6870 unsigned int opcode = bits (insn1, 4, 8);
6871
6872 switch (opcode)
6873 {
6874 case 0x04: case 0x05:
6875 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6876 "vfp/neon vmov", dsc);
6877
6878 case 0x08: case 0x0c: /* 01x00 */
6879 case 0x0a: case 0x0e: /* 01x10 */
6880 case 0x12: case 0x16: /* 10x10 */
6881 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6882 "vfp/neon vstm/vpush", dsc);
6883
6884 case 0x09: case 0x0d: /* 01x01 */
6885 case 0x0b: case 0x0f: /* 01x11 */
6886 case 0x13: case 0x17: /* 10x11 */
6887 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6888 "vfp/neon vldm/vpop", dsc);
6889
6890 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6891 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6892 "vstr", dsc);
6893 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6894 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6895 }
6896
6897 /* Should be unreachable. */
6898 return 1;
6899}
6900
cca44b1b 6901static int
12545665 6902arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6903 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6904{
6905 unsigned int op1 = bits (insn, 20, 25);
6906 int op = bit (insn, 4);
6907 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6908
6909 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6910 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6911 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6912 && (coproc & 0xe) != 0xa)
6913 /* stc/stc2. */
7ff120b4 6914 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6915 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6916 && (coproc & 0xe) != 0xa)
6917 /* ldc/ldc2 imm/lit. */
7ff120b4 6918 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6919 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6920 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6921 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6922 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6923 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6924 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6925 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6926 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6927 else if ((op1 & 0x30) == 0x20 && !op)
6928 {
6929 if ((coproc & 0xe) == 0xa)
7ff120b4 6930 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6931 else
7ff120b4 6932 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6933 }
6934 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6935 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6936 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6937 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6938 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6939 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6940 else if ((op1 & 0x30) == 0x30)
7ff120b4 6941 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6942 else
7ff120b4 6943 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6944}
6945
34518530
YQ
6946static int
6947thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6948 uint16_t insn2, struct regcache *regs,
6949 struct displaced_step_closure *dsc)
6950{
6951 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6952 unsigned int bit_5_8 = bits (insn1, 5, 8);
6953 unsigned int bit_9 = bit (insn1, 9);
6954 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6955
6956 if (bit_9 == 0)
6957 {
6958 if (bit_5_8 == 2)
6959 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6960 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6961 dsc);
6962 else if (bit_5_8 == 0) /* UNDEFINED. */
6963 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6964 else
6965 {
6966 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
6967 if ((coproc & 0xe) == 0xa)
6968 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6969 dsc);
6970 else /* coproc is not 101x. */
6971 {
6972 if (bit_4 == 0) /* STC/STC2. */
6973 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6974 "stc/stc2", dsc);
6975 else /* LDC/LDC2 {literal, immediate}. */
6976 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6977 regs, dsc);
6978 }
6979 }
6980 }
6981 else
6982 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6983
6984 return 0;
6985}
6986
6987static void
6988install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6989 struct displaced_step_closure *dsc, int rd)
6990{
6991 /* ADR Rd, #imm
6992
6993 Rewrite as:
6994
6995 Preparation: Rd <- PC
6996 Insn: ADD Rd, #imm
6997 Cleanup: Null.
6998 */
6999
7000 /* Rd <- PC */
7001 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7002 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7003}
7004
7005static int
7006thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7007 struct displaced_step_closure *dsc,
7008 int rd, unsigned int imm)
7009{
7010
7011 /* Encoding T2: ADDS Rd, #imm */
7012 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7013
7014 install_pc_relative (gdbarch, regs, dsc, rd);
7015
7016 return 0;
7017}
7018
7019static int
7020thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7021 struct regcache *regs,
7022 struct displaced_step_closure *dsc)
7023{
7024 unsigned int rd = bits (insn, 8, 10);
7025 unsigned int imm8 = bits (insn, 0, 7);
7026
7027 if (debug_displaced)
7028 fprintf_unfiltered (gdb_stdlog,
7029 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7030 rd, imm8, insn);
7031
7032 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7033}
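/* Illustration of the 16-bit ADR rewrite above (values are made up, not
   taken from any particular program): for an ADR whose Rd field is 5 and
   whose immediate field is 0x20, executed from address 0x4000 in Thumb
   state, displaced_read_reg yields the architectural PC value 0x4004.
   That value is written into r5 up front, and the instruction placed in
   the scratch pad is 0x3520 (ADDS r5, #0x20), so the addition is performed
   against the original PC rather than against the scratch-pad address.  */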
7034
7035static int
7036thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7037 uint16_t insn2, struct regcache *regs,
7038 struct displaced_step_closure *dsc)
7039{
7040 unsigned int rd = bits (insn2, 8, 11);
7041 /* The immediate field has the same encoding in ADR, ADD and SUB, so we
7042 simply extract the raw immediate encoding rather than computing the
7043 immediate value. When generating the ADD or SUB instruction, that raw
7044 encoding can then be ORed directly into the instruction. */
7045 unsigned int imm_3_8 = insn2 & 0x70ff;
7046 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7047
7048 if (debug_displaced)
7049 fprintf_unfiltered (gdb_stdlog,
7050 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7051 rd, imm_i, imm_3_8, insn1, insn2);
7052
7053 if (bit (insn1, 7)) /* Encoding T2 */
7054 {
7055 /* Generate SUB (immediate, encoding T3): SUB Rd, Rd, #imm. */
7056 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7057 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7058 }
7059 else /* Encoding T3 */
7060 {
7061 /* Generate ADD (immediate, encoding T3): ADD Rd, Rd, #imm. */
7062 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7063 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7064 }
7065 dsc->numinsns = 2;
7066
7067 install_pc_relative (gdbarch, regs, dsc, rd);
7068
7069 return 0;
7070}
7071
7072static int
615234c1 7073thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7074 struct regcache *regs,
7075 struct displaced_step_closure *dsc)
7076{
7077 unsigned int rt = bits (insn1, 8, 10);
7078 unsigned int pc;
7079 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
7080
7081 /* LDR Rd, #imm8
7082
7083 Rewrite as:
7084
7085 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7086
7087 Insn: LDR R0, [R2, R3];
7088 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7089
7090 if (debug_displaced)
7091 fprintf_unfiltered (gdb_stdlog,
7092 "displaced: copying thumb ldr r%d [pc #%d]\n"
7093 , rt, imm8);
7094
7095 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7096 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7097 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7098 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7099 /* The assembler calculates the required value of the offset from the
7100 Align(PC,4) value of this instruction to the label. */
7101 pc = pc & 0xfffffffc;
7102
7103 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7104 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7105
7106 dsc->rd = rt;
7107 dsc->u.ldst.xfersize = 4;
7108 dsc->u.ldst.rn = 0;
7109 dsc->u.ldst.immed = 0;
7110 dsc->u.ldst.writeback = 0;
7111 dsc->u.ldst.restore_r4 = 0;
7112
7113 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7114
7115 dsc->cleanup = &cleanup_load;
7116
7117 return 0;
7118}
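/* A sketch of the sequence above with illustrative values: for
   "ldr r3, [pc, #16]" at 0x2000, rt is 3 and imm8 is 16.  r0, r2 and r3
   are saved, r2 is loaded with Align (0x2004, 4) = 0x2004 and r3 with 16,
   and the scratch pad executes 0x58d0 (ldr r0, [r2, r3]), which loads from
   0x2014.  cleanup_load then moves the loaded value from r0 into r3 and
   restores the saved registers.  */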
7119
7120/* Copy Thumb cbnz/cbz instruction. */
7121
7122static int
7123thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7124 struct regcache *regs,
7125 struct displaced_step_closure *dsc)
7126{
7127 int non_zero = bit (insn1, 11);
7128 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7129 CORE_ADDR from = dsc->insn_addr;
7130 int rn = bits (insn1, 0, 2);
7131 int rn_val = displaced_read_reg (regs, dsc, rn);
7132
7133 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7134 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7135 true, set it to INST_AL so cleanup_branch knows the branch is taken;
7136 otherwise, leave it alone and cleanup_branch will do nothing. */
7137 if (dsc->u.branch.cond)
7138 {
7139 dsc->u.branch.cond = INST_AL;
7140 dsc->u.branch.dest = from + 4 + imm5;
7141 }
7142 else
7143 dsc->u.branch.dest = from + 2;
7144
7145 dsc->u.branch.link = 0;
7146 dsc->u.branch.exchange = 0;
7147
7148 if (debug_displaced)
7149 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7150 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7151 rn, rn_val, insn1, dsc->u.branch.dest);
7152
7153 dsc->modinsn[0] = THUMB_NOP;
7154
7155 dsc->cleanup = &cleanup_branch;
7156 return 0;
7157}
7158
7159/* Copy Table Branch Byte/Halfword */
7160static int
7161thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7162 uint16_t insn2, struct regcache *regs,
7163 struct displaced_step_closure *dsc)
7164{
7165 ULONGEST rn_val, rm_val;
7166 int is_tbh = bit (insn2, 4);
7167 CORE_ADDR halfwords = 0;
7168 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7169
7170 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7171 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7172
7173 if (is_tbh)
7174 {
7175 gdb_byte buf[2];
7176
7177 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7178 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7179 }
7180 else
7181 {
7182 gdb_byte buf[1];
7183
7184 target_read_memory (rn_val + rm_val, buf, 1);
7185 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7186 }
7187
7188 if (debug_displaced)
7189 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7190 " table value 0x%x\n", is_tbh ? "tbh" : "tbb",
7191 (unsigned int) rn_val, (unsigned int) rm_val,
7192 (unsigned int) halfwords);
7193
7194 dsc->u.branch.cond = INST_AL;
7195 dsc->u.branch.link = 0;
7196 dsc->u.branch.exchange = 0;
7197 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7198
7199 dsc->cleanup = &cleanup_branch;
7200
7201 return 0;
7202}
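/* Worked example (illustrative values): for "tbb [r0, r1]" at 0x3000 with
   r0 = 0x5000 and r1 = 2, the byte at 0x5002 is read; if that byte is 0x0a,
   the branch destination recorded for cleanup_branch becomes
   0x3000 + 4 + 2 * 0x0a = 0x3018.  */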
7203
7204static void
7205cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7206 struct displaced_step_closure *dsc)
7207{
7208 /* PC <- r7 */
7209 int val = displaced_read_reg (regs, dsc, 7);
7210 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7211
7212 /* r7 <- r8 */
7213 val = displaced_read_reg (regs, dsc, 8);
7214 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7215
7216 /* r8 <- tmp[0] */
7217 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7218
7219}
7220
7221static int
615234c1 7222thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7223 struct regcache *regs,
7224 struct displaced_step_closure *dsc)
7225{
7226 dsc->u.block.regmask = insn1 & 0x00ff;
7227
7228 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7229 to:
7230
7231 (1) register list is full, that is, r0-r7 are used.
7232 Prepare: tmp[0] <- r8
7233
7234 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7235 MOV r8, r7; Move value of r7 to r8;
7236 POP {r7}; Store PC value into r7.
7237
7238 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7239
7240 (2) register list is not full, supposing there are N registers in
7241 register list (except PC, 0 <= N <= 7).
7242 Prepare: for each i, 0 - N, tmp[i] <- ri.
7243
7244 POP {r0, r1, ...., rN};
7245
7246 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7247 from tmp[] properly.
7248 */
7249 if (debug_displaced)
7250 fprintf_unfiltered (gdb_stdlog,
7251 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7252 dsc->u.block.regmask, insn1);
7253
7254 if (dsc->u.block.regmask == 0xff)
7255 {
7256 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7257
7258 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7259 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7260 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7261
7262 dsc->numinsns = 3;
7263 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7264 }
7265 else
7266 {
7267 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7268 unsigned int i;
7269 unsigned int new_regmask;
34518530
YQ
7270
7271 for (i = 0; i < num_in_list + 1; i++)
7272 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7273
7274 new_regmask = (1 << (num_in_list + 1)) - 1;
7275
7276 if (debug_displaced)
7277 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7278 "{..., pc}: original reg list %.4x,"
7279 " modified list %.4x\n"),
7280 (int) dsc->u.block.regmask, new_regmask);
7281
7282 dsc->u.block.regmask |= 0x8000;
7283 dsc->u.block.writeback = 0;
7284 dsc->u.block.cond = INST_AL;
7285
7286 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7287
7288 dsc->cleanup = &cleanup_block_load_pc;
7289 }
7290
7291 return 0;
7292}
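/* Concrete instance of case (1) above (illustrative): the original
   instruction "pop {r0-r7, pc}" is 0xbdff.  Masking with 0xfeff drops PC
   from the register list, giving 0xbcff (pop {r0-r7}); 0x46b8 is
   "mov r8, r7" and 0xbc80 is "pop {r7}", so after the three scratch-pad
   instructions r7 holds the popped return address.
   cleanup_pop_pc_16bit_all then moves it into the PC, restores r7 from r8
   and r8 from the saved temporary.  */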
7293
7294static void
7295thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7296 struct regcache *regs,
7297 struct displaced_step_closure *dsc)
7298{
7299 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7300 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7301 int err = 0;
7302
7303 /* 16-bit thumb instructions. */
7304 switch (op_bit_12_15)
7305 {
7306 /* Shift (immediate), add, subtract, move and compare. */
7307 case 0: case 1: case 2: case 3:
7308 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7309 "shift/add/sub/mov/cmp",
7310 dsc);
7311 break;
7312 case 4:
7313 switch (op_bit_10_11)
7314 {
7315 case 0: /* Data-processing */
7316 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7317 "data-processing",
7318 dsc);
7319 break;
7320 case 1: /* Special data instructions and branch and exchange. */
7321 {
7322 unsigned short op = bits (insn1, 7, 9);
7323 if (op == 6 || op == 7) /* BX or BLX */
7324 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7325 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7326 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7327 else
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7329 dsc);
7330 }
7331 break;
7332 default: /* LDR (literal) */
7333 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7334 }
7335 break;
7336 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7337 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7338 break;
7339 case 10:
7340 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7341 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7342 else /* Generate SP-relative address */
7343 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7344 break;
7345 case 11: /* Misc 16-bit instructions */
7346 {
7347 switch (bits (insn1, 8, 11))
7348 {
7349 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7350 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7351 break;
7352 case 12: case 13: /* POP */
7353 if (bit (insn1, 8)) /* PC is in register list. */
7354 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7355 else
7356 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7357 break;
7358 case 15: /* If-Then, and hints */
7359 if (bits (insn1, 0, 3))
7360 /* If-Then makes up to four following instructions conditional.
7361 The IT instruction itself is not conditional, so handle it as an
7362 ordinary unmodified instruction. */
7363 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7364 dsc);
7365 else
7366 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7367 break;
7368 default:
7369 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7370 }
7371 }
7372 break;
7373 case 12:
7374 if (op_bit_10_11 < 2) /* Store multiple registers */
7375 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7376 else /* Load multiple registers */
7377 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7378 break;
7379 case 13: /* Conditional branch and supervisor call */
7380 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7381 err = thumb_copy_b (gdbarch, insn1, dsc);
7382 else
7383 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7384 break;
7385 case 14: /* Unconditional branch */
7386 err = thumb_copy_b (gdbarch, insn1, dsc);
7387 break;
7388 default:
7389 err = 1;
7390 }
7391
7392 if (err)
7393 internal_error (__FILE__, __LINE__,
7394 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7395}
7396
7397static int
7398decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7399 uint16_t insn1, uint16_t insn2,
7400 struct regcache *regs,
7401 struct displaced_step_closure *dsc)
7402{
7403 int rt = bits (insn2, 12, 15);
7404 int rn = bits (insn1, 0, 3);
7405 int op1 = bits (insn1, 7, 8);
34518530
YQ
7406
7407 switch (bits (insn1, 5, 6))
7408 {
7409 case 0: /* Load byte and memory hints */
7410 if (rt == 0xf) /* PLD/PLI */
7411 {
7412 if (rn == 0xf)
7413 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7414 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7415 else
7416 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7417 "pli/pld", dsc);
7418 }
7419 else
7420 {
7421 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7422 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7423 1);
7424 else
7425 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7426 "ldrb{reg, immediate}/ldrbt",
7427 dsc);
7428 }
7429
7430 break;
7431 case 1: /* Load halfword and memory hints. */
7432 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7433 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7434 "pld/unalloc memhint", dsc);
7435 else
7436 {
7437 if (rn == 0xf)
7438 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7439 2);
7440 else
7441 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7442 "ldrh/ldrht", dsc);
7443 }
7444 break;
7445 case 2: /* Load word */
7446 {
7447 int insn2_bit_8_11 = bits (insn2, 8, 11);
7448
7449 if (rn == 0xf)
7450 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7451 else if (op1 == 0x1) /* Encoding T3 */
7452 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7453 0, 1);
7454 else /* op1 == 0x0 */
7455 {
7456 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7457 /* LDR (immediate) */
7458 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7459 dsc, bit (insn2, 8), 1);
7460 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7461 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7462 "ldrt", dsc);
7463 else
7464 /* LDR (register) */
7465 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7466 dsc, 0, 0);
7467 }
7468 break;
7469 }
7470 default:
7471 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7472 break;
7473 }
7474 return 0;
7475}
7476
7477static void
7478thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7479 uint16_t insn2, struct regcache *regs,
7480 struct displaced_step_closure *dsc)
7481{
7482 int err = 0;
7483 unsigned short op = bit (insn2, 15);
7484 unsigned int op1 = bits (insn1, 11, 12);
7485
7486 switch (op1)
7487 {
7488 case 1:
7489 {
7490 switch (bits (insn1, 9, 10))
7491 {
7492 case 0:
7493 if (bit (insn1, 6))
7494 {
7495 /* Load/store {dual, exclusive}, table branch. */
7496 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7497 && bits (insn2, 5, 7) == 0)
7498 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7499 dsc);
7500 else
7501 /* PC is not allowed to be used in load/store {dual, exclusive}
7502 instructions. */
7503 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7504 "load/store dual/ex", dsc);
7505 }
7506 else /* load/store multiple */
7507 {
7508 switch (bits (insn1, 7, 8))
7509 {
7510 case 0: case 3: /* SRS, RFE */
7511 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7512 "srs/rfe", dsc);
7513 break;
7514 case 1: case 2: /* LDM/STM/PUSH/POP */
7515 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7516 break;
7517 }
7518 }
7519 break;
7520
7521 case 1:
7522 /* Data-processing (shift register). */
7523 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7524 dsc);
7525 break;
7526 default: /* Coprocessor instructions. */
7527 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7528 break;
7529 }
7530 break;
7531 }
7532 case 2: /* op1 = 2 */
7533 if (op) /* Branch and misc control. */
7534 {
7535 if (bit (insn2, 14) /* BLX/BL */
7536 || bit (insn2, 12) /* Unconditional branch */
7537 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7538 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7539 else
7540 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7541 "misc ctrl", dsc);
7542 }
7543 else
7544 {
7545 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7546 {
7547 int op = bits (insn1, 4, 8);
7548 int rn = bits (insn1, 0, 3);
7549 if ((op == 0 || op == 0xa) && rn == 0xf)
7550 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7551 regs, dsc);
7552 else
7553 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7554 "dp/pb", dsc);
7555 }
7556 else /* Data processing (modified immediate) */
7557 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7558 "dp/mi", dsc);
7559 }
7560 break;
7561 case 3: /* op1 = 3 */
7562 switch (bits (insn1, 9, 10))
7563 {
7564 case 0:
7565 if (bit (insn1, 4))
7566 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7567 regs, dsc);
7568 else /* NEON Load/Store and Store single data item */
7569 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7570 "neon elt/struct load/store",
7571 dsc);
7572 break;
7573 case 1: /* op1 = 3, bits (9, 10) == 1 */
7574 switch (bits (insn1, 7, 8))
7575 {
7576 case 0: case 1: /* Data processing (register) */
7577 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7578 "dp(reg)", dsc);
7579 break;
7580 case 2: /* Multiply and absolute difference */
7581 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7582 "mul/mua/diff", dsc);
7583 break;
7584 case 3: /* Long multiply and divide */
7585 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7586 "lmul/lmua", dsc);
7587 break;
7588 }
7589 break;
7590 default: /* Coprocessor instructions */
7591 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7592 break;
7593 }
7594 break;
7595 default:
7596 err = 1;
7597 }
7598
7599 if (err)
7600 internal_error (__FILE__, __LINE__,
7601 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7602
7603}
7604
b434a28f
YQ
7605static void
7606thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7607 struct regcache *regs,
b434a28f
YQ
7608 struct displaced_step_closure *dsc)
7609{
34518530
YQ
7610 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7611 uint16_t insn1
7612 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7613
7614 if (debug_displaced)
7615 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7616 "at %.8lx\n", insn1, (unsigned long) from);
7617
7618 dsc->is_thumb = 1;
7619 dsc->insn_size = thumb_insn_size (insn1);
7620 if (thumb_insn_size (insn1) == 4)
7621 {
7622 uint16_t insn2
7623 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7624 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7625 }
7626 else
7627 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7628}
7629
cca44b1b 7630void
b434a28f
YQ
7631arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7632 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7633 struct displaced_step_closure *dsc)
7634{
7635 int err = 0;
b434a28f
YQ
7636 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7637 uint32_t insn;
cca44b1b
JB
7638
7639 /* Most displaced instructions use a 1-instruction scratch space, so set this
7640 here and override below if/when necessary. */
7641 dsc->numinsns = 1;
7642 dsc->insn_addr = from;
7643 dsc->scratch_base = to;
7644 dsc->cleanup = NULL;
7645 dsc->wrote_to_pc = 0;
7646
b434a28f 7647 if (!displaced_in_arm_mode (regs))
12545665 7648 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7649
4db71c0b
YQ
7650 dsc->is_thumb = 0;
7651 dsc->insn_size = 4;
b434a28f
YQ
7652 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7653 if (debug_displaced)
7654 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7655 "at %.8lx\n", (unsigned long) insn,
7656 (unsigned long) from);
7657
cca44b1b 7658 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7659 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7660 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7661 {
7662 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7663 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7664 break;
7665
7666 case 0x4: case 0x5: case 0x6:
7ff120b4 7667 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7668 break;
7669
7670 case 0x7:
7ff120b4 7671 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7672 break;
7673
7674 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7675 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7676 break;
7677
7678 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7679 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7680 break;
7681 }
7682
7683 if (err)
7684 internal_error (__FILE__, __LINE__,
7685 _("arm_process_displaced_insn: Instruction decode error"));
7686}
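/* The switch index above combines bit 4 of the instruction with bits 25-27.
   For example (illustrative encoding), "ldr r1, [pc, #8]" is 0xe59f1008:
   bit 4 is 0 and bits 27-25 are 010, giving index 0x4, so the instruction
   is routed to arm_decode_ld_st_word_ubyte.  */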
7687
7688/* Actually set up the scratch space for a displaced instruction. */
7689
7690void
7691arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7692 CORE_ADDR to, struct displaced_step_closure *dsc)
7693{
7694 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7695 unsigned int i, len, offset;
cca44b1b 7696 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7697 int size = dsc->is_thumb ? 2 : 4;
948f8e3d 7698 const gdb_byte *bkp_insn;
cca44b1b 7699
4db71c0b 7700 offset = 0;
cca44b1b
JB
7701 /* Poke modified instruction(s). */
7702 for (i = 0; i < dsc->numinsns; i++)
7703 {
7704 if (debug_displaced)
4db71c0b
YQ
7705 {
7706 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7707 if (size == 4)
7708 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7709 dsc->modinsn[i]);
7710 else if (size == 2)
7711 fprintf_unfiltered (gdb_stdlog, "%.4x",
7712 (unsigned short)dsc->modinsn[i]);
7713
7714 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7715 (unsigned long) to + offset);
7716
7717 }
7718 write_memory_unsigned_integer (to + offset, size,
7719 byte_order_for_code,
cca44b1b 7720 dsc->modinsn[i]);
4db71c0b
YQ
7721 offset += size;
7722 }
7723
7724 /* Choose the correct breakpoint instruction. */
7725 if (dsc->is_thumb)
7726 {
7727 bkp_insn = tdep->thumb_breakpoint;
7728 len = tdep->thumb_breakpoint_size;
7729 }
7730 else
7731 {
7732 bkp_insn = tdep->arm_breakpoint;
7733 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7734 }
7735
7736 /* Put breakpoint afterwards. */
4db71c0b 7737 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7738
7739 if (debug_displaced)
7740 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7741 paddress (gdbarch, from), paddress (gdbarch, to));
7742}
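/* Conceptually the scratch pad laid out above is:

     to + 0                 modified instruction(s), numinsns entries of
                            2 (Thumb) or 4 (ARM) bytes each
     to + numinsns * size   ARM or Thumb breakpoint instruction

   For a single rewritten 32-bit ARM instruction, for example, the
   breakpoint lands at to + 4; hitting it is what signals that the
   displaced copy has finished executing.  */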
7743
cca44b1b
JB
7744/* Entry point for cleaning things up after a displaced instruction has been
7745 single-stepped. */
7746
7747void
7748arm_displaced_step_fixup (struct gdbarch *gdbarch,
7749 struct displaced_step_closure *dsc,
7750 CORE_ADDR from, CORE_ADDR to,
7751 struct regcache *regs)
7752{
7753 if (dsc->cleanup)
7754 dsc->cleanup (gdbarch, regs, dsc);
7755
7756 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7757 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7758 dsc->insn_addr + dsc->insn_size);
7759
cca44b1b
JB
7760}
7761
7762#include "bfd-in2.h"
7763#include "libcoff.h"
7764
7765static int
7766gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7767{
9a3c8263 7768 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
9779414d
DJ
7769
7770 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7771 {
7772 static asymbol *asym;
7773 static combined_entry_type ce;
7774 static struct coff_symbol_struct csym;
7775 static struct bfd fake_bfd;
7776 static bfd_target fake_target;
7777
7778 if (csym.native == NULL)
7779 {
7780 /* Create a fake symbol vector containing a Thumb symbol.
7781 This is solely so that the code in print_insn_little_arm()
7782 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7783 the presence of a Thumb symbol and switch to decoding
7784 Thumb instructions. */
7785
7786 fake_target.flavour = bfd_target_coff_flavour;
7787 fake_bfd.xvec = &fake_target;
7788 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7789 csym.native = &ce;
7790 csym.symbol.the_bfd = &fake_bfd;
7791 csym.symbol.name = "fake";
7792 asym = (asymbol *) & csym;
7793 }
7794
7795 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7796 info->symbols = &asym;
7797 }
7798 else
7799 info->symbols = NULL;
7800
7801 if (info->endian == BFD_ENDIAN_BIG)
7802 return print_insn_big_arm (memaddr, info);
7803 else
7804 return print_insn_little_arm (memaddr, info);
7805}
7806
7807/* The following define instruction sequences that will cause ARM
7808 cpu's to take an undefined instruction trap. These are used to
7809 signal a breakpoint to GDB.
7810
7811 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7812 modes. A different instruction is required for each mode. The ARM
7813 cpu's can also be big or little endian. Thus four different
7814 instructions are needed to support all cases.
7815
7816 Note: ARMv4 defines several new instructions that will take the
7817 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7818 not in fact add the new instructions. The new undefined
7819 instructions in ARMv4 are all instructions that had no defined
7820 behaviour in earlier chips. There is no guarantee that they will
7821 raise an exception; they may instead be treated as NOPs. In practice,
7822 it may only be safe to rely on instructions matching:
7823
7824 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7825 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7826 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7827
0963b4bd 7828 Even this may only be true if the condition predicate is true. The
cca44b1b
JB
7829 following use a condition predicate of ALWAYS so it is always TRUE.
7830
7831 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7832 and NetBSD all use a software interrupt rather than an undefined
7833 instruction to force a trap. This can be handled by the
7834 abi-specific code during establishment of the gdbarch vector. */
7835
7836#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7837#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7838#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7839#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
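/* For reference: both ARM byte sequences encode the word 0xe7ffdefe, which
   fits the cond=AL, bits 27-25 = 011, bit 4 = 1 pattern shown above; the
   Thumb sequence is the halfword 0xbebe, the BKPT encoding with immediate
   0xbe (an undefined instruction on cores that lack BKPT).  */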
7840
948f8e3d
PA
7841static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7842static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7843static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7844static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7845
cd6c3b4f
YQ
7846/* Implement the breakpoint_kind_from_pc gdbarch method. */
7847
d19280ad
YQ
7848static int
7849arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
cca44b1b
JB
7850{
7851 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7852 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7853
9779414d 7854 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7855 {
7856 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7857
7858 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7859 check whether we are replacing a 32-bit instruction. */
7860 if (tdep->thumb2_breakpoint != NULL)
7861 {
7862 gdb_byte buf[2];
d19280ad 7863
177321bd
DJ
7864 if (target_read_memory (*pcptr, buf, 2) == 0)
7865 {
7866 unsigned short inst1;
d19280ad 7867
177321bd 7868 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7869 if (thumb_insn_size (inst1) == 4)
d19280ad 7870 return ARM_BP_KIND_THUMB2;
177321bd
DJ
7871 }
7872 }
7873
d19280ad 7874 return ARM_BP_KIND_THUMB;
cca44b1b
JB
7875 }
7876 else
d19280ad
YQ
7877 return ARM_BP_KIND_ARM;
7878
7879}
7880
cd6c3b4f
YQ
7881/* Implement the sw_breakpoint_from_kind gdbarch method. */
7882
d19280ad
YQ
7883static const gdb_byte *
7884arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7885{
7886 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7887
7888 switch (kind)
cca44b1b 7889 {
d19280ad
YQ
7890 case ARM_BP_KIND_ARM:
7891 *size = tdep->arm_breakpoint_size;
cca44b1b 7892 return tdep->arm_breakpoint;
d19280ad
YQ
7893 case ARM_BP_KIND_THUMB:
7894 *size = tdep->thumb_breakpoint_size;
7895 return tdep->thumb_breakpoint;
7896 case ARM_BP_KIND_THUMB2:
7897 *size = tdep->thumb2_breakpoint_size;
7898 return tdep->thumb2_breakpoint;
7899 default:
7900 gdb_assert_not_reached ("unexpected arm breakpoint kind");
cca44b1b
JB
7901 }
7902}
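/* The two functions above are the ARM implementations of the new
   breakpoint_kind_from_pc and sw_breakpoint_from_kind gdbarch methods.
   The registration happens in the gdbarch initialization code; as a rough
   sketch (not the verbatim registration used in this file) it amounts to:

     set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
     set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
 */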
7903
d19280ad
YQ
7904/* Determine the type and size of breakpoint to insert at PCPTR. Uses
7905 the program counter value to determine whether a 16-bit or 32-bit
7906 breakpoint should be used. It returns a pointer to a string of
7907 bytes that encode a breakpoint instruction, stores the length of
7908 the string to *lenptr, and adjusts the program counter (if
7909 necessary) to point to the actual memory location where the
7910 breakpoint should be inserted. */
7911
7912GDBARCH_BREAKPOINT_FROM_PC (arm)
7913
177321bd
DJ
7914static void
7915arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7916 int *kindptr)
7917{
177321bd 7918
d19280ad 7919 *kindptr = arm_breakpoint_kind_from_pc (gdbarch, pcptr);
177321bd
DJ
7920}
7921
cca44b1b
JB
7922/* Extract from an array REGBUF containing the (raw) register state a
7923 function return value of type TYPE, and copy that, in virtual
7924 format, into VALBUF. */
7925
7926static void
7927arm_extract_return_value (struct type *type, struct regcache *regs,
7928 gdb_byte *valbuf)
7929{
7930 struct gdbarch *gdbarch = get_regcache_arch (regs);
7931 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7932
7933 if (TYPE_CODE_FLT == TYPE_CODE (type))
7934 {
7935 switch (gdbarch_tdep (gdbarch)->fp_model)
7936 {
7937 case ARM_FLOAT_FPA:
7938 {
7939 /* The value is in register F0 in internal format. We need to
7940 extract the raw value and then convert it to the desired
7941 internal type. */
7942 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7943
7944 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7945 convert_from_extended (floatformat_from_type (type), tmpbuf,
7946 valbuf, gdbarch_byte_order (gdbarch));
7947 }
7948 break;
7949
7950 case ARM_FLOAT_SOFT_FPA:
7951 case ARM_FLOAT_SOFT_VFP:
7952 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7953 not using the VFP ABI code. */
7954 case ARM_FLOAT_VFP:
7955 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7956 if (TYPE_LENGTH (type) > 4)
7957 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7958 valbuf + INT_REGISTER_SIZE);
7959 break;
7960
7961 default:
0963b4bd
MS
7962 internal_error (__FILE__, __LINE__,
7963 _("arm_extract_return_value: "
7964 "Floating point model not supported"));
cca44b1b
JB
7965 break;
7966 }
7967 }
7968 else if (TYPE_CODE (type) == TYPE_CODE_INT
7969 || TYPE_CODE (type) == TYPE_CODE_CHAR
7970 || TYPE_CODE (type) == TYPE_CODE_BOOL
7971 || TYPE_CODE (type) == TYPE_CODE_PTR
7972 || TYPE_CODE (type) == TYPE_CODE_REF
7973 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7974 {
b021a221
MS
7975 /* If the type is a plain integer, then the access is
7976 straightforward. Otherwise we have to play around a bit
7977 more. */
cca44b1b
JB
7978 int len = TYPE_LENGTH (type);
7979 int regno = ARM_A1_REGNUM;
7980 ULONGEST tmp;
7981
7982 while (len > 0)
7983 {
7984 /* By using store_unsigned_integer we avoid having to do
7985 anything special for small big-endian values. */
7986 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7987 store_unsigned_integer (valbuf,
7988 (len > INT_REGISTER_SIZE
7989 ? INT_REGISTER_SIZE : len),
7990 byte_order, tmp);
7991 len -= INT_REGISTER_SIZE;
7992 valbuf += INT_REGISTER_SIZE;
7993 }
7994 }
7995 else
7996 {
7997 /* For a structure or union the behaviour is as if the value had
7998 been stored to word-aligned memory and then loaded into
7999 registers with 32-bit load instruction(s). */
8000 int len = TYPE_LENGTH (type);
8001 int regno = ARM_A1_REGNUM;
8002 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8003
8004 while (len > 0)
8005 {
8006 regcache_cooked_read (regs, regno++, tmpbuf);
8007 memcpy (valbuf, tmpbuf,
8008 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8009 len -= INT_REGISTER_SIZE;
8010 valbuf += INT_REGISTER_SIZE;
8011 }
8012 }
8013}
8014
8015
8016/* Will a function return an aggregate type in memory or in a
8017 register? Return 0 if an aggregate type can be returned in a
8018 register, 1 if it must be returned in memory. */
8019
8020static int
8021arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8022{
cca44b1b
JB
8023 enum type_code code;
8024
f168693b 8025 type = check_typedef (type);
cca44b1b 8026
b13c8ab2
YQ
8027 /* Simple, non-aggregate types (ie not including vectors and
8028 complex) are always returned in a register (or registers). */
8029 code = TYPE_CODE (type);
8030 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8031 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8032 return 0;
cca44b1b 8033
c4312b19
YQ
8034 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8035 {
8036 /* Vector values should be returned using ARM registers if they
8037 are not over 16 bytes. */
8038 return (TYPE_LENGTH (type) > 16);
8039 }
8040
b13c8ab2 8041 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 8042 {
b13c8ab2
YQ
8043 /* The AAPCS says all aggregates not larger than a word are returned
8044 in a register. */
8045 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8046 return 0;
8047
cca44b1b
JB
8048 return 1;
8049 }
b13c8ab2
YQ
8050 else
8051 {
8052 int nRc;
cca44b1b 8053
b13c8ab2
YQ
8054 /* All aggregate types that won't fit in a register must be returned
8055 in memory. */
8056 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8057 return 1;
cca44b1b 8058
b13c8ab2
YQ
8059 /* In the ARM ABI, "integer" like aggregate types are returned in
8060 registers. For an aggregate type to be integer like, its size
8061 must be less than or equal to INT_REGISTER_SIZE and the
8062 offset of each addressable subfield must be zero. Note that bit
8063 fields are not addressable, and all addressable subfields of
8064 unions always start at offset zero.
cca44b1b 8065
b13c8ab2
YQ
8066 This function is based on the behaviour of GCC 2.95.1.
8067 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8068
b13c8ab2
YQ
8069 Note: All versions of GCC before GCC 2.95.2 do not set up the
8070 parameters correctly for a function returning the following
8071 structure: struct { float f;}; This should be returned in memory,
8072 not a register. Richard Earnshaw sent me a patch, but I do not
8073 know of any way to detect if a function like the above has been
8074 compiled with the correct calling convention. */
8075
8076 /* Assume all other aggregate types can be returned in a register.
8077 Run a check for structures, unions and arrays. */
8078 nRc = 0;
67255d04 8079
b13c8ab2
YQ
8080 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8081 {
8082 int i;
8083 /* Need to check if this struct/union is "integer" like. For
8084 this to be true, its size must be less than or equal to
8085 INT_REGISTER_SIZE and the offset of each addressable
8086 subfield must be zero. Note that bit fields are not
8087 addressable, and unions always start at offset zero. If any
8088 of the subfields is a floating point type, the struct/union
8089 cannot be an integer type. */
8090
8091 /* For each field in the object, check:
8092 1) Is it FP? --> yes, nRc = 1;
8093 2) Is it addressable (bitpos != 0) and
8094 not packed (bitsize == 0)?
8095 --> yes, nRc = 1
8096 */
8097
8098 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8099 {
b13c8ab2
YQ
8100 enum type_code field_type_code;
8101
8102 field_type_code
8103 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8104 i)));
8105
8106 /* Is it a floating point type field? */
8107 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8108 {
8109 nRc = 1;
8110 break;
8111 }
b13c8ab2
YQ
8112
8113 /* If bitpos != 0, then we have to care about it. */
8114 if (TYPE_FIELD_BITPOS (type, i) != 0)
8115 {
8116 /* Bitfields are not addressable. If the field bitsize is
8117 zero, then the field is not packed. Hence it cannot be
8118 a bitfield or any other packed type. */
8119 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8120 {
8121 nRc = 1;
8122 break;
8123 }
8124 }
67255d04
RE
8125 }
8126 }
67255d04 8127
b13c8ab2
YQ
8128 return nRc;
8129 }
67255d04
RE
8130}
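/* A few concrete cases of the rules above (illustrative types): under the
   AAPCS branch, struct { char a, b; } (2 bytes) fits in INT_REGISTER_SIZE
   and is returned in r0, while struct { int x, y; } (8 bytes) goes to
   memory.  Under the old APCS branch, even the 4-byte struct { float f; }
   is sent to memory, because the field scan sets nRc as soon as it finds a
   floating point member.  */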
8131
34e8f22d
RE
8132/* Write into appropriate registers a function return value of type
8133 TYPE, given in virtual format. */
8134
8135static void
b508a996 8136arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8137 const gdb_byte *valbuf)
34e8f22d 8138{
be8626e0 8139 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8140 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8141
34e8f22d
RE
8142 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8143 {
e362b510 8144 gdb_byte buf[MAX_REGISTER_SIZE];
34e8f22d 8145
be8626e0 8146 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8147 {
8148 case ARM_FLOAT_FPA:
8149
be8626e0
MD
8150 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8151 gdbarch_byte_order (gdbarch));
b508a996 8152 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8153 break;
8154
fd50bc42 8155 case ARM_FLOAT_SOFT_FPA:
08216dd7 8156 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8157 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8158 not using the VFP ABI code. */
8159 case ARM_FLOAT_VFP:
b508a996
RE
8160 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8161 if (TYPE_LENGTH (type) > 4)
8162 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8163 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8164 break;
8165
8166 default:
9b20d036
MS
8167 internal_error (__FILE__, __LINE__,
8168 _("arm_store_return_value: Floating "
8169 "point model not supported"));
08216dd7
RE
8170 break;
8171 }
34e8f22d 8172 }
b508a996
RE
8173 else if (TYPE_CODE (type) == TYPE_CODE_INT
8174 || TYPE_CODE (type) == TYPE_CODE_CHAR
8175 || TYPE_CODE (type) == TYPE_CODE_BOOL
8176 || TYPE_CODE (type) == TYPE_CODE_PTR
8177 || TYPE_CODE (type) == TYPE_CODE_REF
8178 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8179 {
8180 if (TYPE_LENGTH (type) <= 4)
8181 {
8182 /* Values of one word or less are zero/sign-extended and
8183 returned in r0. */
7a5ea0d4 8184 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8185 LONGEST val = unpack_long (type, valbuf);
8186
e17a4113 8187 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8188 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8189 }
8190 else
8191 {
8192 /* Integral values greater than one word are stored in consecutive
8193 registers starting with r0. This will always be a multiple of
8194 the register size. */
8195 int len = TYPE_LENGTH (type);
8196 int regno = ARM_A1_REGNUM;
8197
8198 while (len > 0)
8199 {
8200 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8201 len -= INT_REGISTER_SIZE;
8202 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8203 }
8204 }
8205 }
34e8f22d 8206 else
b508a996
RE
8207 {
8208 /* For a structure or union the behaviour is as if the value had
8209 been stored to word-aligned memory and then loaded into
8210 registers with 32-bit load instruction(s). */
8211 int len = TYPE_LENGTH (type);
8212 int regno = ARM_A1_REGNUM;
7a5ea0d4 8213 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8214
8215 while (len > 0)
8216 {
8217 memcpy (tmpbuf, valbuf,
7a5ea0d4 8218 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8219 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8220 len -= INT_REGISTER_SIZE;
8221 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8222 }
8223 }
34e8f22d
RE
8224}
8225
2af48f68
PB
8226
8227/* Handle function return values. */
8228
8229static enum return_value_convention
6a3a010b 8230arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8231 struct type *valtype, struct regcache *regcache,
8232 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8233{
7c00367c 8234 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8235 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8236 enum arm_vfp_cprc_base_type vfp_base_type;
8237 int vfp_base_count;
8238
8239 if (arm_vfp_abi_for_function (gdbarch, func_type)
8240 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8241 {
8242 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8243 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8244 int i;
8245 for (i = 0; i < vfp_base_count; i++)
8246 {
58d6951d
DJ
8247 if (reg_char == 'q')
8248 {
8249 if (writebuf)
8250 arm_neon_quad_write (gdbarch, regcache, i,
8251 writebuf + i * unit_length);
8252
8253 if (readbuf)
8254 arm_neon_quad_read (gdbarch, regcache, i,
8255 readbuf + i * unit_length);
8256 }
8257 else
8258 {
8259 char name_buf[4];
8260 int regnum;
8261
8c042590 8262 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8263 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8264 strlen (name_buf));
8265 if (writebuf)
8266 regcache_cooked_write (regcache, regnum,
8267 writebuf + i * unit_length);
8268 if (readbuf)
8269 regcache_cooked_read (regcache, regnum,
8270 readbuf + i * unit_length);
8271 }
90445bd3
DJ
8272 }
8273 return RETURN_VALUE_REGISTER_CONVENTION;
8274 }
7c00367c 8275
2af48f68
PB
8276 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8277 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8278 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8279 {
7c00367c
MK
8280 if (tdep->struct_return == pcc_struct_return
8281 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8282 return RETURN_VALUE_STRUCT_CONVENTION;
8283 }
b13c8ab2
YQ
8284 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8285 {
8286 if (arm_return_in_memory (gdbarch, valtype))
8287 return RETURN_VALUE_STRUCT_CONVENTION;
8288 }
7052e42c 8289
2af48f68
PB
8290 if (writebuf)
8291 arm_store_return_value (valtype, regcache, writebuf);
8292
8293 if (readbuf)
8294 arm_extract_return_value (valtype, regcache, readbuf);
8295
8296 return RETURN_VALUE_REGISTER_CONVENTION;
8297}
8298
8299
9df628e0 8300static int
60ade65d 8301arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8302{
e17a4113
UW
8303 struct gdbarch *gdbarch = get_frame_arch (frame);
8304 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8305 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8306 CORE_ADDR jb_addr;
e362b510 8307 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8308
60ade65d 8309 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8310
8311 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8312 INT_REGISTER_SIZE))
9df628e0
RE
8313 return 0;
8314
e17a4113 8315 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8316 return 1;
8317}
8318
faa95490
DJ
8319/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8320 return the target PC. Otherwise return 0. */
c906108c
SS
8321
8322CORE_ADDR
52f729a7 8323arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8324{
2c02bd72 8325 const char *name;
faa95490 8326 int namelen;
c906108c
SS
8327 CORE_ADDR start_addr;
8328
8329 /* Find the starting address and name of the function containing the PC. */
8330 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8331 {
8332 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8333 check here. */
8334 start_addr = arm_skip_bx_reg (frame, pc);
8335 if (start_addr != 0)
8336 return start_addr;
8337
8338 return 0;
8339 }
c906108c 8340
faa95490
DJ
8341 /* If PC is in a Thumb call or return stub, return the address of the
8342 target PC, which is in a register. The thunk functions are called
8343 _call_via_xx, where xx is the register name. The possible names
3d8d5e79
DJ
8344 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8345 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8346 if (startswith (name, "_call_via_")
8347 || startswith (name, "__ARM_call_via_"))
c906108c 8348 {
ed9a39eb
JM
8349 /* Use the name suffix to determine which register contains the
8350 target PC. */
c5aa993b
JM
8351 static char *table[15] =
8352 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8353 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8354 };
c906108c 8355 int regno;
faa95490 8356 int offset = strlen (name) - 2;
c906108c
SS
8357
8358 for (regno = 0; regno <= 14; regno++)
faa95490 8359 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8360 return get_frame_register_unsigned (frame, regno);
c906108c 8361 }
ed9a39eb 8362
faa95490
DJ
8363 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8364 non-interworking calls to foo. We could decode the stubs
8365 to find the target but it's easier to use the symbol table. */
8366 namelen = strlen (name);
8367 if (name[0] == '_' && name[1] == '_'
8368 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8369 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8370 || (namelen > 2 + strlen ("_from_arm")
61012eef 8371 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8372 {
8373 char *target_name;
8374 int target_len = namelen - 2;
3b7344d5 8375 struct bound_minimal_symbol minsym;
faa95490
DJ
8376 struct objfile *objfile;
8377 struct obj_section *sec;
8378
8379 if (name[namelen - 1] == 'b')
8380 target_len -= strlen ("_from_thumb");
8381 else
8382 target_len -= strlen ("_from_arm");
8383
224c3ddb 8384 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8385 memcpy (target_name, name + 2, target_len);
8386 target_name[target_len] = '\0';
8387
8388 sec = find_pc_section (pc);
8389 objfile = (sec == NULL) ? NULL : sec->objfile;
8390 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8391 if (minsym.minsym != NULL)
77e371c0 8392 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8393 else
8394 return 0;
8395 }
8396
c5aa993b 8397 return 0; /* Not a stub. */
c906108c
SS
8398}
8399
afd7eef0
RE
8400static void
8401set_arm_command (char *args, int from_tty)
8402{
edefbb7c
AC
8403 printf_unfiltered (_("\
8404\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8405 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8406}
8407
8408static void
8409show_arm_command (char *args, int from_tty)
8410{
26304000 8411 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8412}
8413
28e97307
DJ
8414static void
8415arm_update_current_architecture (void)
fd50bc42 8416{
28e97307 8417 struct gdbarch_info info;
fd50bc42 8418
28e97307 8419 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8420 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8421 return;
fd50bc42 8422
28e97307
DJ
8423 /* Update the architecture. */
8424 gdbarch_info_init (&info);
fd50bc42 8425
28e97307 8426 if (!gdbarch_update_p (info))
9b20d036 8427 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8428}
8429
8430static void
8431set_fp_model_sfunc (char *args, int from_tty,
8432 struct cmd_list_element *c)
8433{
570dc176 8434 int fp_model;
fd50bc42
RE
8435
8436 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8437 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8438 {
aead7601 8439 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8440 break;
8441 }
8442
8443 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8444 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8445 current_fp_model);
8446
28e97307 8447 arm_update_current_architecture ();
fd50bc42
RE
8448}
8449
8450static void
08546159
AC
8451show_fp_model (struct ui_file *file, int from_tty,
8452 struct cmd_list_element *c, const char *value)
fd50bc42 8453{
f5656ead 8454 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8455
28e97307 8456 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8457 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8458 fprintf_filtered (file, _("\
8459The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8460 fp_model_strings[tdep->fp_model]);
8461 else
8462 fprintf_filtered (file, _("\
8463The current ARM floating point model is \"%s\".\n"),
8464 fp_model_strings[arm_fp_model]);
8465}
8466
8467static void
8468arm_set_abi (char *args, int from_tty,
8469 struct cmd_list_element *c)
8470{
570dc176 8471 int arm_abi;
28e97307
DJ
8472
8473 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8474 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8475 {
aead7601 8476 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8477 break;
8478 }
8479
8480 if (arm_abi == ARM_ABI_LAST)
8481 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8482 arm_abi_string);
8483
8484 arm_update_current_architecture ();
8485}
8486
8487static void
8488arm_show_abi (struct ui_file *file, int from_tty,
8489 struct cmd_list_element *c, const char *value)
8490{
f5656ead 8491 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8492
8493 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8494 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8495 fprintf_filtered (file, _("\
8496The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8497 arm_abi_strings[tdep->arm_abi]);
8498 else
8499 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8500 arm_abi_string);
fd50bc42
RE
8501}
8502
0428b8f5
DJ
8503static void
8504arm_show_fallback_mode (struct ui_file *file, int from_tty,
8505 struct cmd_list_element *c, const char *value)
8506{
0963b4bd
MS
8507 fprintf_filtered (file,
8508 _("The current execution mode assumed "
8509 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8510 arm_fallback_mode_string);
8511}
8512
8513static void
8514arm_show_force_mode (struct ui_file *file, int from_tty,
8515 struct cmd_list_element *c, const char *value)
8516{
0963b4bd
MS
8517 fprintf_filtered (file,
8518 _("The current execution mode assumed "
8519 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8520 arm_force_mode_string);
8521}
8522
afd7eef0
RE
8523/* If the user changes the register disassembly style used for info
8524 register and other commands, we have to also switch the style used
8525 in opcodes for disassembly output. This function is run in the "set
 8526 arm disassembler" command, and does that. */
bc90b915
FN
8527
8528static void
afd7eef0 8529set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
8530 struct cmd_list_element *c)
8531{
afd7eef0 8532 set_disassembly_style ();
bc90b915
FN
8533}
8534\f
966fbf70 8535/* Return the ARM register name corresponding to register I. */
a208b0cb 8536static const char *
d93859e2 8537arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8538{
58d6951d
DJ
8539 const int num_regs = gdbarch_num_regs (gdbarch);
8540
8541 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8542 && i >= num_regs && i < num_regs + 32)
8543 {
8544 static const char *const vfp_pseudo_names[] = {
8545 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8546 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8547 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8548 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8549 };
8550
8551 return vfp_pseudo_names[i - num_regs];
8552 }
8553
8554 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8555 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8556 {
8557 static const char *const neon_pseudo_names[] = {
8558 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8559 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8560 };
8561
8562 return neon_pseudo_names[i - num_regs - 32];
8563 }
8564
ff6f572f
DJ
8565 if (i >= ARRAY_SIZE (arm_register_names))
8566 /* These registers are only supported on targets which supply
8567 an XML description. */
8568 return "";
8569
966fbf70
RE
8570 return arm_register_names[i];
8571}
8572
bc90b915 8573static void
afd7eef0 8574set_disassembly_style (void)
bc90b915 8575{
123dc839 8576 int current;
bc90b915 8577
123dc839
DJ
8578 /* Find the style that the user wants. */
8579 for (current = 0; current < num_disassembly_options; current++)
8580 if (disassembly_style == valid_disassembly_styles[current])
8581 break;
8582 gdb_assert (current < num_disassembly_options);
bc90b915 8583
94c30b78 8584 /* Synchronize the disassembler. */
bc90b915
FN
8585 set_arm_regname_option (current);
8586}
8587
082fc60d
RE
8588/* Test whether the coff symbol specific value corresponds to a Thumb
8589 function. */
8590
8591static int
8592coff_sym_is_thumb (int val)
8593{
f8bf5763
PM
8594 return (val == C_THUMBEXT
8595 || val == C_THUMBSTAT
8596 || val == C_THUMBEXTFUNC
8597 || val == C_THUMBSTATFUNC
8598 || val == C_THUMBLABEL);
082fc60d
RE
8599}
8600
8601/* arm_coff_make_msymbol_special()
8602 arm_elf_make_msymbol_special()
8603
8604 These functions test whether the COFF or ELF symbol corresponds to
8605 an address in thumb code, and set a "special" bit in a minimal
8606 symbol to indicate that it does. */
8607
34e8f22d 8608static void
082fc60d
RE
8609arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8610{
39d911fc
TP
8611 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8612
8613 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8614 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8615 MSYMBOL_SET_SPECIAL (msym);
8616}
8617
34e8f22d 8618static void
082fc60d
RE
8619arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8620{
8621 if (coff_sym_is_thumb (val))
8622 MSYMBOL_SET_SPECIAL (msym);
8623}
8624
60c5725c 8625static void
c1bd65d0 8626arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8627{
9a3c8263 8628 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8629 unsigned int i;
8630
8631 for (i = 0; i < objfile->obfd->section_count; i++)
8632 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8633}
8634
8635static void
8636arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8637 asymbol *sym)
8638{
8639 const char *name = bfd_asymbol_name (sym);
8640 struct arm_per_objfile *data;
8641 VEC(arm_mapping_symbol_s) **map_p;
8642 struct arm_mapping_symbol new_map_sym;
8643
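/* "$a", "$t" and "$d" are the ARM ELF mapping symbols: they mark the
   start of ARM code, Thumb code and data within a section,
   respectively.  */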
8644 gdb_assert (name[0] == '$');
8645 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8646 return;
8647
9a3c8263
SM
8648 data = (struct arm_per_objfile *) objfile_data (objfile,
8649 arm_objfile_data_key);
60c5725c
DJ
8650 if (data == NULL)
8651 {
8652 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8653 struct arm_per_objfile);
8654 set_objfile_data (objfile, arm_objfile_data_key, data);
8655 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8656 objfile->obfd->section_count,
8657 VEC(arm_mapping_symbol_s) *);
8658 }
8659 map_p = &data->section_maps[bfd_get_section (sym)->index];
8660
8661 new_map_sym.value = sym->value;
8662 new_map_sym.type = name[1];
8663
8664 /* Assume that most mapping symbols appear in order of increasing
8665 value. If they were randomly distributed, it would be faster to
8666 always push here and then sort at first use. */
8667 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8668 {
8669 struct arm_mapping_symbol *prev_map_sym;
8670
8671 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8672 if (prev_map_sym->value >= sym->value)
8673 {
8674 unsigned int idx;
8675 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8676 arm_compare_mapping_symbols);
8677 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8678 return;
8679 }
8680 }
8681
8682 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8683}
8684
756fe439 8685static void
61a1198a 8686arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8687{
9779414d 8688 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8689 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8690
8691 /* If necessary, set the T bit. */
8692 if (arm_apcs_32)
8693 {
9779414d 8694 ULONGEST val, t_bit;
61a1198a 8695 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8696 t_bit = arm_psr_thumb_bit (gdbarch);
8697 if (arm_pc_is_thumb (gdbarch, pc))
8698 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8699 val | t_bit);
756fe439 8700 else
61a1198a 8701 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8702 val & ~t_bit);
756fe439
DJ
8703 }
8704}
123dc839 8705
58d6951d
DJ
8706/* Read the contents of a NEON quad register, by reading from two
8707 double registers. This is used to implement the quad pseudo
8708 registers, and for argument passing in case the quad registers are
8709 missing; vectors are passed in quad registers when using the VFP
8710 ABI, even if a NEON unit is not present. REGNUM is the index of
8711 the quad register, in [0, 15]. */
8712
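/* (Concretely, qN overlays d(2N) and d(2N+1); d(2N) always holds the
   least significant 64 bits, which is why the copy offset below is
   swapped on big-endian targets.)  */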
05d1431c 8713static enum register_status
58d6951d
DJ
8714arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8715 int regnum, gdb_byte *buf)
8716{
8717 char name_buf[4];
8718 gdb_byte reg_buf[8];
8719 int offset, double_regnum;
05d1431c 8720 enum register_status status;
58d6951d 8721
8c042590 8722 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8723 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8724 strlen (name_buf));
8725
8726 /* d0 is always the least significant half of q0. */
8727 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8728 offset = 8;
8729 else
8730 offset = 0;
8731
05d1431c
PA
8732 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8733 if (status != REG_VALID)
8734 return status;
58d6951d
DJ
8735 memcpy (buf + offset, reg_buf, 8);
8736
8737 offset = 8 - offset;
05d1431c
PA
8738 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8739 if (status != REG_VALID)
8740 return status;
58d6951d 8741 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8742
8743 return REG_VALID;
58d6951d
DJ
8744}
8745
05d1431c 8746static enum register_status
58d6951d
DJ
8747arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8748 int regnum, gdb_byte *buf)
8749{
8750 const int num_regs = gdbarch_num_regs (gdbarch);
8751 char name_buf[4];
8752 gdb_byte reg_buf[8];
8753 int offset, double_regnum;
8754
8755 gdb_assert (regnum >= num_regs);
8756 regnum -= num_regs;
8757
8758 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8759 /* Quad-precision register. */
05d1431c 8760 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8761 else
8762 {
05d1431c
PA
8763 enum register_status status;
8764
58d6951d
DJ
8765 /* Single-precision register. */
8766 gdb_assert (regnum < 32);
8767
8768 /* s0 is always the least significant half of d0. */
8769 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8770 offset = (regnum & 1) ? 0 : 4;
8771 else
8772 offset = (regnum & 1) ? 4 : 0;
8773
8c042590 8774 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8775 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8776 strlen (name_buf));
8777
05d1431c
PA
8778 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8779 if (status == REG_VALID)
8780 memcpy (buf, reg_buf + offset, 4);
8781 return status;
58d6951d
DJ
8782 }
8783}
8784
8785/* Store the contents of BUF to a NEON quad register, by writing to
8786 two double registers. This is used to implement the quad pseudo
8787 registers, and for argument passing in case the quad registers are
8788 missing; vectors are passed in quad registers when using the VFP
8789 ABI, even if a NEON unit is not present. REGNUM is the index
8790 of the quad register, in [0, 15]. */
8791
8792static void
8793arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8794 int regnum, const gdb_byte *buf)
8795{
8796 char name_buf[4];
58d6951d
DJ
8797 int offset, double_regnum;
8798
8c042590 8799 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8800 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8801 strlen (name_buf));
8802
8803 /* d0 is always the least significant half of q0. */
8804 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8805 offset = 8;
8806 else
8807 offset = 0;
8808
8809 regcache_raw_write (regcache, double_regnum, buf + offset);
8810 offset = 8 - offset;
8811 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8812}
8813
8814static void
8815arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8816 int regnum, const gdb_byte *buf)
8817{
8818 const int num_regs = gdbarch_num_regs (gdbarch);
8819 char name_buf[4];
8820 gdb_byte reg_buf[8];
8821 int offset, double_regnum;
8822
8823 gdb_assert (regnum >= num_regs);
8824 regnum -= num_regs;
8825
8826 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8827 /* Quad-precision register. */
8828 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8829 else
8830 {
8831 /* Single-precision register. */
8832 gdb_assert (regnum < 32);
8833
8834 /* s0 is always the least significant half of d0. */
8835 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8836 offset = (regnum & 1) ? 0 : 4;
8837 else
8838 offset = (regnum & 1) ? 4 : 0;
8839
8c042590 8840 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8841 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8842 strlen (name_buf));
8843
8844 regcache_raw_read (regcache, double_regnum, reg_buf);
8845 memcpy (reg_buf + offset, buf, 4);
8846 regcache_raw_write (regcache, double_regnum, reg_buf);
8847 }
8848}
8849
123dc839
DJ
8850static struct value *
8851value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8852{
9a3c8263 8853 const int *reg_p = (const int *) baton;
123dc839
DJ
8854 return value_of_register (*reg_p, frame);
8855}
97e03143 8856\f
70f80edf
JT
8857static enum gdb_osabi
8858arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8859{
2af48f68 8860 unsigned int elfosabi;
70f80edf 8861 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8862
70f80edf 8863 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8864
28e97307
DJ
8865 if (elfosabi == ELFOSABI_ARM)
8866 /* GNU tools use this value. Check note sections in this case,
8867 as well. */
8868 bfd_map_over_sections (abfd,
8869 generic_elf_osabi_sniff_abi_tag_sections,
8870 &osabi);
97e03143 8871
28e97307 8872 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8873 return osabi;
97e03143
RE
8874}
8875
54483882
YQ
8876static int
8877arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8878 struct reggroup *group)
8879{
2c291032
YQ
 8880 /* The FPS register's type is INT, but it belongs to float_reggroup.
 8881 Besides this, the FPS register belongs to save_reggroup,
 8882 restore_reggroup, and all_reggroup, of course. */
54483882 8883 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8884 return (group == float_reggroup
8885 || group == save_reggroup
8886 || group == restore_reggroup
8887 || group == all_reggroup);
54483882
YQ
8888 else
8889 return default_register_reggroup_p (gdbarch, regnum, group);
8890}
8891
25f8c692
JL
8892\f
8893/* For backward-compatibility we allow two 'g' packet lengths with
8894 the remote protocol depending on whether FPA registers are
8895 supplied. M-profile targets do not have FPA registers, but some
8896 stubs already exist in the wild which use a 'g' packet which
8897 supplies them albeit with dummy values. The packet format which
8898 includes FPA registers should be considered deprecated for
8899 M-profile targets. */
8900
8901static void
8902arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8903{
8904 if (gdbarch_tdep (gdbarch)->is_m)
8905 {
8906 /* If we know from the executable this is an M-profile target,
8907 cater for remote targets whose register set layout is the
8908 same as the FPA layout. */
8909 register_remote_g_packet_guess (gdbarch,
03145bf4 8910 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8911 (16 * INT_REGISTER_SIZE)
8912 + (8 * FP_REGISTER_SIZE)
8913 + (2 * INT_REGISTER_SIZE),
8914 tdesc_arm_with_m_fpa_layout);
8915
8916 /* The regular M-profile layout. */
8917 register_remote_g_packet_guess (gdbarch,
8918 /* r0-r12,sp,lr,pc; xpsr */
8919 (16 * INT_REGISTER_SIZE)
8920 + INT_REGISTER_SIZE,
8921 tdesc_arm_with_m);
3184d3f9
JL
8922
8923 /* M-profile plus M4F VFP. */
8924 register_remote_g_packet_guess (gdbarch,
8925 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8926 (16 * INT_REGISTER_SIZE)
8927 + (16 * VFP_REGISTER_SIZE)
8928 + (2 * INT_REGISTER_SIZE),
8929 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8930 }
8931
8932 /* Otherwise we don't have a useful guess. */
8933}
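/* (Illustrative arithmetic only: with the usual sizes -- 4-byte core
   registers, 12-byte FPA registers and 8-byte VFP double registers --
   the three guesses above correspond to 'g' packets carrying 168, 68
   and 200 bytes of register data, respectively.)  */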
8934
7eb89530
YQ
8935/* Implement the code_of_frame_writable gdbarch method. */
8936
8937static int
8938arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8939{
8940 if (gdbarch_tdep (gdbarch)->is_m
8941 && get_frame_type (frame) == SIGTRAMP_FRAME)
8942 {
 8943 /* M-profile exception frames return to some magic PCs, which
 8944 aren't writable at all. */
8945 return 0;
8946 }
8947 else
8948 return 1;
8949}
8950
70f80edf 8951\f
da3c6d4a
MS
8952/* Initialize the current architecture based on INFO. If possible,
8953 re-use an architecture from ARCHES, which is a list of
8954 architectures already created during this debugging session.
97e03143 8955
da3c6d4a
MS
8956 Called e.g. at program startup, when reading a core file, and when
8957 reading a binary file. */
97e03143 8958
39bbf761
RE
8959static struct gdbarch *
8960arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8961{
97e03143 8962 struct gdbarch_tdep *tdep;
39bbf761 8963 struct gdbarch *gdbarch;
28e97307
DJ
8964 struct gdbarch_list *best_arch;
8965 enum arm_abi_kind arm_abi = arm_abi_global;
8966 enum arm_float_model fp_model = arm_fp_model;
123dc839 8967 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8968 int i, is_m = 0;
330c6ca9 8969 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8970 int have_wmmx_registers = 0;
58d6951d 8971 int have_neon = 0;
ff6f572f 8972 int have_fpa_registers = 1;
9779414d
DJ
8973 const struct target_desc *tdesc = info.target_desc;
8974
8975 /* If we have an object to base this architecture on, try to determine
8976 its ABI. */
8977
8978 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8979 {
8980 int ei_osabi, e_flags;
8981
8982 switch (bfd_get_flavour (info.abfd))
8983 {
8984 case bfd_target_aout_flavour:
8985 /* Assume it's an old APCS-style ABI. */
8986 arm_abi = ARM_ABI_APCS;
8987 break;
8988
8989 case bfd_target_coff_flavour:
8990 /* Assume it's an old APCS-style ABI. */
8991 /* XXX WinCE? */
8992 arm_abi = ARM_ABI_APCS;
8993 break;
8994
8995 case bfd_target_elf_flavour:
8996 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8997 e_flags = elf_elfheader (info.abfd)->e_flags;
8998
8999 if (ei_osabi == ELFOSABI_ARM)
9000 {
9001 /* GNU tools used to use this value, but do not for EABI
9002 objects. There's nowhere to tag an EABI version
9003 anyway, so assume APCS. */
9004 arm_abi = ARM_ABI_APCS;
9005 }
d403db27 9006 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
9007 {
9008 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9009 int attr_arch, attr_profile;
9010
9011 switch (eabi_ver)
9012 {
9013 case EF_ARM_EABI_UNKNOWN:
9014 /* Assume GNU tools. */
9015 arm_abi = ARM_ABI_APCS;
9016 break;
9017
9018 case EF_ARM_EABI_VER4:
9019 case EF_ARM_EABI_VER5:
9020 arm_abi = ARM_ABI_AAPCS;
9021 /* EABI binaries default to VFP float ordering.
9022 They may also contain build attributes that can
9023 be used to identify if the VFP argument-passing
9024 ABI is in use. */
9025 if (fp_model == ARM_FLOAT_AUTO)
9026 {
9027#ifdef HAVE_ELF
9028 switch (bfd_elf_get_obj_attr_int (info.abfd,
9029 OBJ_ATTR_PROC,
9030 Tag_ABI_VFP_args))
9031 {
b35b0298 9032 case AEABI_VFP_args_base:
9779414d
DJ
9033 /* "The user intended FP parameter/result
9034 passing to conform to AAPCS, base
9035 variant". */
9036 fp_model = ARM_FLOAT_SOFT_VFP;
9037 break;
b35b0298 9038 case AEABI_VFP_args_vfp:
9779414d
DJ
9039 /* "The user intended FP parameter/result
9040 passing to conform to AAPCS, VFP
9041 variant". */
9042 fp_model = ARM_FLOAT_VFP;
9043 break;
b35b0298 9044 case AEABI_VFP_args_toolchain:
9779414d
DJ
9045 /* "The user intended FP parameter/result
9046 passing to conform to tool chain-specific
9047 conventions" - we don't know any such
9048 conventions, so leave it as "auto". */
9049 break;
b35b0298 9050 case AEABI_VFP_args_compatible:
5c294fee
TG
9051 /* "Code is compatible with both the base
9052 and VFP variants; the user did not permit
9053 non-variadic functions to pass FP
9054 parameters/results" - leave it as
9055 "auto". */
9056 break;
9779414d
DJ
9057 default:
9058 /* Attribute value not mentioned in the
5c294fee 9059 November 2012 ABI, so leave it as
9779414d
DJ
9060 "auto". */
9061 break;
9062 }
9063#else
9064 fp_model = ARM_FLOAT_SOFT_VFP;
9065#endif
9066 }
9067 break;
9068
9069 default:
9070 /* Leave it as "auto". */
9071 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9072 break;
9073 }
9074
9075#ifdef HAVE_ELF
9076 /* Detect M-profile programs. This only works if the
9077 executable file includes build attributes; GCC does
9078 copy them to the executable, but e.g. RealView does
9079 not. */
9080 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9081 Tag_CPU_arch);
0963b4bd
MS
9082 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9083 OBJ_ATTR_PROC,
9779414d
DJ
9084 Tag_CPU_arch_profile);
9085 /* GCC specifies the profile for v6-M; RealView only
9086 specifies the profile for architectures starting with
9087 V7 (as opposed to architectures with a tag
9088 numerically greater than TAG_CPU_ARCH_V7). */
9089 if (!tdesc_has_registers (tdesc)
9090 && (attr_arch == TAG_CPU_ARCH_V6_M
9091 || attr_arch == TAG_CPU_ARCH_V6S_M
9092 || attr_profile == 'M'))
25f8c692 9093 is_m = 1;
9779414d
DJ
9094#endif
9095 }
9096
9097 if (fp_model == ARM_FLOAT_AUTO)
9098 {
9099 int e_flags = elf_elfheader (info.abfd)->e_flags;
9100
9101 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9102 {
9103 case 0:
9104 /* Leave it as "auto". Strictly speaking this case
9105 means FPA, but almost nobody uses that now, and
9106 many toolchains fail to set the appropriate bits
9107 for the floating-point model they use. */
9108 break;
9109 case EF_ARM_SOFT_FLOAT:
9110 fp_model = ARM_FLOAT_SOFT_FPA;
9111 break;
9112 case EF_ARM_VFP_FLOAT:
9113 fp_model = ARM_FLOAT_VFP;
9114 break;
9115 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9116 fp_model = ARM_FLOAT_SOFT_VFP;
9117 break;
9118 }
9119 }
9120
9121 if (e_flags & EF_ARM_BE8)
9122 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9123
9124 break;
9125
9126 default:
9127 /* Leave it as "auto". */
9128 break;
9129 }
9130 }
123dc839
DJ
9131
9132 /* Check any target description for validity. */
9779414d 9133 if (tdesc_has_registers (tdesc))
123dc839
DJ
9134 {
9135 /* For most registers we require GDB's default names; but also allow
9136 the numeric names for sp / lr / pc, as a convenience. */
9137 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9138 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9139 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9140
9141 const struct tdesc_feature *feature;
58d6951d 9142 int valid_p;
123dc839 9143
9779414d 9144 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9145 "org.gnu.gdb.arm.core");
9146 if (feature == NULL)
9779414d
DJ
9147 {
9148 feature = tdesc_find_feature (tdesc,
9149 "org.gnu.gdb.arm.m-profile");
9150 if (feature == NULL)
9151 return NULL;
9152 else
9153 is_m = 1;
9154 }
123dc839
DJ
9155
9156 tdesc_data = tdesc_data_alloc ();
9157
9158 valid_p = 1;
9159 for (i = 0; i < ARM_SP_REGNUM; i++)
9160 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9161 arm_register_names[i]);
9162 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9163 ARM_SP_REGNUM,
9164 arm_sp_names);
9165 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9166 ARM_LR_REGNUM,
9167 arm_lr_names);
9168 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9169 ARM_PC_REGNUM,
9170 arm_pc_names);
9779414d
DJ
9171 if (is_m)
9172 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9173 ARM_PS_REGNUM, "xpsr");
9174 else
9175 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9176 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9177
9178 if (!valid_p)
9179 {
9180 tdesc_data_cleanup (tdesc_data);
9181 return NULL;
9182 }
9183
9779414d 9184 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9185 "org.gnu.gdb.arm.fpa");
9186 if (feature != NULL)
9187 {
9188 valid_p = 1;
9189 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9190 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9191 arm_register_names[i]);
9192 if (!valid_p)
9193 {
9194 tdesc_data_cleanup (tdesc_data);
9195 return NULL;
9196 }
9197 }
ff6f572f
DJ
9198 else
9199 have_fpa_registers = 0;
9200
9779414d 9201 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9202 "org.gnu.gdb.xscale.iwmmxt");
9203 if (feature != NULL)
9204 {
9205 static const char *const iwmmxt_names[] = {
9206 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9207 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9208 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9209 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9210 };
9211
9212 valid_p = 1;
9213 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9214 valid_p
9215 &= tdesc_numbered_register (feature, tdesc_data, i,
9216 iwmmxt_names[i - ARM_WR0_REGNUM]);
9217
9218 /* Check for the control registers, but do not fail if they
9219 are missing. */
9220 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9221 tdesc_numbered_register (feature, tdesc_data, i,
9222 iwmmxt_names[i - ARM_WR0_REGNUM]);
9223
9224 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9225 valid_p
9226 &= tdesc_numbered_register (feature, tdesc_data, i,
9227 iwmmxt_names[i - ARM_WR0_REGNUM]);
9228
9229 if (!valid_p)
9230 {
9231 tdesc_data_cleanup (tdesc_data);
9232 return NULL;
9233 }
a56cc1ce
YQ
9234
9235 have_wmmx_registers = 1;
ff6f572f 9236 }
58d6951d
DJ
9237
9238 /* If we have a VFP unit, check whether the single precision registers
9239 are present. If not, then we will synthesize them as pseudo
9240 registers. */
9779414d 9241 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9242 "org.gnu.gdb.arm.vfp");
9243 if (feature != NULL)
9244 {
9245 static const char *const vfp_double_names[] = {
9246 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9247 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9248 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9249 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9250 };
9251
9252 /* Require the double precision registers. There must be either
9253 16 or 32. */
9254 valid_p = 1;
9255 for (i = 0; i < 32; i++)
9256 {
9257 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9258 ARM_D0_REGNUM + i,
9259 vfp_double_names[i]);
9260 if (!valid_p)
9261 break;
9262 }
2b9e5ea6
UW
9263 if (!valid_p && i == 16)
9264 valid_p = 1;
58d6951d 9265
2b9e5ea6
UW
9266 /* Also require FPSCR. */
9267 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9268 ARM_FPSCR_REGNUM, "fpscr");
9269 if (!valid_p)
58d6951d
DJ
9270 {
9271 tdesc_data_cleanup (tdesc_data);
9272 return NULL;
9273 }
9274
9275 if (tdesc_unnumbered_register (feature, "s0") == 0)
9276 have_vfp_pseudos = 1;
9277
330c6ca9 9278 vfp_register_count = i;
58d6951d
DJ
9279
9280 /* If we have VFP, also check for NEON. The architecture allows
9281 NEON without VFP (integer vector operations only), but GDB
9282 does not support that. */
9779414d 9283 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9284 "org.gnu.gdb.arm.neon");
9285 if (feature != NULL)
9286 {
9287 /* NEON requires 32 double-precision registers. */
9288 if (i != 32)
9289 {
9290 tdesc_data_cleanup (tdesc_data);
9291 return NULL;
9292 }
9293
9294 /* If there are quad registers defined by the stub, use
9295 their type; otherwise (normally) provide them with
9296 the default type. */
9297 if (tdesc_unnumbered_register (feature, "q0") == 0)
9298 have_neon_pseudos = 1;
9299
9300 have_neon = 1;
9301 }
9302 }
123dc839 9303 }
39bbf761 9304
28e97307
DJ
9305 /* If there is already a candidate, use it. */
9306 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9307 best_arch != NULL;
9308 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9309 {
b8926edc
DJ
9310 if (arm_abi != ARM_ABI_AUTO
9311 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9312 continue;
9313
b8926edc
DJ
9314 if (fp_model != ARM_FLOAT_AUTO
9315 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9316 continue;
9317
58d6951d
DJ
9318 /* There are various other properties in tdep that we do not
9319 need to check here: those derived from a target description,
9320 since gdbarches with a different target description are
9321 automatically disqualified. */
9322
9779414d
DJ
9323 /* Do check is_m, though, since it might come from the binary. */
9324 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9325 continue;
9326
28e97307
DJ
9327 /* Found a match. */
9328 break;
9329 }
97e03143 9330
28e97307 9331 if (best_arch != NULL)
123dc839
DJ
9332 {
9333 if (tdesc_data != NULL)
9334 tdesc_data_cleanup (tdesc_data);
9335 return best_arch->gdbarch;
9336 }
28e97307 9337
8d749320 9338 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9339 gdbarch = gdbarch_alloc (&info, tdep);
9340
28e97307
DJ
9341 /* Record additional information about the architecture we are defining.
9342 These are gdbarch discriminators, like the OSABI. */
9343 tdep->arm_abi = arm_abi;
9344 tdep->fp_model = fp_model;
9779414d 9345 tdep->is_m = is_m;
ff6f572f 9346 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9347 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9348 gdb_assert (vfp_register_count == 0
9349 || vfp_register_count == 16
9350 || vfp_register_count == 32);
9351 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9352 tdep->have_vfp_pseudos = have_vfp_pseudos;
9353 tdep->have_neon_pseudos = have_neon_pseudos;
9354 tdep->have_neon = have_neon;
08216dd7 9355
25f8c692
JL
9356 arm_register_g_packet_guesses (gdbarch);
9357
08216dd7 9358 /* Breakpoints. */
9d4fde75 9359 switch (info.byte_order_for_code)
67255d04
RE
9360 {
9361 case BFD_ENDIAN_BIG:
66e810cd
RE
9362 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9363 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9364 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9365 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9366
67255d04
RE
9367 break;
9368
9369 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9370 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9371 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9372 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9373 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9374
67255d04
RE
9375 break;
9376
9377 default:
9378 internal_error (__FILE__, __LINE__,
edefbb7c 9379 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9380 }
9381
d7b486e7
RE
9382 /* On ARM targets char defaults to unsigned. */
9383 set_gdbarch_char_signed (gdbarch, 0);
9384
cca44b1b
JB
9385 /* Note: for displaced stepping, this includes the breakpoint, and one word
9386 of additional scratch space. This setting isn't used for anything beside
9387 displaced stepping at present. */
9388 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9389
9df628e0 9390 /* This should be low enough for everything. */
97e03143 9391 tdep->lowest_pc = 0x20;
94c30b78 9392 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9393
7c00367c
MK
9394 /* The default, for both APCS and AAPCS, is to return small
9395 structures in registers. */
9396 tdep->struct_return = reg_struct_return;
9397
2dd604e7 9398 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9399 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9400
7eb89530
YQ
9401 if (is_m)
9402 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9403
756fe439
DJ
9404 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9405
148754e5 9406 /* Frame handling. */
a262aec2 9407 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9408 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9409 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9410
eb5492fa 9411 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9412
34e8f22d 9413 /* Address manipulation. */
34e8f22d
RE
9414 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9415
34e8f22d
RE
9416 /* Advance PC across function entry code. */
9417 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9418
c9cf6e20
MG
9419 /* Detect whether PC is at a point where the stack has been destroyed. */
9420 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9421
190dce09
UW
9422 /* Skip trampolines. */
9423 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9424
34e8f22d
RE
9425 /* The stack grows downward. */
9426 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9427
9428 /* Breakpoint manipulation. */
d19280ad 9429 SET_GDBARCH_BREAKPOINT_MANIPULATION (arm);
177321bd
DJ
9430 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9431 arm_remote_breakpoint_from_pc);
34e8f22d
RE
9432
9433 /* Information about registers, etc. */
34e8f22d
RE
9434 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9435 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9436 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9437 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9438 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9439
ff6f572f
DJ
9440 /* This "info float" is FPA-specific. Use the generic version if we
9441 do not have FPA. */
9442 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9443 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9444
26216b98 9445 /* Internal <-> external register number maps. */
ff6f572f 9446 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9447 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9448
34e8f22d
RE
9449 set_gdbarch_register_name (gdbarch, arm_register_name);
9450
9451 /* Returning results. */
2af48f68 9452 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9453
03d48a7d
RE
9454 /* Disassembly. */
9455 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9456
34e8f22d
RE
9457 /* Minsymbol frobbing. */
9458 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9459 set_gdbarch_coff_make_msymbol_special (gdbarch,
9460 arm_coff_make_msymbol_special);
60c5725c 9461 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9462
f9d67f43
DJ
9463 /* Thumb-2 IT block support. */
9464 set_gdbarch_adjust_breakpoint_address (gdbarch,
9465 arm_adjust_breakpoint_address);
9466
0d5de010
DJ
9467 /* Virtual tables. */
9468 set_gdbarch_vbit_in_delta (gdbarch, 1);
9469
97e03143 9470 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9471 gdbarch_init_osabi (info, gdbarch);
97e03143 9472
b39cc962
DJ
9473 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9474
eb5492fa 9475 /* Add some default predicates. */
2ae28aa9
YQ
9476 if (is_m)
9477 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9478 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9479 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9480 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9481 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9482 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9483
97e03143
RE
9484 /* Now we have tuned the configuration, set a few final things,
9485 based on what the OS ABI has told us. */
9486
b8926edc
DJ
9487 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9488 binaries are always marked. */
9489 if (tdep->arm_abi == ARM_ABI_AUTO)
9490 tdep->arm_abi = ARM_ABI_APCS;
9491
e3039479
UW
9492 /* Watchpoints are not steppable. */
9493 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9494
b8926edc
DJ
9495 /* We used to default to FPA for generic ARM, but almost nobody
9496 uses that now, and we now provide a way for the user to force
9497 the model. So default to the most useful variant. */
9498 if (tdep->fp_model == ARM_FLOAT_AUTO)
9499 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9500
9df628e0
RE
9501 if (tdep->jb_pc >= 0)
9502 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9503
08216dd7 9504 /* Floating point sizes and format. */
8da61cc4 9505 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9506 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9507 {
8da61cc4
DJ
9508 set_gdbarch_double_format
9509 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9510 set_gdbarch_long_double_format
9511 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9512 }
9513 else
9514 {
9515 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9516 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9517 }
9518
58d6951d
DJ
9519 if (have_vfp_pseudos)
9520 {
9521 /* NOTE: These are the only pseudo registers used by
9522 the ARM target at the moment. If more are added, a
9523 little more care in numbering will be needed. */
9524
9525 int num_pseudos = 32;
9526 if (have_neon_pseudos)
9527 num_pseudos += 16;
9528 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9529 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9530 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9531 }
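/* (With this numbering, s0..s31 occupy pseudo register numbers
   num_regs..num_regs+31 and, when present, q0..q15 occupy
   num_regs+32..num_regs+47, matching arm_register_name and
   arm_pseudo_read above.)  */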
9532
123dc839 9533 if (tdesc_data)
58d6951d
DJ
9534 {
9535 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9536
9779414d 9537 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9538
9539 /* Override tdesc_register_type to adjust the types of VFP
9540 registers for NEON. */
9541 set_gdbarch_register_type (gdbarch, arm_register_type);
9542 }
123dc839
DJ
9543
9544 /* Add standard register aliases. We add aliases even for those
 9545 names which are used by the current architecture - it's simpler,
9546 and does no harm, since nothing ever lists user registers. */
9547 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9548 user_reg_add (gdbarch, arm_register_aliases[i].name,
9549 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9550
39bbf761
RE
9551 return gdbarch;
9552}
9553
97e03143 9554static void
2af46ca0 9555arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9556{
2af46ca0 9557 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9558
9559 if (tdep == NULL)
9560 return;
9561
edefbb7c 9562 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9563 (unsigned long) tdep->lowest_pc);
9564}
9565
a78f21af
AC
9566extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9567
c906108c 9568void
ed9a39eb 9569_initialize_arm_tdep (void)
c906108c 9570{
bc90b915
FN
9571 struct ui_file *stb;
9572 long length;
53904c9e
AC
9573 const char *setname;
9574 const char *setdesc;
4bd7b427 9575 const char *const *regnames;
bec2ab5a 9576 int i;
bc90b915 9577 static char *helptext;
edefbb7c
AC
9578 char regdesc[1024], *rdptr = regdesc;
9579 size_t rest = sizeof (regdesc);
085dd6e6 9580
42cf1509 9581 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9582
60c5725c 9583 arm_objfile_data_key
c1bd65d0 9584 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9585
0e9e9abd
UW
9586 /* Add ourselves to objfile event chain. */
9587 observer_attach_new_objfile (arm_exidx_new_objfile);
9588 arm_exidx_data_key
9589 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9590
70f80edf
JT
9591 /* Register an ELF OS ABI sniffer for ARM binaries. */
9592 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9593 bfd_target_elf_flavour,
9594 arm_elf_osabi_sniffer);
9595
9779414d
DJ
9596 /* Initialize the standard target descriptions. */
9597 initialize_tdesc_arm_with_m ();
25f8c692 9598 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9599 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9600 initialize_tdesc_arm_with_iwmmxt ();
9601 initialize_tdesc_arm_with_vfpv2 ();
9602 initialize_tdesc_arm_with_vfpv3 ();
9603 initialize_tdesc_arm_with_neon ();
9779414d 9604
94c30b78 9605 /* Get the number of possible sets of register names defined in opcodes. */
afd7eef0
RE
9606 num_disassembly_options = get_arm_regname_num_options ();
9607
9608 /* Add root prefix command for all "set arm"/"show arm" commands. */
9609 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9610 _("Various ARM-specific commands."),
afd7eef0
RE
9611 &setarmcmdlist, "set arm ", 0, &setlist);
9612
9613 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9614 _("Various ARM-specific commands."),
afd7eef0 9615 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9616
94c30b78 9617 /* Sync the opcode insn printer with our register viewer. */
bc90b915 9618 parse_arm_disassembler_option ("reg-names-std");
c5aa993b 9619
eefe576e
AC
9620 /* Initialize the array that will be passed to
9621 add_setshow_enum_cmd(). */
8d749320
SM
9622 valid_disassembly_styles = XNEWVEC (const char *,
9623 num_disassembly_options + 1);
afd7eef0 9624 for (i = 0; i < num_disassembly_options; i++)
bc90b915 9625 {
bec2ab5a 9626 get_arm_regnames (i, &setname, &setdesc, &regnames);
afd7eef0 9627 valid_disassembly_styles[i] = setname;
edefbb7c
AC
9628 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9629 rdptr += length;
9630 rest -= length;
123dc839
DJ
9631 /* When we find the default names, tell the disassembler to use
9632 them. */
bc90b915
FN
9633 if (!strcmp (setname, "std"))
9634 {
afd7eef0 9635 disassembly_style = setname;
bc90b915
FN
9636 set_arm_regname_option (i);
9637 }
9638 }
94c30b78 9639 /* Mark the end of valid options. */
afd7eef0 9640 valid_disassembly_styles[num_disassembly_options] = NULL;
c906108c 9641
edefbb7c
AC
9642 /* Create the help text. */
9643 stb = mem_fileopen ();
9644 fprintf_unfiltered (stb, "%s%s%s",
9645 _("The valid values are:\n"),
9646 regdesc,
9647 _("The default is \"std\"."));
759ef836 9648 helptext = ui_file_xstrdup (stb, NULL);
bc90b915 9649 ui_file_delete (stb);
ed9a39eb 9650
edefbb7c
AC
9651 add_setshow_enum_cmd("disassembler", no_class,
9652 valid_disassembly_styles, &disassembly_style,
9653 _("Set the disassembly style."),
9654 _("Show the disassembly style."),
9655 helptext,
2c5b56ce 9656 set_disassembly_style_sfunc,
0963b4bd
MS
9657 NULL, /* FIXME: i18n: The disassembly style is
9658 \"%s\". */
7376b4c2 9659 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9660
9661 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9662 _("Set usage of ARM 32-bit mode."),
9663 _("Show usage of ARM 32-bit mode."),
9664 _("When off, a 26-bit PC will be used."),
2c5b56ce 9665 NULL,
0963b4bd
MS
9666 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9667 mode is %s. */
26304000 9668 &setarmcmdlist, &showarmcmdlist);
c906108c 9669
fd50bc42 9670 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9671 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9672 _("Set the floating point type."),
9673 _("Show the floating point type."),
9674 _("auto - Determine the FP typefrom the OS-ABI.\n\
9675softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9676fpa - FPA co-processor (GCC compiled).\n\
9677softvfp - Software FP with pure-endian doubles.\n\
9678vfp - VFP co-processor."),
edefbb7c 9679 set_fp_model_sfunc, show_fp_model,
7376b4c2 9680 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9681
28e97307
DJ
9682 /* Add a command to allow the user to force the ABI. */
9683 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9684 _("Set the ABI."),
9685 _("Show the ABI."),
9686 NULL, arm_set_abi, arm_show_abi,
9687 &setarmcmdlist, &showarmcmdlist);
9688
0428b8f5
DJ
9689 /* Add two commands to allow the user to force the assumed
9690 execution mode. */
9691 add_setshow_enum_cmd ("fallback-mode", class_support,
9692 arm_mode_strings, &arm_fallback_mode_string,
9693 _("Set the mode assumed when symbols are unavailable."),
9694 _("Show the mode assumed when symbols are unavailable."),
9695 NULL, NULL, arm_show_fallback_mode,
9696 &setarmcmdlist, &showarmcmdlist);
9697 add_setshow_enum_cmd ("force-mode", class_support,
9698 arm_mode_strings, &arm_force_mode_string,
9699 _("Set the mode assumed even when symbols are available."),
9700 _("Show the mode assumed even when symbols are available."),
9701 NULL, NULL, arm_show_force_mode,
9702 &setarmcmdlist, &showarmcmdlist);
9703
6529d2dd 9704 /* Debugging flag. */
edefbb7c
AC
9705 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9706 _("Set ARM debugging."),
9707 _("Show ARM debugging."),
9708 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9709 NULL,
7915a72c 9710 NULL, /* FIXME: i18n: "ARM debugging is %s." */
26304000 9711 &setdebuglist, &showdebuglist);
c906108c 9712}
72508ac0
PO
9713
9714/* ARM-reversible process record data structures. */
9715
9716#define ARM_INSN_SIZE_BYTES 4
9717#define THUMB_INSN_SIZE_BYTES 2
9718#define THUMB2_INSN_SIZE_BYTES 4
9719
9720
71e396f9
LM
9721/* Position of the bit within a 32-bit ARM instruction
9722 that defines whether the instruction is a load or store. */
72508ac0
PO
9723#define INSN_S_L_BIT_NUM 20
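/* (In the ARM single data transfer encodings, this L bit set means a
   load, e.g. LDR; clear means a store, e.g. STR.)  */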
9724
9725#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9726 do \
9727 { \
9728 unsigned int reg_len = LENGTH; \
9729 if (reg_len) \
9730 { \
9731 REGS = XNEWVEC (uint32_t, reg_len); \
9732 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9733 } \
9734 } \
9735 while (0)
9736
9737#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9738 do \
9739 { \
9740 unsigned int mem_len = LENGTH; \
9741 if (mem_len) \
9742 { \
9743 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9744 memcpy(&MEMS->len, &RECORD_BUF[0], \
9745 sizeof(struct arm_mem_r) * LENGTH); \
9746 } \
9747 } \
9748 while (0)
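/* (Typical usage in the record routines below: register numbers are
   collected in a local record_buf[] and length/address pairs in a
   local record_buf_mem[], then copied into the insn_decode_record
   with REG_ALLOC and MEM_ALLOC once the counts are known.)  */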
9749
 9750/* Checks whether the insn has already been recorded or is yet to be decoded (boolean expression). */
9751#define INSN_RECORDED(ARM_RECORD) \
9752 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9753
9754/* ARM memory record structure. */
9755struct arm_mem_r
9756{
9757 uint32_t len; /* Record length. */
bfbbec00 9758 uint32_t addr; /* Memory address. */
72508ac0
PO
9759};
9760
9761/* ARM instruction record contains opcode of current insn
9762 and execution state (before entry to decode_insn()),
9763 contains list of to-be-modified registers and
9764 memory blocks (on return from decode_insn()). */
9765
9766typedef struct insn_decode_record_t
9767{
9768 struct gdbarch *gdbarch;
9769 struct regcache *regcache;
9770 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9771 uint32_t arm_insn; /* Should accommodate thumb. */
9772 uint32_t cond; /* Condition code. */
9773 uint32_t opcode; /* Insn opcode. */
9774 uint32_t decode; /* Insn decode bits. */
9775 uint32_t mem_rec_count; /* No of mem records. */
9776 uint32_t reg_rec_count; /* No of reg records. */
9777 uint32_t *arm_regs; /* Registers to be saved for this record. */
9778 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9779} insn_decode_record;
9780
9781
9782/* Checks ARM SBZ and SBO mandatory fields. */
9783
9784static int
9785sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9786{
9787 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9788
9789 if (!len)
9790 return 1;
9791
9792 if (!sbo)
9793 ones = ~ones;
9794
9795 while (ones)
9796 {
9797 if (!(ones & sbo))
9798 {
9799 return 0;
9800 }
9801 ones = ones >> 1;
9802 }
9803 return 1;
9804}
9805
c6ec2b30
OJ
9806enum arm_record_result
9807{
9808 ARM_RECORD_SUCCESS = 0,
9809 ARM_RECORD_FAILURE = 1
9810};
9811
72508ac0
PO
9812typedef enum
9813{
9814 ARM_RECORD_STRH=1,
9815 ARM_RECORD_STRD
9816} arm_record_strx_t;
9817
9818typedef enum
9819{
9820 ARM_RECORD=1,
9821 THUMB_RECORD,
9822 THUMB2_RECORD
9823} record_type_t;
9824
9825
9826static int
9827arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9828 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9829{
9830
9831 struct regcache *reg_cache = arm_insn_r->regcache;
9832 ULONGEST u_regval[2]= {0};
9833
9834 uint32_t reg_src1 = 0, reg_src2 = 0;
9835 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9836
9837 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9838 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9839
9840 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9841 {
9842 /* 1) Handle misc store, immediate offset. */
9843 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9844 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9845 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9846 regcache_raw_read_unsigned (reg_cache, reg_src1,
9847 &u_regval[0]);
9848 if (ARM_PC_REGNUM == reg_src1)
9849 {
9850 /* If R15 was used as Rn, hence current PC+8. */
9851 u_regval[0] = u_regval[0] + 8;
9852 }
9853 offset_8 = (immed_high << 4) | immed_low;
9854 /* Calculate target store address. */
9855 if (14 == arm_insn_r->opcode)
9856 {
9857 tgt_mem_addr = u_regval[0] + offset_8;
9858 }
9859 else
9860 {
9861 tgt_mem_addr = u_regval[0] - offset_8;
9862 }
9863 if (ARM_RECORD_STRH == str_type)
9864 {
9865 record_buf_mem[0] = 2;
9866 record_buf_mem[1] = tgt_mem_addr;
9867 arm_insn_r->mem_rec_count = 1;
9868 }
9869 else if (ARM_RECORD_STRD == str_type)
9870 {
9871 record_buf_mem[0] = 4;
9872 record_buf_mem[1] = tgt_mem_addr;
9873 record_buf_mem[2] = 4;
9874 record_buf_mem[3] = tgt_mem_addr + 4;
9875 arm_insn_r->mem_rec_count = 2;
9876 }
9877 }
9878 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9879 {
9880 /* 2) Store, register offset. */
9881 /* Get Rm. */
9882 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9883 /* Get Rn. */
9884 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9885 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9886 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9887 if (15 == reg_src2)
9888 {
 9889 /* If R15 was used as Rn, its value is the current PC+8. */
9890 u_regval[0] = u_regval[0] + 8;
9891 }
9892 /* Calculate target store address, Rn +/- Rm, register offset. */
9893 if (12 == arm_insn_r->opcode)
9894 {
9895 tgt_mem_addr = u_regval[0] + u_regval[1];
9896 }
9897 else
9898 {
9899 tgt_mem_addr = u_regval[1] - u_regval[0];
9900 }
9901 if (ARM_RECORD_STRH == str_type)
9902 {
9903 record_buf_mem[0] = 2;
9904 record_buf_mem[1] = tgt_mem_addr;
9905 arm_insn_r->mem_rec_count = 1;
9906 }
9907 else if (ARM_RECORD_STRD == str_type)
9908 {
9909 record_buf_mem[0] = 4;
9910 record_buf_mem[1] = tgt_mem_addr;
9911 record_buf_mem[2] = 4;
9912 record_buf_mem[3] = tgt_mem_addr + 4;
9913 arm_insn_r->mem_rec_count = 2;
9914 }
9915 }
9916 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9917 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9918 {
9919 /* 3) Store, immediate pre-indexed. */
9920 /* 5) Store, immediate post-indexed. */
9921 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9922 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9923 offset_8 = (immed_high << 4) | immed_low;
9924 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9925 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
 9926 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9927 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9928 {
9929 tgt_mem_addr = u_regval[0] + offset_8;
9930 }
9931 else
9932 {
9933 tgt_mem_addr = u_regval[0] - offset_8;
9934 }
9935 if (ARM_RECORD_STRH == str_type)
9936 {
9937 record_buf_mem[0] = 2;
9938 record_buf_mem[1] = tgt_mem_addr;
9939 arm_insn_r->mem_rec_count = 1;
9940 }
9941 else if (ARM_RECORD_STRD == str_type)
9942 {
9943 record_buf_mem[0] = 4;
9944 record_buf_mem[1] = tgt_mem_addr;
9945 record_buf_mem[2] = 4;
9946 record_buf_mem[3] = tgt_mem_addr + 4;
9947 arm_insn_r->mem_rec_count = 2;
9948 }
9949 /* Record Rn also as it changes. */
9950 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9951 arm_insn_r->reg_rec_count = 1;
9952 }
9953 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9954 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9955 {
9956 /* 4) Store, register pre-indexed. */
9957 /* 6) Store, register post -indexed. */
9958 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9959 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9960 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9961 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9962 /* Calculate target store address, Rn +/- Rm, register offset. */
9963 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9964 {
9965 tgt_mem_addr = u_regval[0] + u_regval[1];
9966 }
9967 else
9968 {
9969 tgt_mem_addr = u_regval[1] - u_regval[0];
9970 }
9971 if (ARM_RECORD_STRH == str_type)
9972 {
9973 record_buf_mem[0] = 2;
9974 record_buf_mem[1] = tgt_mem_addr;
9975 arm_insn_r->mem_rec_count = 1;
9976 }
9977 else if (ARM_RECORD_STRD == str_type)
9978 {
9979 record_buf_mem[0] = 4;
9980 record_buf_mem[1] = tgt_mem_addr;
9981 record_buf_mem[2] = 4;
9982 record_buf_mem[3] = tgt_mem_addr + 4;
9983 arm_insn_r->mem_rec_count = 2;
9984 }
9985 /* Record Rn also as it changes. */
9986 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9987 arm_insn_r->reg_rec_count = 1;
9988 }
9989 return 0;
9990}
9991
9992/* Handling ARM extension space insns. */
9993
9994static int
9995arm_record_extension_space (insn_decode_record *arm_insn_r)
9996{
 9997 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
9998 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9999 uint32_t record_buf[8], record_buf_mem[8];
10000 uint32_t reg_src1 = 0;
72508ac0
PO
10001 struct regcache *reg_cache = arm_insn_r->regcache;
10002 ULONGEST u_regval = 0;
10003
10004 gdb_assert (!INSN_RECORDED(arm_insn_r));
10005 /* Handle unconditional insn extension space. */
10006
10007 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10008 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10009 if (arm_insn_r->cond)
10010 {
10011 /* PLD has no effect on the architectural state; it just affects
10012 the caches. */
10013 if (5 == ((opcode1 & 0xE0) >> 5))
10014 {
10015 /* BLX(1) */
10016 record_buf[0] = ARM_PS_REGNUM;
10017 record_buf[1] = ARM_LR_REGNUM;
10018 arm_insn_r->reg_rec_count = 2;
10019 }
10020 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10021 }
10022
10023
10024 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10025 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10026 {
10027 ret = -1;
10028 /* Undefined instruction on ARM V5; need to handle if later
10029 versions define it. */
10030 }
10031
10032 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10033 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10034 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10035
10036 /* Handle arithmetic insn extension space. */
10037 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10038 && !INSN_RECORDED(arm_insn_r))
10039 {
10040 /* Handle MLA(S) and MUL(S). */
10041 if (0 <= insn_op1 && 3 >= insn_op1)
10042 {
10043 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10044 record_buf[1] = ARM_PS_REGNUM;
10045 arm_insn_r->reg_rec_count = 2;
10046 }
10047 else if (4 <= insn_op1 && 15 >= insn_op1)
10048 {
10049 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10050 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10051 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10052 record_buf[2] = ARM_PS_REGNUM;
10053 arm_insn_r->reg_rec_count = 3;
10054 }
10055 }
10056
10057 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10058 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10059 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10060
10061 /* Handle control insn extension space. */
10062
10063 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10064 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10065 {
10066 if (!bit (arm_insn_r->arm_insn,25))
10067 {
10068 if (!bits (arm_insn_r->arm_insn, 4, 7))
10069 {
10070 if ((0 == insn_op1) || (2 == insn_op1))
10071 {
10072 /* MRS. */
10073 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10074 arm_insn_r->reg_rec_count = 1;
10075 }
10076 else if (1 == insn_op1)
10077 {
10078 /* CPSR is going to be changed. */
10079 record_buf[0] = ARM_PS_REGNUM;
10080 arm_insn_r->reg_rec_count = 1;
10081 }
10082 else if (3 == insn_op1)
10083 {
10084 /* SPSR is going to be changed. */
10085 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
10086 return -1;
10087 }
10088 }
10089 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10090 {
10091 if (1 == insn_op1)
10092 {
10093 /* BX. */
10094 record_buf[0] = ARM_PS_REGNUM;
10095 arm_insn_r->reg_rec_count = 1;
10096 }
10097 else if (3 == insn_op1)
10098 {
10099 /* CLZ. */
10100 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10101 arm_insn_r->reg_rec_count = 1;
10102 }
10103 }
10104 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10105 {
10106 /* BLX. */
10107 record_buf[0] = ARM_PS_REGNUM;
10108 record_buf[1] = ARM_LR_REGNUM;
10109 arm_insn_r->reg_rec_count = 2;
10110 }
10111 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10112 {
10113 /* QADD, QSUB, QDADD, QDSUB */
10114 record_buf[0] = ARM_PS_REGNUM;
10115 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10116 arm_insn_r->reg_rec_count = 2;
10117 }
10118 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10119 {
10120 /* BKPT. */
10121 record_buf[0] = ARM_PS_REGNUM;
10122 record_buf[1] = ARM_LR_REGNUM;
10123 arm_insn_r->reg_rec_count = 2;
10124
10125 /* Save SPSR also; how? */
72508ac0
PO
10126 return -1;
10127 }
10128 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10129 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10130 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10131 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10132 )
10133 {
10134 if (0 == insn_op1 || 1 == insn_op1)
10135 {
10136 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10137 /* We don't do optimization for SMULW<y>, where we
10138 need only Rd. */
10139 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10140 record_buf[1] = ARM_PS_REGNUM;
10141 arm_insn_r->reg_rec_count = 2;
10142 }
10143 else if (2 == insn_op1)
10144 {
10145 /* SMLAL<x><y>. */
10146 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10147 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10148 arm_insn_r->reg_rec_count = 2;
10149 }
10150 else if (3 == insn_op1)
10151 {
10152 /* SMUL<x><y>. */
10153 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10154 arm_insn_r->reg_rec_count = 1;
10155 }
10156 }
10157 }
10158 else
10159 {
10160 /* MSR : immediate form. */
10161 if (1 == insn_op1)
10162 {
10163 /* CPSR is going to be changed. */
10164 record_buf[0] = ARM_PS_REGNUM;
10165 arm_insn_r->reg_rec_count = 1;
10166 }
10167 else if (3 == insn_op1)
10168 {
10169 /* SPSR is going to be changed. */
10170 /* We need to get the SPSR value, which is yet to be done. */
72508ac0
PO
10171 return -1;
10172 }
10173 }
10174 }
10175
10176 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10177 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10178 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10179
10180 /* Handle load/store insn extension space. */
10181
10182 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10183 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10184 && !INSN_RECORDED(arm_insn_r))
10185 {
10186 /* SWP/SWPB. */
10187 if (0 == insn_op1)
10188 {
10189 /* This insn changes both a register and memory. */
10190 /* SWP or SWPB insn. */
10191 /* Get memory address given by Rn. */
10192 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10193 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10194 /* SWP insn swaps a word. */
10195 if (8 == arm_insn_r->opcode)
10196 {
10197 record_buf_mem[0] = 4;
10198 }
10199 else
10200 {
10201 /* SWPB insn swaps only a byte. */
10202 record_buf_mem[0] = 1;
10203 }
10204 record_buf_mem[1] = u_regval;
10205 arm_insn_r->mem_rec_count = 1;
10206 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10207 arm_insn_r->reg_rec_count = 1;
10208 }
10209 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10210 {
10211 /* STRH. */
10212 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10213 ARM_RECORD_STRH);
10214 }
10215 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10216 {
10217 /* LDRD. */
10218 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10219 record_buf[1] = record_buf[0] + 1;
10220 arm_insn_r->reg_rec_count = 2;
10221 }
10222 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10223 {
10224 /* STRD. */
10225 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10226 ARM_RECORD_STRD);
10227 }
10228 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10229 {
10230 /* LDRH, LDRSB, LDRSH. */
10231 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10232 arm_insn_r->reg_rec_count = 1;
10233 }
10234
10235 }
10236
10237 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10238 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10239 && !INSN_RECORDED(arm_insn_r))
10240 {
10241 ret = -1;
10242 /* Handle coprocessor insn extension space. */
10243 }
10244
10245 /* To be done for ARMv5 and later; as of now we return -1. */
10246 if (-1 == ret)
ca92db2d 10247 return ret;
72508ac0
PO
10248
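 /* REG_ALLOC and MEM_ALLOC (macros defined earlier in this file) copy the
    collected register numbers and the (length, address) memory pairs into
    the instruction record, so the recorder can restore them when replaying
    in reverse. */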
10249 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10250 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10251
10252 return ret;
10253}
10254
10255/* Handling opcode 000 insns. */
10256
10257static int
10258arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10259{
10260 struct regcache *reg_cache = arm_insn_r->regcache;
10261 uint32_t record_buf[8], record_buf_mem[8];
10262 ULONGEST u_regval[2] = {0};
10263
bec2ab5a 10264 uint32_t reg_src1 = 0, reg_dest = 0;
72508ac0
PO
10265 uint32_t opcode1 = 0;
10266
10267 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10268 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10269 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10270
10271 /* Data processing insn /multiply insn. */
10272 if (9 == arm_insn_r->decode
10273 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10274 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10275 {
10276 /* Handle multiply instructions. */
10277 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10278 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10279 {
10280 /* Handle MLA and MUL. */
10281 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10282 record_buf[1] = ARM_PS_REGNUM;
10283 arm_insn_r->reg_rec_count = 2;
10284 }
10285 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10286 {
10287 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10288 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10289 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10290 record_buf[2] = ARM_PS_REGNUM;
10291 arm_insn_r->reg_rec_count = 3;
10292 }
10293 }
10294 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10295 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10296 {
10297 /* Handle misc load insns; the 20th bit (L) is 1. */
10298 /* The LDR insn is capable of branching: if MOV LR, PC precedes an
10299 LDR insn that has R15 as its destination, the pair emulates a
10300 branch-and-link insn, and hence we need to save CPSR and PC as
10301 well. It is not clear this is the right place, since only the
10302 opcode = 010 LDR form makes this happen when R15 is
10303 used. */
10304 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10305 if (15 != reg_dest)
10306 {
10307 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10308 arm_insn_r->reg_rec_count = 1;
10309 }
10310 else
10311 {
10312 record_buf[0] = reg_dest;
10313 record_buf[1] = ARM_PS_REGNUM;
10314 arm_insn_r->reg_rec_count = 2;
10315 }
10316 }
10317 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10318 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10319 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10320 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10321 {
10322 /* Handle MSR insn. */
10323 if (9 == arm_insn_r->opcode)
10324 {
10325 /* CPSR is going to be changed. */
10326 record_buf[0] = ARM_PS_REGNUM;
10327 arm_insn_r->reg_rec_count = 1;
10328 }
10329 else
10330 {
10331 /* SPSR is going to be changed. */
10332 /* How to read SPSR value? */
72508ac0
PO
10333 return -1;
10334 }
10335 }
10336 else if (9 == arm_insn_r->decode
10337 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10338 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10339 {
10340 /* Handling SWP, SWPB. */
10341 /* This insn changes both a register and memory. */
10342 /* SWP or SWPB insn. */
10343
10344 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10345 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10346 /* SWP insn swaps a word. */
10347 if (8 == arm_insn_r->opcode)
10348 {
10349 record_buf_mem[0] = 4;
10350 }
10351 else
10352 {
10353 /* SWPB insn swaps only a byte. */
10354 record_buf_mem[0] = 1;
10355 }
10356 record_buf_mem[1] = u_regval[0];
10357 arm_insn_r->mem_rec_count = 1;
10358 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10359 arm_insn_r->reg_rec_count = 1;
10360 }
10361 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10362 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10363 {
10364 /* Handle BLX, branch and link/exchange. */
10365 if (9 == arm_insn_r->opcode)
10366 {
10367 /* The branch state (ARM or Thumb) is chosen by bit[0] of Rm, which
10368 sets the T bit of CPSR, and R14 stores the return address. */
10369 record_buf[0] = ARM_PS_REGNUM;
10370 record_buf[1] = ARM_LR_REGNUM;
10371 arm_insn_r->reg_rec_count = 2;
10372 }
10373 }
10374 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10375 {
10376 /* Handle enhanced software breakpoint insn, BKPT. */
10377 /* CPSR is changed to be executed in ARM state, disabling normal
10378 interrupts, entering abort mode. */
10379 /* According to high vector configuration PC is set. */
10380 /* If the user hits the breakpoint and types reverse, we
10381 need to go back with the previous CPSR and
10382 program counter. */
10383 record_buf[0] = ARM_PS_REGNUM;
10384 record_buf[1] = ARM_LR_REGNUM;
10385 arm_insn_r->reg_rec_count = 2;
10386
10387 /* Save SPSR also; how? */
72508ac0
PO
10388 return -1;
10389 }
10390 else if (11 == arm_insn_r->decode
10391 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10392 {
10393 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10394
10395 /* Handle str(x) insn */
10396 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10397 ARM_RECORD_STRH);
10398 }
10399 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10400 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10401 {
10402 /* Handle BX, branch and link/exchange. */
10403 /* The branch state is chosen by bit[0] of Rm, which sets the T bit of CPSR. */
10404 record_buf[0] = ARM_PS_REGNUM;
10405 arm_insn_r->reg_rec_count = 1;
10406 }
10407 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10408 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10409 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10410 {
10411 /* Count leading zeros: CLZ. */
10412 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10413 arm_insn_r->reg_rec_count = 1;
10414 }
10415 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10416 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10417 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10418 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10419 )
10420 {
10421 /* Handle MRS insn. */
10422 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10423 arm_insn_r->reg_rec_count = 1;
10424 }
10425 else if (arm_insn_r->opcode <= 15)
10426 {
10427 /* Normal data processing insns. */
10428 /* In all of the 11 shifter-operand modes, the insn modifies the
10429 destination register, which is specified by bits 12-15. */
10430 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10431 record_buf[1] = ARM_PS_REGNUM;
10432 arm_insn_r->reg_rec_count = 2;
10433 }
10434 else
10435 {
10436 return -1;
10437 }
10438
10439 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10440 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10441 return 0;
10442}
10443
10444/* Handling opcode 001 insns. */
10445
10446static int
10447arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10448{
10449 uint32_t record_buf[8], record_buf_mem[8];
10450
10451 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10452 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10453
10454 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10455 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10456 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10457 )
10458 {
10459 /* Handle MSR insn. */
10460 if (9 == arm_insn_r->opcode)
10461 {
10462 /* CPSR is going to be changed. */
10463 record_buf[0] = ARM_PS_REGNUM;
10464 arm_insn_r->reg_rec_count = 1;
10465 }
10466 else
10467 {
10468 /* SPSR is going to be changed. */
10469 }
10470 }
10471 else if (arm_insn_r->opcode <= 15)
10472 {
10473 /* Normal data processing insns. */
10474 /* In all of the 11 shifter-operand modes, the insn modifies the
10475 destination register, which is specified by bits 12-15. */
10476 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10477 record_buf[1] = ARM_PS_REGNUM;
10478 arm_insn_r->reg_rec_count = 2;
10479 }
10480 else
10481 {
10482 return -1;
10483 }
10484
10485 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10486 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10487 return 0;
10488}
10489
c55978a6
YQ
10490static int
10491arm_record_media (insn_decode_record *arm_insn_r)
10492{
10493 uint32_t record_buf[8];
10494
10495 switch (bits (arm_insn_r->arm_insn, 22, 24))
10496 {
10497 case 0:
10498 /* Parallel addition and subtraction, signed */
10499 case 1:
10500 /* Parallel addition and subtraction, unsigned */
10501 case 2:
10502 case 3:
10503 /* Packing, unpacking, saturation and reversal */
10504 {
10505 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10506
10507 record_buf[arm_insn_r->reg_rec_count++] = rd;
10508 }
10509 break;
10510
10511 case 4:
10512 case 5:
10513 /* Signed multiplies */
10514 {
10515 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10516 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10517
10518 record_buf[arm_insn_r->reg_rec_count++] = rd;
10519 if (op1 == 0x0)
10520 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10521 else if (op1 == 0x4)
10522 record_buf[arm_insn_r->reg_rec_count++]
10523 = bits (arm_insn_r->arm_insn, 12, 15);
10524 }
10525 break;
10526
10527 case 6:
10528 {
10529 if (bit (arm_insn_r->arm_insn, 21)
10530 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10531 {
10532 /* SBFX */
10533 record_buf[arm_insn_r->reg_rec_count++]
10534 = bits (arm_insn_r->arm_insn, 12, 15);
10535 }
10536 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10537 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10538 {
10539 /* USAD8 and USADA8 */
10540 record_buf[arm_insn_r->reg_rec_count++]
10541 = bits (arm_insn_r->arm_insn, 16, 19);
10542 }
10543 }
10544 break;
10545
10546 case 7:
10547 {
10548 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10549 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10550 {
10551 /* Permanently UNDEFINED */
10552 return -1;
10553 }
10554 else
10555 {
10556 /* BFC, BFI and UBFX */
10557 record_buf[arm_insn_r->reg_rec_count++]
10558 = bits (arm_insn_r->arm_insn, 12, 15);
10559 }
10560 }
10561 break;
10562
10563 default:
10564 return -1;
10565 }
10566
10567 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10568
10569 return 0;
10570}
10571
71e396f9 10572/* Handle ARM mode instructions with opcode 010. */
72508ac0
PO
10573
10574static int
10575arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10576{
10577 struct regcache *reg_cache = arm_insn_r->regcache;
10578
71e396f9
LM
10579 uint32_t reg_base , reg_dest;
10580 uint32_t offset_12, tgt_mem_addr;
72508ac0 10581 uint32_t record_buf[8], record_buf_mem[8];
71e396f9
LM
10582 unsigned char wback;
10583 ULONGEST u_regval;
72508ac0 10584
71e396f9
LM
10585 /* Calculate wback. */
10586 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10587 || (bit (arm_insn_r->arm_insn, 21) == 1);
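 /* That is: writeback happens for post-indexed addressing (P == 0) or for
    pre-indexed addressing with the writeback bit set (W == 1), e.g.
    LDR R0, [R1], #4 or LDR R0, [R1, #4]!. */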
72508ac0 10588
71e396f9
LM
10589 arm_insn_r->reg_rec_count = 0;
10590 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0
PO
10591
10592 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10593 {
71e396f9
LM
10594 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10595 and LDRT. */
10596
72508ac0 10597 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
71e396f9
LM
10598 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10599
10600 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10601 precedes an LDR instruction that has R15 as reg_dest, it
10602 emulates a branch and link instruction, and hence we need to save
10603 CPSR and PC as well. */
10604 if (ARM_PC_REGNUM == reg_dest)
10605 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10606
10607 /* If wback is true, also save the base register, which is going to be
10608 written to. */
10609 if (wback)
10610 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10611 }
10612 else
10613 {
71e396f9
LM
10614 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10615
72508ac0 10616 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
71e396f9
LM
10617 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10618
10619 /* Handle bit U. */
72508ac0 10620 if (bit (arm_insn_r->arm_insn, 23))
71e396f9
LM
10621 {
10622 /* U == 1: Add the offset. */
10623 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10624 }
72508ac0 10625 else
71e396f9
LM
10626 {
10627 /* U == 0: Subtract the offset. */
10628 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10629 }
10630
10631 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10632 bytes. */
10633 if (bit (arm_insn_r->arm_insn, 22))
10634 {
10635 /* STRB and STRBT: 1 byte. */
10636 record_buf_mem[0] = 1;
10637 }
10638 else
10639 {
10640 /* STR and STRT: 4 bytes. */
10641 record_buf_mem[0] = 4;
10642 }
10643
10644 /* Handle bit P. */
10645 if (bit (arm_insn_r->arm_insn, 24))
10646 record_buf_mem[1] = tgt_mem_addr;
10647 else
10648 record_buf_mem[1] = (uint32_t) u_regval;
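 /* With P == 1 (offset or pre-indexed addressing) the store goes to the
    computed address; with P == 0 (post-indexed) it goes to the original
    base value and the offset is only applied to Rn afterwards. */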
72508ac0 10649
72508ac0
PO
10650 arm_insn_r->mem_rec_count = 1;
10651
71e396f9
LM
10652 /* If wback is true, also save the base register, which is going to be
10653 written to. */
10654 if (wback)
10655 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10656 }
10657
10658 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10659 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10660 return 0;
10661}
10662
10663/* Handling opcode 011 insns. */
10664
10665static int
10666arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10667{
10668 struct regcache *reg_cache = arm_insn_r->regcache;
10669
10670 uint32_t shift_imm = 0;
10671 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10672 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10673 uint32_t record_buf[8], record_buf_mem[8];
10674
10675 LONGEST s_word;
10676 ULONGEST u_regval[2];
10677
c55978a6
YQ
10678 if (bit (arm_insn_r->arm_insn, 4))
10679 return arm_record_media (arm_insn_r);
10680
72508ac0
PO
10681 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10682 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10683
10684 /* Handle enhanced store insns and the LDRD DSP insn; the order
10685 below follows the addressing modes of the store insns, starting
10686 with STRH. */
10687
10688 /* LDR or STR? */
10689 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10690 {
10691 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10692 /* The LDR insn is capable of branching: if MOV LR, PC precedes an
10693 LDR insn that has R15 as its destination, the pair emulates a
10694 branch-and-link insn, and hence we need to save CPSR and PC as
10695 well. */
10696 if (15 != reg_dest)
10697 {
10698 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10699 arm_insn_r->reg_rec_count = 1;
10700 }
10701 else
10702 {
10703 record_buf[0] = reg_dest;
10704 record_buf[1] = ARM_PS_REGNUM;
10705 arm_insn_r->reg_rec_count = 2;
10706 }
10707 }
10708 else
10709 {
10710 if (! bits (arm_insn_r->arm_insn, 4, 11))
10711 {
10712 /* Store insn, register offset and register pre-indexed,
10713 register post-indexed. */
10714 /* Get Rm. */
10715 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10716 /* Get Rn. */
10717 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10718 regcache_raw_read_unsigned (reg_cache, reg_src1
10719 , &u_regval[0]);
10720 regcache_raw_read_unsigned (reg_cache, reg_src2
10721 , &u_regval[1]);
10722 if (15 == reg_src2)
10723 {
10724 /* If R15 was used as Rn, the value read is the current PC + 8. */
10725 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10726 u_regval[0] = u_regval[0] + 8;
10727 }
10728 /* Calculate target store address, Rn +/- Rm, register offset. */
10729 /* U == 1. */
10730 if (bit (arm_insn_r->arm_insn, 23))
10731 {
10732 tgt_mem_addr = u_regval[0] + u_regval[1];
10733 }
10734 else
10735 {
10736 tgt_mem_addr = u_regval[1] - u_regval[0];
10737 }
10738
10739 switch (arm_insn_r->opcode)
10740 {
10741 /* STR. */
10742 case 8:
10743 case 12:
10744 /* STR. */
10745 case 9:
10746 case 13:
10747 /* STRT. */
10748 case 1:
10749 case 5:
10750 /* STR. */
10751 case 0:
10752 case 4:
10753 record_buf_mem[0] = 4;
10754 break;
10755
10756 /* STRB. */
10757 case 10:
10758 case 14:
10759 /* STRB. */
10760 case 11:
10761 case 15:
10762 /* STRBT. */
10763 case 3:
10764 case 7:
10765 /* STRB. */
10766 case 2:
10767 case 6:
10768 record_buf_mem[0] = 1;
10769 break;
10770
10771 default:
10772 gdb_assert_not_reached ("no decoding pattern found");
10773 break;
10774 }
10775 record_buf_mem[1] = tgt_mem_addr;
10776 arm_insn_r->mem_rec_count = 1;
10777
10778 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10779 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10780 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10781 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10782 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10783 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10784 )
10785 {
10786 /* Rn is going to be changed in pre-indexed mode and
10787 post-indexed mode as well. */
10788 record_buf[0] = reg_src2;
10789 arm_insn_r->reg_rec_count = 1;
10790 }
10791 }
10792 else
10793 {
10794 /* Store insn, scaled register offset; scaled pre-indexed. */
10795 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10796 /* Get Rm. */
10797 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10798 /* Get Rn. */
10799 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10800 /* Get shift_imm. */
10801 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10802 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10803 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10804 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10805 /* Offset_12 used as shift. */
10806 switch (offset_12)
10807 {
10808 case 0:
10809 /* Offset_12 used as index. */
10810 offset_12 = u_regval[0] << shift_imm;
10811 break;
10812
10813 case 1:
10814 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10815 break;
10816
10817 case 2:
10818 if (!shift_imm)
10819 {
10820 if (bit (u_regval[0], 31))
10821 {
10822 offset_12 = 0xFFFFFFFF;
10823 }
10824 else
10825 {
10826 offset_12 = 0;
10827 }
10828 }
10829 else
10830 {
10831 /* This is arithmetic shift. */
10832 offset_12 = s_word >> shift_imm;
10833 }
10834 break;
10835
10836 case 3:
10837 if (!shift_imm)
10838 {
10839 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10840 &u_regval[1]);
10841 /* Get C flag value and shift it by 31. */
10842 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10843 | (u_regval[0]) >> 1);
10844 }
10845 else
10846 {
10847 offset_12 = (u_regval[0] >> shift_imm) \
10848 | (u_regval[0] <<
10849 (32 - shift_imm));
10850 }
10851 break;
10852
10853 default:
10854 gdb_assert_not_reached ("no decoding pattern found");
10855 break;
10856 }
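 /* To summarize the switch above: shift type 0 is LSL, 1 is LSR, 2 is ASR
    and 3 is ROR (or RRX when shift_imm is zero, which shifts the carry
    flag into bit 31). */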
10857
10858 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10859 /* bit U set. */
10860 if (bit (arm_insn_r->arm_insn, 23))
10861 {
10862 tgt_mem_addr = u_regval[1] + offset_12;
10863 }
10864 else
10865 {
10866 tgt_mem_addr = u_regval[1] - offset_12;
10867 }
10868
10869 switch (arm_insn_r->opcode)
10870 {
10871 /* STR. */
10872 case 8:
10873 case 12:
10874 /* STR. */
10875 case 9:
10876 case 13:
10877 /* STRT. */
10878 case 1:
10879 case 5:
10880 /* STR. */
10881 case 0:
10882 case 4:
10883 record_buf_mem[0] = 4;
10884 break;
10885
10886 /* STRB. */
10887 case 10:
10888 case 14:
10889 /* STRB. */
10890 case 11:
10891 case 15:
10892 /* STRBT. */
10893 case 3:
10894 case 7:
10895 /* STRB. */
10896 case 2:
10897 case 6:
10898 record_buf_mem[0] = 1;
10899 break;
10900
10901 default:
10902 gdb_assert_not_reached ("no decoding pattern found");
10903 break;
10904 }
10905 record_buf_mem[1] = tgt_mem_addr;
10906 arm_insn_r->mem_rec_count = 1;
10907
10908 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10909 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10910 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10911 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10912 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10913 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10914 )
10915 {
10916 /* Rn is going to be changed in register scaled pre-indexed
10917 mode, and scaled post-indexed mode. */
10918 record_buf[0] = reg_src2;
10919 arm_insn_r->reg_rec_count = 1;
10920 }
10921 }
10922 }
10923
10924 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10925 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10926 return 0;
10927}
10928
71e396f9 10929/* Handle ARM mode instructions with opcode 100. */
72508ac0
PO
10930
10931static int
10932arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10933{
10934 struct regcache *reg_cache = arm_insn_r->regcache;
71e396f9
LM
10935 uint32_t register_count = 0, register_bits;
10936 uint32_t reg_base, addr_mode;
72508ac0 10937 uint32_t record_buf[24], record_buf_mem[48];
71e396f9
LM
10938 uint32_t wback;
10939 ULONGEST u_regval;
72508ac0 10940
71e396f9
LM
10941 /* Fetch the list of registers. */
10942 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10943 arm_insn_r->reg_rec_count = 0;
10944
10945 /* Fetch the base register that contains the address we are loading data
10946 to. */
10947 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10948
71e396f9
LM
10949 /* Calculate wback. */
10950 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10951
10952 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10953 {
71e396f9 10954 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10955
71e396f9 10956 /* Find out which registers are going to be loaded from memory. */
72508ac0 10957 while (register_bits)
71e396f9
LM
10958 {
10959 if (register_bits & 0x00000001)
10960 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10961 register_bits = register_bits >> 1;
10962 register_count++;
10963 }
72508ac0 10964
71e396f9
LM
10965
10966 /* If wback is true, also save the base register, which is going to be
10967 written to. */
10968 if (wback)
10969 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10970
10971 /* Save the CPSR register. */
10972 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
72508ac0
PO
10973 }
10974 else
10975 {
71e396f9 10976 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 10977
71e396f9
LM
10978 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10979
10980 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10981
10982 /* Find out how many registers are going to be stored to memory. */
72508ac0 10983 while (register_bits)
71e396f9
LM
10984 {
10985 if (register_bits & 0x00000001)
10986 register_count++;
10987 register_bits = register_bits >> 1;
10988 }
72508ac0
PO
10989
10990 switch (addr_mode)
71e396f9
LM
10991 {
10992 /* STMDA (STMED): Decrement after. */
10993 case 0:
10994 record_buf_mem[1] = (uint32_t) u_regval
10995 - register_count * INT_REGISTER_SIZE + 4;
10996 break;
10997 /* STM (STMIA, STMEA): Increment after. */
10998 case 1:
10999 record_buf_mem[1] = (uint32_t) u_regval;
11000 break;
11001 /* STMDB (STMFD): Decrement before. */
11002 case 2:
11003 record_buf_mem[1] = (uint32_t) u_regval
11004 - register_count * INT_REGISTER_SIZE;
11005 break;
11006 /* STMIB (STMFA): Increment before. */
11007 case 3:
11008 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11009 break;
11010 default:
11011 gdb_assert_not_reached ("no decoding pattern found");
11012 break;
11013 }
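 /* Worked example (illustrative, not from the original source):
    STMDB SP!, {R4-R7, LR} with SP = 0x1000 stores five 4-byte registers,
    so addr_mode 2 (decrement before) gives a start address of
    0x1000 - 20 = 0xFEC and a 20-byte memory record. */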
72508ac0 11014
71e396f9
LM
11015 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11016 arm_insn_r->mem_rec_count = 1;
11017
11018 /* If wback is true, also save the base register, which is going to be
11019 written to. */
11020 if (wback)
11021 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
11022 }
11023
11024 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11025 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11026 return 0;
11027}
11028
11029/* Handling opcode 101 insns. */
11030
11031static int
11032arm_record_b_bl (insn_decode_record *arm_insn_r)
11033{
11034 uint32_t record_buf[8];
11035
11036 /* Handle B, BL, BLX(1) insns. */
11037 /* B simply branches so we do nothing here. */
11038 /* Note: BLX(1) doesn't fall here; instead it falls into the
11039 extension space. */
11040 if (bit (arm_insn_r->arm_insn, 24))
11041 {
11042 record_buf[0] = ARM_LR_REGNUM;
11043 arm_insn_r->reg_rec_count = 1;
11044 }
11045
11046 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11047
11048 return 0;
11049}
11050
72508ac0 11051static int
c6ec2b30 11052arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
72508ac0
PO
11053{
11054 printf_unfiltered (_("Process record does not support instruction "
01e57735
YQ
11055 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11056 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
72508ac0
PO
11057
11058 return -1;
11059}
11060
5a578da5
OJ
11061/* Record handler for vector data transfer instructions. */
11062
11063static int
11064arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11065{
11066 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11067 uint32_t record_buf[4];
11068
5a578da5
OJ
11069 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11070 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11071 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11072 bit_l = bit (arm_insn_r->arm_insn, 20);
11073 bit_c = bit (arm_insn_r->arm_insn, 8);
11074
11075 /* Handle VMOV instruction. */
11076 if (bit_l && bit_c)
11077 {
11078 record_buf[0] = reg_t;
11079 arm_insn_r->reg_rec_count = 1;
11080 }
11081 else if (bit_l && !bit_c)
11082 {
11083 /* Handle VMOV instruction. */
11084 if (bits_a == 0x00)
11085 {
f1771dce 11086 record_buf[0] = reg_t;
5a578da5
OJ
11087 arm_insn_r->reg_rec_count = 1;
11088 }
11089 /* Handle VMRS instruction. */
11090 else if (bits_a == 0x07)
11091 {
11092 if (reg_t == 15)
11093 reg_t = ARM_PS_REGNUM;
11094
11095 record_buf[0] = reg_t;
11096 arm_insn_r->reg_rec_count = 1;
11097 }
11098 }
11099 else if (!bit_l && !bit_c)
11100 {
11101 /* Handle VMOV instruction. */
11102 if (bits_a == 0x00)
11103 {
f1771dce 11104 record_buf[0] = ARM_D0_REGNUM + reg_v;
5a578da5
OJ
11105
11106 arm_insn_r->reg_rec_count = 1;
11107 }
11108 /* Handle VMSR instruction. */
11109 else if (bits_a == 0x07)
11110 {
11111 record_buf[0] = ARM_FPSCR_REGNUM;
11112 arm_insn_r->reg_rec_count = 1;
11113 }
11114 }
11115 else if (!bit_l && bit_c)
11116 {
11117 /* Handle VMOV instruction. */
11118 if (!(bits_a & 0x04))
11119 {
11120 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11121 + ARM_D0_REGNUM;
11122 arm_insn_r->reg_rec_count = 1;
11123 }
11124 /* Handle VDUP instruction. */
11125 else
11126 {
11127 if (bit (arm_insn_r->arm_insn, 21))
11128 {
11129 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11130 record_buf[0] = reg_v + ARM_D0_REGNUM;
11131 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11132 arm_insn_r->reg_rec_count = 2;
11133 }
11134 else
11135 {
11136 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11137 record_buf[0] = reg_v + ARM_D0_REGNUM;
11138 arm_insn_r->reg_rec_count = 1;
11139 }
11140 }
11141 }
11142
11143 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11144 return 0;
11145}
11146
f20f80dd
OJ
11147/* Record handler for extension register load/store instructions. */
11148
11149static int
11150arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11151{
11152 uint32_t opcode, single_reg;
11153 uint8_t op_vldm_vstm;
11154 uint32_t record_buf[8], record_buf_mem[128];
11155 ULONGEST u_regval = 0;
11156
11157 struct regcache *reg_cache = arm_insn_r->regcache;
f20f80dd
OJ
11158
11159 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11160 single_reg = !bit (arm_insn_r->arm_insn, 8);
f20f80dd
OJ
11161 op_vldm_vstm = opcode & 0x1b;
11162
11163 /* Handle VMOV instructions. */
11164 if ((opcode & 0x1e) == 0x04)
11165 {
9fde51ed 11166 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
01e57735
YQ
11167 {
11168 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11169 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11170 arm_insn_r->reg_rec_count = 2;
11171 }
f20f80dd 11172 else
01e57735 11173 {
9fde51ed
YQ
11174 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11175 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11176
9fde51ed 11177 if (single_reg)
01e57735 11178 {
9fde51ed
YQ
11179 /* The first S register number m is REG_M:M (M is bit 5),
11180 the corresponding D register number is REG_M:M / 2, which
11181 is REG_M. */
11182 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11183 /* The second S register number is REG_M:M + 1, the
11184 corresponding D register number is (REG_M:M + 1) / 2.
11185 IOW, if bit M is 1, the first and second S registers
11186 are mapped to different D registers, otherwise, they are
11187 in the same D register. */
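 /* For example (illustrative): reg_m = 2 with bit_m = 1 means S5 and S6
    are transferred, which live in D2 and D3, so both D registers are
    recorded. */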
11188 if (bit_m)
11189 {
11190 record_buf[arm_insn_r->reg_rec_count++]
11191 = ARM_D0_REGNUM + reg_m + 1;
11192 }
01e57735
YQ
11193 }
11194 else
11195 {
9fde51ed 11196 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
01e57735
YQ
11197 arm_insn_r->reg_rec_count = 1;
11198 }
11199 }
f20f80dd
OJ
11200 }
11201 /* Handle VSTM and VPUSH instructions. */
11202 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11203 || op_vldm_vstm == 0x12)
f20f80dd
OJ
11204 {
11205 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11206 uint32_t memory_index = 0;
11207
11208 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11209 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11210 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11211 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11212 memory_count = imm_off8;
11213
11214 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11215 start_address = u_regval;
f20f80dd 11216 else
01e57735 11217 start_address = u_regval - imm_off32;
f20f80dd
OJ
11218
11219 if (bit (arm_insn_r->arm_insn, 21))
01e57735
YQ
11220 {
11221 record_buf[0] = reg_rn;
11222 arm_insn_r->reg_rec_count = 1;
11223 }
f20f80dd
OJ
11224
11225 while (memory_count > 0)
01e57735 11226 {
9fde51ed 11227 if (single_reg)
01e57735 11228 {
9fde51ed
YQ
11229 record_buf_mem[memory_index] = 4;
11230 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11231 start_address = start_address + 4;
11232 memory_index = memory_index + 2;
11233 }
11234 else
11235 {
9fde51ed
YQ
11236 record_buf_mem[memory_index] = 4;
11237 record_buf_mem[memory_index + 1] = start_address;
11238 record_buf_mem[memory_index + 2] = 4;
11239 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11240 start_address = start_address + 8;
11241 memory_index = memory_index + 4;
11242 }
11243 memory_count--;
11244 }
f20f80dd
OJ
11245 arm_insn_r->mem_rec_count = (memory_index >> 1);
11246 }
11247 /* Handle VLDM instructions. */
11248 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11249 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11250 {
11251 uint32_t reg_count, reg_vd;
11252 uint32_t reg_index = 0;
9fde51ed 11253 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
f20f80dd
OJ
11254
11255 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11256 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11257
9fde51ed
YQ
11258 /* REG_VD is the first D register number. If the instruction
11259 loads memory to S registers (SINGLE_REG is TRUE), the register
11260 number is (REG_VD << 1 | bit D), so the corresponding D
11261 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11262 if (!single_reg)
11263 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11264
9fde51ed 11265 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11266 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11267
9fde51ed
YQ
11268 /* If the instruction loads memory to D register, REG_COUNT should
11269 be divided by 2, according to the ARM Architecture Reference
11270 Manual. If the instruction loads memory to S register, divide by
11271 2 as well because two S registers are mapped to D register. */
11272 reg_count = reg_count / 2;
11273 if (single_reg && bit_d)
01e57735 11274 {
9fde51ed
YQ
11275 /* Increase the register count if S register list starts from
11276 an odd number (bit d is one). */
11277 reg_count++;
11278 }
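 /* Worked example (illustrative): a single-precision VLDM of S3-S5 has
    reg_vd = 1, bit_d = 1 and an 8-bit count of 3; 3 / 2 = 1 plus the
    odd-start correction gives 2, so D1 and D2 are recorded. */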
f20f80dd 11279
9fde51ed
YQ
11280 while (reg_count > 0)
11281 {
11282 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
01e57735
YQ
11283 reg_count--;
11284 }
f20f80dd
OJ
11285 arm_insn_r->reg_rec_count = reg_index;
11286 }
11287 /* VSTR Vector store register. */
11288 else if ((opcode & 0x13) == 0x10)
11289 {
bec2ab5a 11290 uint32_t start_address, reg_rn, imm_off32, imm_off8;
f20f80dd
OJ
11291 uint32_t memory_index = 0;
11292
11293 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11294 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11295 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11296 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11297
11298 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11299 start_address = u_regval + imm_off32;
f20f80dd 11300 else
01e57735 11301 start_address = u_regval - imm_off32;
f20f80dd
OJ
11302
11303 if (single_reg)
01e57735 11304 {
9fde51ed
YQ
11305 record_buf_mem[memory_index] = 4;
11306 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11307 arm_insn_r->mem_rec_count = 1;
11308 }
f20f80dd 11309 else
01e57735 11310 {
9fde51ed
YQ
11311 record_buf_mem[memory_index] = 4;
11312 record_buf_mem[memory_index + 1] = start_address;
11313 record_buf_mem[memory_index + 2] = 4;
11314 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11315 arm_insn_r->mem_rec_count = 2;
11316 }
f20f80dd
OJ
11317 }
11318 /* VLDR Vector load register. */
11319 else if ((opcode & 0x13) == 0x11)
11320 {
11321 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11322
11323 if (!single_reg)
01e57735
YQ
11324 {
11325 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11326 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11327 }
f20f80dd 11328 else
01e57735
YQ
11329 {
11330 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
9fde51ed
YQ
11331 /* Record register D rather than pseudo register S. */
11332 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11333 }
f20f80dd
OJ
11334 arm_insn_r->reg_rec_count = 1;
11335 }
11336
11337 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11338 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11339 return 0;
11340}
11341
851f26ae
OJ
11342/* Record handler for arm/thumb mode VFP data processing instructions. */
11343
11344static int
11345arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11346{
11347 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11348 uint32_t record_buf[4];
11349 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11350 enum insn_types curr_insn_type = INSN_INV;
11351
11352 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11353 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11354 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11355 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11356 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11357 bit_d = bit (arm_insn_r->arm_insn, 22);
11358 opc1 = opc1 & 0x04;
11359
11360 /* Handle VMLA, VMLS. */
11361 if (opc1 == 0x00)
11362 {
11363 if (bit (arm_insn_r->arm_insn, 10))
11364 {
11365 if (bit (arm_insn_r->arm_insn, 6))
11366 curr_insn_type = INSN_T0;
11367 else
11368 curr_insn_type = INSN_T1;
11369 }
11370 else
11371 {
11372 if (dp_op_sz)
11373 curr_insn_type = INSN_T1;
11374 else
11375 curr_insn_type = INSN_T2;
11376 }
11377 }
11378 /* Handle VNMLA, VNMLS, VNMUL. */
11379 else if (opc1 == 0x01)
11380 {
11381 if (dp_op_sz)
11382 curr_insn_type = INSN_T1;
11383 else
11384 curr_insn_type = INSN_T2;
11385 }
11386 /* Handle VMUL. */
11387 else if (opc1 == 0x02 && !(opc3 & 0x01))
11388 {
11389 if (bit (arm_insn_r->arm_insn, 10))
11390 {
11391 if (bit (arm_insn_r->arm_insn, 6))
11392 curr_insn_type = INSN_T0;
11393 else
11394 curr_insn_type = INSN_T1;
11395 }
11396 else
11397 {
11398 if (dp_op_sz)
11399 curr_insn_type = INSN_T1;
11400 else
11401 curr_insn_type = INSN_T2;
11402 }
11403 }
11404 /* Handle VADD, VSUB. */
11405 else if (opc1 == 0x03)
11406 {
11407 if (!bit (arm_insn_r->arm_insn, 9))
11408 {
11409 if (bit (arm_insn_r->arm_insn, 6))
11410 curr_insn_type = INSN_T0;
11411 else
11412 curr_insn_type = INSN_T1;
11413 }
11414 else
11415 {
11416 if (dp_op_sz)
11417 curr_insn_type = INSN_T1;
11418 else
11419 curr_insn_type = INSN_T2;
11420 }
11421 }
11422 /* Handle VDIV. */
11423 else if (opc1 == 0x0b)
11424 {
11425 if (dp_op_sz)
11426 curr_insn_type = INSN_T1;
11427 else
11428 curr_insn_type = INSN_T2;
11429 }
11430 /* Handle all other vfp data processing instructions. */
11431 else if (opc1 == 0x0b)
11432 {
11433 /* Handle VMOV. */
11434 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11435 {
11436 if (bit (arm_insn_r->arm_insn, 4))
11437 {
11438 if (bit (arm_insn_r->arm_insn, 6))
11439 curr_insn_type = INSN_T0;
11440 else
11441 curr_insn_type = INSN_T1;
11442 }
11443 else
11444 {
11445 if (dp_op_sz)
11446 curr_insn_type = INSN_T1;
11447 else
11448 curr_insn_type = INSN_T2;
11449 }
11450 }
11451 /* Handle VNEG and VABS. */
11452 else if ((opc2 == 0x01 && opc3 == 0x01)
11453 || (opc2 == 0x00 && opc3 == 0x03))
11454 {
11455 if (!bit (arm_insn_r->arm_insn, 11))
11456 {
11457 if (bit (arm_insn_r->arm_insn, 6))
11458 curr_insn_type = INSN_T0;
11459 else
11460 curr_insn_type = INSN_T1;
11461 }
11462 else
11463 {
11464 if (dp_op_sz)
11465 curr_insn_type = INSN_T1;
11466 else
11467 curr_insn_type = INSN_T2;
11468 }
11469 }
11470 /* Handle VSQRT. */
11471 else if (opc2 == 0x01 && opc3 == 0x03)
11472 {
11473 if (dp_op_sz)
11474 curr_insn_type = INSN_T1;
11475 else
11476 curr_insn_type = INSN_T2;
11477 }
11478 /* Handle VCVT. */
11479 else if (opc2 == 0x07 && opc3 == 0x03)
11480 {
11481 if (!dp_op_sz)
11482 curr_insn_type = INSN_T1;
11483 else
11484 curr_insn_type = INSN_T2;
11485 }
11486 else if (opc3 & 0x01)
11487 {
11488 /* Handle VCVT. */
11489 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11490 {
11491 if (!bit (arm_insn_r->arm_insn, 18))
11492 curr_insn_type = INSN_T2;
11493 else
11494 {
11495 if (dp_op_sz)
11496 curr_insn_type = INSN_T1;
11497 else
11498 curr_insn_type = INSN_T2;
11499 }
11500 }
11501 /* Handle VCVT. */
11502 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11503 {
11504 if (dp_op_sz)
11505 curr_insn_type = INSN_T1;
11506 else
11507 curr_insn_type = INSN_T2;
11508 }
11509 /* Handle VCVTB, VCVTT. */
11510 else if ((opc2 & 0x0e) == 0x02)
11511 curr_insn_type = INSN_T2;
11512 /* Handle VCMP, VCMPE. */
11513 else if ((opc2 & 0x0e) == 0x04)
11514 curr_insn_type = INSN_T3;
11515 }
11516 }
11517
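 /* The insn_types chosen above select what gets recorded below: INSN_T0 a
    pair of consecutive D registers, INSN_T1 a single D register (with bit D
    as the high bit of the register number), INSN_T2 the destination of the
    single-precision forms, and INSN_T3 only FPSCR (VCMP/VCMPE). */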
11518 switch (curr_insn_type)
11519 {
11520 case INSN_T0:
11521 reg_vd = reg_vd | (bit_d << 4);
11522 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11523 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11524 arm_insn_r->reg_rec_count = 2;
11525 break;
11526
11527 case INSN_T1:
11528 reg_vd = reg_vd | (bit_d << 4);
11529 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11530 arm_insn_r->reg_rec_count = 1;
11531 break;
11532
11533 case INSN_T2:
11534 reg_vd = (reg_vd << 1) | bit_d;
11535 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11536 arm_insn_r->reg_rec_count = 1;
11537 break;
11538
11539 case INSN_T3:
11540 record_buf[0] = ARM_FPSCR_REGNUM;
11541 arm_insn_r->reg_rec_count = 1;
11542 break;
11543
11544 default:
11545 gdb_assert_not_reached ("no decoding pattern found");
11546 break;
11547 }
11548
11549 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11550 return 0;
11551}
11552
60cc5e93
OJ
11553/* Handling opcode 110 insns. */
11554
11555static int
11556arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11557{
bec2ab5a 11558 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11559
11560 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11561 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11562 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11563
11564 if ((coproc & 0x0e) == 0x0a)
11565 {
11566 /* Handle extension register ld/st instructions. */
11567 if (!(op1 & 0x20))
f20f80dd 11568 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11569
11570 /* 64-bit transfers between arm core and extension registers. */
11571 if ((op1 & 0x3e) == 0x04)
f20f80dd 11572 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11573 }
11574 else
11575 {
11576 /* Handle coprocessor ld/st instructions. */
11577 if (!(op1 & 0x3a))
11578 {
11579 /* Store. */
11580 if (!op1_ebit)
11581 return arm_record_unsupported_insn (arm_insn_r);
11582 else
11583 /* Load. */
11584 return arm_record_unsupported_insn (arm_insn_r);
11585 }
11586
11587 /* Move to coprocessor from two arm core registers. */
11588 if (op1 == 0x4)
11589 return arm_record_unsupported_insn (arm_insn_r);
11590
11591 /* Move to two arm core registers from coprocessor. */
11592 if (op1 == 0x5)
11593 {
11594 uint32_t reg_t[2];
11595
11596 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11597 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11598 arm_insn_r->reg_rec_count = 2;
11599
11600 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11601 return 0;
11602 }
11603 }
11604 return arm_record_unsupported_insn (arm_insn_r);
11605}
11606
72508ac0
PO
11607/* Handling opcode 111 insns. */
11608
11609static int
11610arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11611{
60cc5e93 11612 uint32_t op, op1_sbit, op1_ebit, coproc;
72508ac0
PO
11613 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11614 struct regcache *reg_cache = arm_insn_r->regcache;
72508ac0
PO
11615
11616 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
60cc5e93
OJ
11617 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11618 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11619 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11620 op = bit (arm_insn_r->arm_insn, 4);
97dfe206
OJ
11621
11622 /* Handle arm SWI/SVC system call instructions. */
60cc5e93 11623 if (op1_sbit)
97dfe206
OJ
11624 {
11625 if (tdep->arm_syscall_record != NULL)
11626 {
11627 ULONGEST svc_operand, svc_number;
11628
11629 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11630
11631 if (svc_operand) /* OABI. */
11632 svc_number = svc_operand - 0x900000;
11633 else /* EABI. */
11634 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
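 /* E.g. (illustrative): the OABI encoding "swi 0x900004" yields syscall
    number 4, while the EABI encoding "svc 0" leaves the operand zero and
    the syscall number is taken from r7 instead. */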
11635
60cc5e93 11636 return tdep->arm_syscall_record (reg_cache, svc_number);
97dfe206
OJ
11637 }
11638 else
11639 {
11640 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11641 return -1;
97dfe206
OJ
11642 }
11643 }
60cc5e93
OJ
11644
11645 if ((coproc & 0x0e) == 0x0a)
11646 {
11647 /* VFP data-processing instructions. */
11648 if (!op1_sbit && !op)
851f26ae 11649 return arm_record_vfp_data_proc_insn (arm_insn_r);
60cc5e93
OJ
11650
11651 /* Advanced SIMD, VFP instructions. */
11652 if (!op1_sbit && op)
5a578da5 11653 return arm_record_vdata_transfer_insn (arm_insn_r);
60cc5e93 11654 }
97dfe206
OJ
11655 else
11656 {
60cc5e93
OJ
11657 /* Coprocessor data operations. */
11658 if (!op1_sbit && !op)
11659 return arm_record_unsupported_insn (arm_insn_r);
11660
11661 /* Move to Coprocessor from ARM core register. */
11662 if (!op1_sbit && !op1_ebit && op)
11663 return arm_record_unsupported_insn (arm_insn_r);
11664
11665 /* Move to arm core register from coprocessor. */
11666 if (!op1_sbit && op1_ebit && op)
11667 {
11668 uint32_t record_buf[1];
11669
11670 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11671 if (record_buf[0] == 15)
11672 record_buf[0] = ARM_PS_REGNUM;
11673
11674 arm_insn_r->reg_rec_count = 1;
11675 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11676 record_buf);
11677 return 0;
11678 }
97dfe206 11679 }
72508ac0 11680
60cc5e93 11681 return arm_record_unsupported_insn (arm_insn_r);
72508ac0
PO
11682}
11683
11684/* Handling opcode 000 insns. */
11685
11686static int
11687thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11688{
11689 uint32_t record_buf[8];
11690 uint32_t reg_src1 = 0;
11691
11692 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11693
11694 record_buf[0] = ARM_PS_REGNUM;
11695 record_buf[1] = reg_src1;
11696 thumb_insn_r->reg_rec_count = 2;
11697
11698 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11699
11700 return 0;
11701}
11702
11703
11704/* Handling opcode 001 insns. */
11705
11706static int
11707thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11708{
11709 uint32_t record_buf[8];
11710 uint32_t reg_src1 = 0;
11711
11712 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11713
11714 record_buf[0] = ARM_PS_REGNUM;
11715 record_buf[1] = reg_src1;
11716 thumb_insn_r->reg_rec_count = 2;
11717
11718 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11719
11720 return 0;
11721}
11722
11723/* Handling opcode 010 insns. */
11724
11725static int
11726thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11727{
11728 struct regcache *reg_cache = thumb_insn_r->regcache;
11729 uint32_t record_buf[8], record_buf_mem[8];
11730
11731 uint32_t reg_src1 = 0, reg_src2 = 0;
11732 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11733
11734 ULONGEST u_regval[2] = {0};
11735
11736 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11737
11738 if (bit (thumb_insn_r->arm_insn, 12))
11739 {
11740 /* Handle load/store register offset. */
11741 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11742 if (opcode2 >= 12 && opcode2 <= 15)
11743 {
11744 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11745 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11746 record_buf[0] = reg_src1;
11747 thumb_insn_r->reg_rec_count = 1;
11748 }
11749 else if (opcode2 >= 8 && opcode2 <= 10)
11750 {
11751 /* STR(2), STRB(2), STRH(2) . */
11752 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11753 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11754 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11755 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11756 if (8 == opcode2)
11757 record_buf_mem[0] = 4; /* STR (2). */
11758 else if (10 == opcode2)
11759 record_buf_mem[0] = 1; /* STRB (2). */
11760 else if (9 == opcode2)
11761 record_buf_mem[0] = 2; /* STRH (2). */
11762 record_buf_mem[1] = u_regval[0] + u_regval[1];
11763 thumb_insn_r->mem_rec_count = 1;
11764 }
11765 }
11766 else if (bit (thumb_insn_r->arm_insn, 11))
11767 {
11768 /* Handle load from literal pool. */
11769 /* LDR(3). */
11770 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11771 record_buf[0] = reg_src1;
11772 thumb_insn_r->reg_rec_count = 1;
11773 }
11774 else if (opcode1)
11775 {
11776 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11777 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11778 if ((3 == opcode2) && (!opcode3))
11779 {
11780 /* Branch with exchange. */
11781 record_buf[0] = ARM_PS_REGNUM;
11782 thumb_insn_r->reg_rec_count = 1;
11783 }
11784 else
11785 {
1f33efec
YQ
11786 /* Format 8; special data processing insns. */
11787 record_buf[0] = ARM_PS_REGNUM;
11788 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11789 | bits (thumb_insn_r->arm_insn, 0, 2));
72508ac0
PO
11790 thumb_insn_r->reg_rec_count = 2;
11791 }
11792 }
11793 else
11794 {
11795 /* Format 5; data processing insns. */
11796 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11797 if (bit (thumb_insn_r->arm_insn, 7))
11798 {
11799 reg_src1 = reg_src1 + 8;
11800 }
11801 record_buf[0] = ARM_PS_REGNUM;
11802 record_buf[1] = reg_src1;
11803 thumb_insn_r->reg_rec_count = 2;
11804 }
11805
11806 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11807 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11808 record_buf_mem);
11809
11810 return 0;
11811}
11812
11813/* Handling opcode 001 insns. */
11814
11815static int
11816thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11817{
11818 struct regcache *reg_cache = thumb_insn_r->regcache;
11819 uint32_t record_buf[8], record_buf_mem[8];
11820
11821 uint32_t reg_src1 = 0;
11822 uint32_t opcode = 0, immed_5 = 0;
11823
11824 ULONGEST u_regval = 0;
11825
11826 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11827
11828 if (opcode)
11829 {
11830 /* LDR(1). */
11831 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11832 record_buf[0] = reg_src1;
11833 thumb_insn_r->reg_rec_count = 1;
11834 }
11835 else
11836 {
11837 /* STR(1). */
11838 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11839 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11840 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11841 record_buf_mem[0] = 4;
11842 record_buf_mem[1] = u_regval + (immed_5 * 4);
11843 thumb_insn_r->mem_rec_count = 1;
11844 }
11845
11846 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11847 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11848 record_buf_mem);
11849
11850 return 0;
11851}
11852
11853/* Handling opcode 100 insns. */
11854
11855static int
11856thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11857{
11858 struct regcache *reg_cache = thumb_insn_r->regcache;
11859 uint32_t record_buf[8], record_buf_mem[8];
11860
11861 uint32_t reg_src1 = 0;
11862 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11863
11864 ULONGEST u_regval = 0;
11865
11866 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11867
11868 if (3 == opcode)
11869 {
11870 /* LDR(4). */
11871 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11872 record_buf[0] = reg_src1;
11873 thumb_insn_r->reg_rec_count = 1;
11874 }
11875 else if (1 == opcode)
11876 {
11877 /* LDRH(1). */
11878 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11879 record_buf[0] = reg_src1;
11880 thumb_insn_r->reg_rec_count = 1;
11881 }
11882 else if (2 == opcode)
11883 {
11884 /* STR(3). */
11885 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11886 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11887 record_buf_mem[0] = 4;
11888 record_buf_mem[1] = u_regval + (immed_8 * 4);
11889 thumb_insn_r->mem_rec_count = 1;
11890 }
11891 else if (0 == opcode)
11892 {
11893 /* STRH(1). */
11894 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11895 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11896 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11897 record_buf_mem[0] = 2;
11898 record_buf_mem[1] = u_regval + (immed_5 * 2);
11899 thumb_insn_r->mem_rec_count = 1;
11900 }
11901
11902 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11903 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11904 record_buf_mem);
11905
11906 return 0;
11907}
11908
11909/* Handling opcode 101 insns. */
11910
11911static int
11912thumb_record_misc (insn_decode_record *thumb_insn_r)
11913{
11914 struct regcache *reg_cache = thumb_insn_r->regcache;
11915
11916 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11917 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11918 uint32_t index = 0, start_address = 0;
11919 uint32_t record_buf[24], record_buf_mem[48];
11920 uint32_t reg_src1;
11921
11922 ULONGEST u_regval = 0;
11923
11924 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11925 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11926 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11927
11928 if (14 == opcode2)
11929 {
11930 /* POP. */
11931 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11932 while (register_bits)
11933 {
11934 if (register_bits & 0x00000001)
11935 record_buf[index++] = register_count;
11936 register_bits = register_bits >> 1;
11937 register_count++;
11938 }
11939 record_buf[index++] = ARM_PS_REGNUM;
11940 record_buf[index++] = ARM_SP_REGNUM;
11941 thumb_insn_r->reg_rec_count = index;
11942 }
11943 else if (10 == opcode2)
11944 {
11945 /* PUSH. */
11946 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
9904a494 11947 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11948 while (register_bits)
11949 {
11950 if (register_bits & 0x00000001)
11951 register_count++;
11952 register_bits = register_bits >> 1;
11953 }
11954 start_address = u_regval - \
11955 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11956 thumb_insn_r->mem_rec_count = register_count;
11957 while (register_count)
11958 {
11959 record_buf_mem[(register_count * 2) - 1] = start_address;
11960 record_buf_mem[(register_count * 2) - 2] = 4;
11961 start_address = start_address + 4;
11962 register_count--;
11963 }
11964 record_buf[0] = ARM_SP_REGNUM;
11965 thumb_insn_r->reg_rec_count = 1;
11966 }
11967 else if (0x1E == opcode1)
11968 {
11969      /* BKPT insn. */
11970      /* Handle the enhanced software breakpoint insn, BKPT. */
11971      /* The CPSR is changed so that execution resumes in ARM state with normal
11972      interrupts disabled, entering abort mode. */
11973      /* The PC is set according to the high vector configuration. */
11974      /* If the user hits this breakpoint and then reverse-executes, we need to go
11975      back with the previous CPSR and program counter. */
11976 record_buf[0] = ARM_PS_REGNUM;
11977 record_buf[1] = ARM_LR_REGNUM;
11978 thumb_insn_r->reg_rec_count = 2;
11979 /* We need to save SPSR value, which is not yet done. */
11980 printf_unfiltered (_("Process record does not support instruction "
11981 "0x%0x at address %s.\n"),
11982 thumb_insn_r->arm_insn,
11983 paddress (thumb_insn_r->gdbarch,
11984 thumb_insn_r->this_addr));
11985 return -1;
11986 }
11987 else if ((0 == opcode) || (1 == opcode))
11988 {
11989 /* ADD(5), ADD(6). */
11990 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11991 record_buf[0] = reg_src1;
11992 thumb_insn_r->reg_rec_count = 1;
11993 }
11994 else if (2 == opcode)
11995 {
11996 /* ADD(7), SUB(4). */
11997 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11998 record_buf[0] = ARM_SP_REGNUM;
11999 thumb_insn_r->reg_rec_count = 1;
12000 }
12001
12002 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12003 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12004 record_buf_mem);
12005
12006 return 0;
12007}
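/* Worked example (values purely illustrative): for a Thumb PUSH {r4, r5}
   with SP = 0x2000, register_count is 2 and the M bit is clear, so
   start_address = 0x2000 - 8 = 0x1FF8 and the two recorded memory entries
   cover the 4-byte slots at 0x1FF8 and 0x1FFC, while r13 (SP) is recorded
   as the only modified register.  */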
12008
12009/* Handling opcode 110 insns. */
12010
12011static int
12012thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12013{
12014 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12015 struct regcache *reg_cache = thumb_insn_r->regcache;
12016
12017  uint32_t ret = 0;   /* Function return value: -1: record failure; 0: success. */
12018 uint32_t reg_src1 = 0;
12019 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 12020 uint32_t index = 0, start_address = 0;
12021 uint32_t record_buf[24], record_buf_mem[48];
12022
12023 ULONGEST u_regval = 0;
12024
12025 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12026 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12027
12028 if (1 == opcode2)
12029 {
12030
12031 /* LDMIA. */
12032 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12033 /* Get Rn. */
12034 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12035 while (register_bits)
12036 {
12037 if (register_bits & 0x00000001)
f969241e 12038 record_buf[index++] = register_count;
72508ac0 12039 register_bits = register_bits >> 1;
f969241e 12040 register_count++;
72508ac0 12041 }
12042 record_buf[index++] = reg_src1;
12043 thumb_insn_r->reg_rec_count = index;
12044 }
12045 else if (0 == opcode2)
12046 {
12047      /* Handle STMIA. */
12048 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12049 /* Get Rn. */
12050 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12051 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12052 while (register_bits)
12053 {
12054 if (register_bits & 0x00000001)
12055 register_count++;
12056 register_bits = register_bits >> 1;
12057 }
12058 start_address = u_regval;
12059 thumb_insn_r->mem_rec_count = register_count;
12060 while (register_count)
12061 {
12062 record_buf_mem[(register_count * 2) - 1] = start_address;
12063 record_buf_mem[(register_count * 2) - 2] = 4;
12064 start_address = start_address + 4;
12065 register_count--;
12066 }
12067 }
12068 else if (0x1F == opcode1)
12069 {
12070 /* Handle arm syscall insn. */
97dfe206 12071 if (tdep->arm_syscall_record != NULL)
72508ac0 12072 {
12073 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12074 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12075 }
12076 else
12077 {
12078 printf_unfiltered (_("no syscall record support\n"));
12079 return -1;
12080 }
12081 }
12082
12083  /* B (1), the conditional branch, is automatically taken care of in process_record,
12084 as PC is saved there. */
12085
12086 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12087 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12088 record_buf_mem);
12089
12090 return ret;
12091}
12092
12093/* Handling opcode 111 insns. */
12094
12095static int
12096thumb_record_branch (insn_decode_record *thumb_insn_r)
12097{
12098 uint32_t record_buf[8];
12099 uint32_t bits_h = 0;
12100
12101 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12102
12103 if (2 == bits_h || 3 == bits_h)
12104 {
12105 /* BL */
12106 record_buf[0] = ARM_LR_REGNUM;
12107 thumb_insn_r->reg_rec_count = 1;
12108 }
12109 else if (1 == bits_h)
12110 {
12111 /* BLX(1). */
12112 record_buf[0] = ARM_PS_REGNUM;
12113 record_buf[1] = ARM_LR_REGNUM;
12114 thumb_insn_r->reg_rec_count = 2;
12115 }
12116
12117 /* B(2) is automatically taken care in process_record, as PC is
12118 saved there. */
12119
12120 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12121
12122 return 0;
12123}
12124
12125/* Handler for thumb2 load/store multiple instructions. */
12126
12127static int
12128thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12129{
12130 struct regcache *reg_cache = thumb2_insn_r->regcache;
12131
12132 uint32_t reg_rn, op;
12133 uint32_t register_bits = 0, register_count = 0;
12134 uint32_t index = 0, start_address = 0;
12135 uint32_t record_buf[24], record_buf_mem[48];
12136
12137 ULONGEST u_regval = 0;
12138
12139 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12140 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12141
12142 if (0 == op || 3 == op)
12143 {
12144 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12145 {
12146 /* Handle RFE instruction. */
12147 record_buf[0] = ARM_PS_REGNUM;
12148 thumb2_insn_r->reg_rec_count = 1;
12149 }
12150 else
12151 {
12152 /* Handle SRS instruction after reading banked SP. */
12153 return arm_record_unsupported_insn (thumb2_insn_r);
12154 }
12155 }
12156 else if (1 == op || 2 == op)
12157 {
12158 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12159 {
12160 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12161 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12162 while (register_bits)
12163 {
12164 if (register_bits & 0x00000001)
12165 record_buf[index++] = register_count;
12166
12167 register_count++;
12168 register_bits = register_bits >> 1;
12169 }
12170 record_buf[index++] = reg_rn;
12171 record_buf[index++] = ARM_PS_REGNUM;
12172 thumb2_insn_r->reg_rec_count = index;
12173 }
12174 else
12175 {
12176 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12177 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12178 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12179 while (register_bits)
12180 {
12181 if (register_bits & 0x00000001)
12182 register_count++;
12183
12184 register_bits = register_bits >> 1;
12185 }
12186
12187 if (1 == op)
12188 {
12189              /* Start address calculation for STMIA/STMEA. */
12190 start_address = u_regval;
12191 }
12192 else if (2 == op)
12193 {
12194              /* Start address calculation for STMDB/STMFD. */
12195 start_address = u_regval - register_count * 4;
12196 }
12197
12198 thumb2_insn_r->mem_rec_count = register_count;
12199 while (register_count)
12200 {
12201 record_buf_mem[register_count * 2 - 1] = start_address;
12202 record_buf_mem[register_count * 2 - 2] = 4;
12203 start_address = start_address + 4;
12204 register_count--;
12205 }
12206 record_buf[0] = reg_rn;
12207 record_buf[1] = ARM_PS_REGNUM;
12208 thumb2_insn_r->reg_rec_count = 2;
12209 }
12210 }
12211
12212 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12213 record_buf_mem);
12214 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12215 record_buf);
12216 return ARM_RECORD_SUCCESS;
12217}
12218
12219/* Handler for thumb2 load/store (dual/exclusive) and table branch
12220 instructions. */
12221
12222static int
12223thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12224{
12225 struct regcache *reg_cache = thumb2_insn_r->regcache;
12226
12227 uint32_t reg_rd, reg_rn, offset_imm;
12228 uint32_t reg_dest1, reg_dest2;
12229 uint32_t address, offset_addr;
12230 uint32_t record_buf[8], record_buf_mem[8];
12231 uint32_t op1, op2, op3;
12232
12233 ULONGEST u_regval[2];
12234
12235 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12236 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12237 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12238
12239 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12240 {
12241      if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12242 {
12243 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12244 record_buf[0] = reg_dest1;
12245 record_buf[1] = ARM_PS_REGNUM;
12246 thumb2_insn_r->reg_rec_count = 2;
12247 }
12248
12249 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12250 {
12251 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12252 record_buf[2] = reg_dest2;
12253 thumb2_insn_r->reg_rec_count = 3;
12254 }
12255 }
12256 else
12257 {
12258 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12259 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12260
12261 if (0 == op1 && 0 == op2)
12262 {
12263 /* Handle STREX. */
12264 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12265 address = u_regval[0] + (offset_imm * 4);
12266 record_buf_mem[0] = 4;
12267 record_buf_mem[1] = address;
12268 thumb2_insn_r->mem_rec_count = 1;
12269 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12270 record_buf[0] = reg_rd;
12271 thumb2_insn_r->reg_rec_count = 1;
12272 }
12273 else if (1 == op1 && 0 == op2)
12274 {
12275 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12276 record_buf[0] = reg_rd;
12277 thumb2_insn_r->reg_rec_count = 1;
12278 address = u_regval[0];
12279 record_buf_mem[1] = address;
12280
12281 if (4 == op3)
12282 {
12283 /* Handle STREXB. */
12284 record_buf_mem[0] = 1;
12285 thumb2_insn_r->mem_rec_count = 1;
12286 }
12287 else if (5 == op3)
12288 {
12289 /* Handle STREXH. */
12290              record_buf_mem[0] = 2;
12291 thumb2_insn_r->mem_rec_count = 1;
12292 }
12293 else if (7 == op3)
12294 {
12295 /* Handle STREXD. */
12296 address = u_regval[0];
12297 record_buf_mem[0] = 4;
12298 record_buf_mem[2] = 4;
12299 record_buf_mem[3] = address + 4;
12300 thumb2_insn_r->mem_rec_count = 2;
12301 }
12302 }
12303 else
12304 {
12305 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12306
12307 if (bit (thumb2_insn_r->arm_insn, 24))
12308 {
12309 if (bit (thumb2_insn_r->arm_insn, 23))
12310 offset_addr = u_regval[0] + (offset_imm * 4);
12311 else
12312 offset_addr = u_regval[0] - (offset_imm * 4);
12313
12314 address = offset_addr;
12315 }
12316 else
12317 address = u_regval[0];
12318
12319 record_buf_mem[0] = 4;
12320 record_buf_mem[1] = address;
12321 record_buf_mem[2] = 4;
12322 record_buf_mem[3] = address + 4;
12323 thumb2_insn_r->mem_rec_count = 2;
12324 record_buf[0] = reg_rn;
12325 thumb2_insn_r->reg_rec_count = 1;
12326 }
12327 }
12328
12329 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12330 record_buf);
12331 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12332 record_buf_mem);
12333 return ARM_RECORD_SUCCESS;
12334}
12335
12336/* Handler for thumb2 data processing (shifted register and modified immediate)
12337 instructions. */
12338
12339static int
12340thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12341{
12342 uint32_t reg_rd, op;
12343 uint32_t record_buf[8];
12344
12345 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12346 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12347
12348 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12349 {
12350 record_buf[0] = ARM_PS_REGNUM;
12351 thumb2_insn_r->reg_rec_count = 1;
12352 }
12353 else
12354 {
12355 record_buf[0] = reg_rd;
12356 record_buf[1] = ARM_PS_REGNUM;
12357 thumb2_insn_r->reg_rec_count = 2;
12358 }
12359
12360 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12361 record_buf);
12362 return ARM_RECORD_SUCCESS;
12363}
12364
12365/* Generic handler for thumb2 instructions which affect the destination and PS
12366 registers. */
12367
12368static int
12369thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12370{
12371 uint32_t reg_rd;
12372 uint32_t record_buf[8];
12373
12374 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12375
12376 record_buf[0] = reg_rd;
12377 record_buf[1] = ARM_PS_REGNUM;
12378 thumb2_insn_r->reg_rec_count = 2;
12379
12380 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12381 record_buf);
12382 return ARM_RECORD_SUCCESS;
12383}
12384
12385/* Handler for thumb2 branch and miscellaneous control instructions. */
12386
12387static int
12388thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12389{
12390 uint32_t op, op1, op2;
12391 uint32_t record_buf[8];
12392
12393 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12394 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12395 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12396
12397 /* Handle MSR insn. */
12398 if (!(op1 & 0x2) && 0x38 == op)
12399 {
12400 if (!(op2 & 0x3))
12401 {
12402 /* CPSR is going to be changed. */
12403 record_buf[0] = ARM_PS_REGNUM;
12404 thumb2_insn_r->reg_rec_count = 1;
12405 }
12406 else
12407 {
12408 arm_record_unsupported_insn(thumb2_insn_r);
12409 return -1;
12410 }
12411 }
12412 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12413 {
12414 /* BLX. */
12415 record_buf[0] = ARM_PS_REGNUM;
12416 record_buf[1] = ARM_LR_REGNUM;
12417 thumb2_insn_r->reg_rec_count = 2;
12418 }
12419
12420 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12421 record_buf);
12422 return ARM_RECORD_SUCCESS;
12423}
12424
12425/* Handler for thumb2 store single data item instructions. */
12426
12427static int
12428thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12429{
12430 struct regcache *reg_cache = thumb2_insn_r->regcache;
12431
12432 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12433 uint32_t address, offset_addr;
12434 uint32_t record_buf[8], record_buf_mem[8];
12435 uint32_t op1, op2;
12436
12437 ULONGEST u_regval[2];
12438
12439 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12440 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12441 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12442 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12443
12444 if (bit (thumb2_insn_r->arm_insn, 23))
12445 {
12446 /* T2 encoding. */
12447 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12448 offset_addr = u_regval[0] + offset_imm;
12449 address = offset_addr;
12450 }
12451 else
12452 {
12453 /* T3 encoding. */
12454 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12455 {
12456 /* Handle STRB (register). */
12457 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12458 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12459 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12460 offset_addr = u_regval[1] << shift_imm;
12461 address = u_regval[0] + offset_addr;
12462 }
12463 else
12464 {
12465 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12466 if (bit (thumb2_insn_r->arm_insn, 10))
12467 {
12468 if (bit (thumb2_insn_r->arm_insn, 9))
12469 offset_addr = u_regval[0] + offset_imm;
12470 else
12471 offset_addr = u_regval[0] - offset_imm;
12472
12473 address = offset_addr;
12474 }
12475 else
12476 address = u_regval[0];
12477 }
12478 }
12479
12480 switch (op1)
12481 {
12482 /* Store byte instructions. */
12483 case 4:
12484 case 0:
12485 record_buf_mem[0] = 1;
12486 break;
12487 /* Store half word instructions. */
12488 case 1:
12489 case 5:
12490 record_buf_mem[0] = 2;
12491 break;
12492 /* Store word instructions. */
12493 case 2:
12494 case 6:
12495 record_buf_mem[0] = 4;
12496 break;
12497
12498 default:
12499 gdb_assert_not_reached ("no decoding pattern found");
12500 break;
12501 }
12502
12503 record_buf_mem[1] = address;
12504 thumb2_insn_r->mem_rec_count = 1;
12505 record_buf[0] = reg_rn;
12506 thumb2_insn_r->reg_rec_count = 1;
12507
12508 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12509 record_buf);
12510 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12511 record_buf_mem);
12512 return ARM_RECORD_SUCCESS;
12513}
12514
12515/* Handler for thumb2 load memory hints instructions. */
12516
12517static int
12518thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12519{
12520 uint32_t record_buf[8];
12521 uint32_t reg_rt, reg_rn;
12522
12523 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12524 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12525
12526 if (ARM_PC_REGNUM != reg_rt)
12527 {
12528 record_buf[0] = reg_rt;
12529 record_buf[1] = reg_rn;
12530 record_buf[2] = ARM_PS_REGNUM;
12531 thumb2_insn_r->reg_rec_count = 3;
12532
12533 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12534 record_buf);
12535 return ARM_RECORD_SUCCESS;
12536 }
12537
12538 return ARM_RECORD_FAILURE;
12539}
12540
12541/* Handler for thumb2 load word instructions. */
12542
12543static int
12544thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12545{
12546 uint32_t record_buf[8];
12547
12548 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12549 record_buf[1] = ARM_PS_REGNUM;
12550 thumb2_insn_r->reg_rec_count = 2;
12551
12552 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12553 record_buf);
12554 return ARM_RECORD_SUCCESS;
12555}
12556
12557/* Handler for thumb2 long multiply, long multiply accumulate, and
12558 divide instructions. */
12559
12560static int
12561thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12562{
12563 uint32_t opcode1 = 0, opcode2 = 0;
12564 uint32_t record_buf[8];
12565
12566 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12567 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12568
12569 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12570 {
12571      /* Handle SMULL, UMULL, SMLAL and UMLAL: they write the RdLo/RdHi */
12572      /* register pair, and the CPSR is recorded conservatively as well. */
12573 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12574 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12575 record_buf[2] = ARM_PS_REGNUM;
12576 thumb2_insn_r->reg_rec_count = 3;
12577 }
12578 else if (1 == opcode1 || 3 == opcode2)
12579 {
12580 /* Handle SDIV and UDIV. */
12581 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12582 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12583 record_buf[2] = ARM_PS_REGNUM;
12584 thumb2_insn_r->reg_rec_count = 3;
12585 }
12586 else
12587 return ARM_RECORD_FAILURE;
12588
12589 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12590 record_buf);
12591 return ARM_RECORD_SUCCESS;
12592}
12593
12594/* Record handler for thumb32 coprocessor instructions. */
12595
12596static int
12597thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12598{
12599 if (bit (thumb2_insn_r->arm_insn, 25))
12600 return arm_record_coproc_data_proc (thumb2_insn_r);
12601 else
12602 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12603}
12604
12605/* Record handler for advanced SIMD structure load/store instructions. */
12606
12607static int
12608thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12609{
12610 struct regcache *reg_cache = thumb2_insn_r->regcache;
12611 uint32_t l_bit, a_bit, b_bits;
12612 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12613 uint32_t reg_rn, reg_vd, address, f_elem;
12614 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12615 uint8_t f_ebytes;
12616
12617 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12618 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12619 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12620 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12621 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12622 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12623 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12624 f_elem = 8 / f_ebytes;
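  /* Illustrative example (values assumed, not from the original source):
     if bits 6-7 were 0b10, f_ebytes would be 4 and f_elem 2, i.e. each
     64-bit D register transfers two 32-bit elements.  */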
12625
12626 if (!l_bit)
12627 {
12628 ULONGEST u_regval = 0;
12629 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12630 address = u_regval;
12631
12632 if (!a_bit)
12633 {
12634 /* Handle VST1. */
12635 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12636 {
12637 if (b_bits == 0x07)
12638 bf_regs = 1;
12639 else if (b_bits == 0x0a)
12640 bf_regs = 2;
12641 else if (b_bits == 0x06)
12642 bf_regs = 3;
12643 else if (b_bits == 0x02)
12644 bf_regs = 4;
12645 else
12646 bf_regs = 0;
12647
12648 for (index_r = 0; index_r < bf_regs; index_r++)
12649 {
12650 for (index_e = 0; index_e < f_elem; index_e++)
12651 {
12652 record_buf_mem[index_m++] = f_ebytes;
12653 record_buf_mem[index_m++] = address;
12654 address = address + f_ebytes;
12655 thumb2_insn_r->mem_rec_count += 1;
12656 }
12657 }
12658 }
12659 /* Handle VST2. */
12660 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12661 {
12662 if (b_bits == 0x09 || b_bits == 0x08)
12663 bf_regs = 1;
12664 else if (b_bits == 0x03)
12665 bf_regs = 2;
12666 else
12667 bf_regs = 0;
12668
12669 for (index_r = 0; index_r < bf_regs; index_r++)
12670 for (index_e = 0; index_e < f_elem; index_e++)
12671 {
12672 for (loop_t = 0; loop_t < 2; loop_t++)
12673 {
12674 record_buf_mem[index_m++] = f_ebytes;
12675 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12676 thumb2_insn_r->mem_rec_count += 1;
12677 }
12678 address = address + (2 * f_ebytes);
12679 }
12680 }
12681 /* Handle VST3. */
12682 else if ((b_bits & 0x0e) == 0x04)
12683 {
12684 for (index_e = 0; index_e < f_elem; index_e++)
12685 {
12686 for (loop_t = 0; loop_t < 3; loop_t++)
12687 {
12688 record_buf_mem[index_m++] = f_ebytes;
12689 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12690 thumb2_insn_r->mem_rec_count += 1;
12691 }
12692 address = address + (3 * f_ebytes);
12693 }
12694 }
12695 /* Handle VST4. */
12696 else if (!(b_bits & 0x0e))
12697 {
12698 for (index_e = 0; index_e < f_elem; index_e++)
12699 {
12700 for (loop_t = 0; loop_t < 4; loop_t++)
12701 {
12702 record_buf_mem[index_m++] = f_ebytes;
12703 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12704 thumb2_insn_r->mem_rec_count += 1;
12705 }
12706 address = address + (4 * f_ebytes);
12707 }
12708 }
12709 }
12710 else
12711 {
12712 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12713
12714 if (bft_size == 0x00)
12715 f_ebytes = 1;
12716 else if (bft_size == 0x01)
12717 f_ebytes = 2;
12718 else if (bft_size == 0x02)
12719 f_ebytes = 4;
12720 else
12721 f_ebytes = 0;
12722
12723 /* Handle VST1. */
12724 if (!(b_bits & 0x0b) || b_bits == 0x08)
12725 thumb2_insn_r->mem_rec_count = 1;
12726 /* Handle VST2. */
12727 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12728 thumb2_insn_r->mem_rec_count = 2;
12729 /* Handle VST3. */
12730 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12731 thumb2_insn_r->mem_rec_count = 3;
12732 /* Handle VST4. */
12733 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12734 thumb2_insn_r->mem_rec_count = 4;
12735
12736 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12737 {
12738          record_buf_mem[index_m * 2] = f_ebytes;
12739          record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12740 }
12741 }
12742 }
12743 else
12744 {
12745 if (!a_bit)
12746 {
12747 /* Handle VLD1. */
12748 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12749 thumb2_insn_r->reg_rec_count = 1;
12750 /* Handle VLD2. */
12751 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12752 thumb2_insn_r->reg_rec_count = 2;
12753 /* Handle VLD3. */
12754 else if ((b_bits & 0x0e) == 0x04)
12755 thumb2_insn_r->reg_rec_count = 3;
12756 /* Handle VLD4. */
12757 else if (!(b_bits & 0x0e))
12758 thumb2_insn_r->reg_rec_count = 4;
12759 }
12760 else
12761 {
12762 /* Handle VLD1. */
12763 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12764 thumb2_insn_r->reg_rec_count = 1;
12765 /* Handle VLD2. */
12766 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12767 thumb2_insn_r->reg_rec_count = 2;
12768 /* Handle VLD3. */
12769 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12770 thumb2_insn_r->reg_rec_count = 3;
12771 /* Handle VLD4. */
12772 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12773 thumb2_insn_r->reg_rec_count = 4;
12774
12775 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12776 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12777 }
12778 }
12779
12780 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12781 {
12782 record_buf[index_r] = reg_rn;
12783 thumb2_insn_r->reg_rec_count += 1;
12784 }
12785
12786 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12787 record_buf);
12788 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12789 record_buf_mem);
12790 return 0;
12791}
12792
12793/* Decodes thumb2 instruction type and invokes its record handler. */
12794
12795static unsigned int
12796thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12797{
12798 uint32_t op, op1, op2;
12799
12800 op = bit (thumb2_insn_r->arm_insn, 15);
12801 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12802 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12803
12804 if (op1 == 0x01)
12805 {
12806 if (!(op2 & 0x64 ))
12807 {
12808 /* Load/store multiple instruction. */
12809 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12810 }
12811 else if (!((op2 & 0x64) ^ 0x04))
12812 {
12813 /* Load/store (dual/exclusive) and table branch instruction. */
12814 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12815 }
12816 else if (!((op2 & 0x20) ^ 0x20))
12817 {
12818 /* Data-processing (shifted register). */
12819 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12820 }
12821 else if (op2 & 0x40)
12822 {
12823 /* Co-processor instructions. */
60cc5e93 12824 return thumb2_record_coproc_insn (thumb2_insn_r);
12825 }
12826 }
12827 else if (op1 == 0x02)
12828 {
12829 if (op)
12830 {
12831 /* Branches and miscellaneous control instructions. */
12832 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12833 }
12834 else if (op2 & 0x20)
12835 {
12836 /* Data-processing (plain binary immediate) instruction. */
12837 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12838 }
12839 else
12840 {
12841 /* Data-processing (modified immediate). */
12842 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12843 }
12844 }
12845 else if (op1 == 0x03)
12846 {
12847 if (!(op2 & 0x71 ))
12848 {
12849 /* Store single data item. */
12850 return thumb2_record_str_single_data (thumb2_insn_r);
12851 }
12852 else if (!((op2 & 0x71) ^ 0x10))
12853 {
12854 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12855 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12856 }
12857 else if (!((op2 & 0x67) ^ 0x01))
12858 {
12859 /* Load byte, memory hints instruction. */
12860 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12861 }
12862 else if (!((op2 & 0x67) ^ 0x03))
12863 {
12864 /* Load halfword, memory hints instruction. */
12865 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12866 }
12867 else if (!((op2 & 0x67) ^ 0x05))
12868 {
12869 /* Load word instruction. */
12870 return thumb2_record_ld_word (thumb2_insn_r);
12871 }
12872 else if (!((op2 & 0x70) ^ 0x20))
12873 {
12874 /* Data-processing (register) instruction. */
12875 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12876 }
12877 else if (!((op2 & 0x78) ^ 0x30))
12878 {
12879 /* Multiply, multiply accumulate, abs diff instruction. */
12880 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12881 }
12882 else if (!((op2 & 0x78) ^ 0x38))
12883 {
12884 /* Long multiply, long multiply accumulate, and divide. */
12885 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12886 }
12887 else if (op2 & 0x40)
12888 {
12889 /* Co-processor instructions. */
60cc5e93 12890 return thumb2_record_coproc_insn (thumb2_insn_r);
12891 }
12892 }
12893
12894 return -1;
12895}
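/* Illustrative example (encoding chosen for exposition): the 32-bit Thumb-2
   insn 0xE8BD8FF0 (POP.W {r4-r11, pc}) has op1 == 1 and (op2 & 0x64) == 0,
   so the dispatcher above hands it to thumb2_record_ld_st_multiple.  */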
12896
12897/* Extract an arm/thumb/thumb2 insn depending on the size, and return 0 on
12898   success and a positive value on failure. */
12899
12900static int
12901extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12902{
12903 gdb_byte buf[insn_size];
12904
12905 memset (&buf[0], 0, insn_size);
12906
12907 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12908 return 1;
12909 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12910 insn_size,
2959fed9 12911 gdbarch_byte_order_for_code (insn_record->gdbarch));
12912 return 0;
12913}
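/* Example (hypothetical values): with a little-endian code stream, reading
   insn_size == 2 bytes 0x80 0xB5 at this_addr yields arm_insn == 0xB580,
   i.e. the Thumb PUSH {r7, lr} encoding.  */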
12914
12915typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12916
12917/* Decode an arm/thumb insn depending on condition codes and opcodes, and
12918 dispatch it. */
12919
12920static int
12921decode_insn (insn_decode_record *arm_record, record_type_t record_type,
01e57735 12922 uint32_t insn_size)
12923{
12924
12925  /* Bits 25, 26 and 27 (counting from bit 0) decode the type of an ARM
12926 instruction. */
0fa9c223 12927 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12928 {
12929 arm_record_data_proc_misc_ld_str, /* 000. */
12930 arm_record_data_proc_imm, /* 001. */
12931 arm_record_ld_st_imm_offset, /* 010. */
12932 arm_record_ld_st_reg_offset, /* 011. */
12933 arm_record_ld_st_multiple, /* 100. */
12934 arm_record_b_bl, /* 101. */
60cc5e93 12935 arm_record_asimd_vfp_coproc, /* 110. */
72508ac0
PO
12936 arm_record_coproc_data_proc /* 111. */
12937 };
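/* For instance (illustrative encoding): the ARM insn 0xE59F1004
   (LDR r1, [pc, #4]) has bits 25-27 == 0b010, so it is dispatched to
   arm_record_ld_st_imm_offset.  */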
12938
12939  /* Bits 13, 14 and 15 (counting from bit 0) decode the type of a Thumb
12940 instruction. */
0fa9c223 12941 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12942  {
12943 thumb_record_shift_add_sub, /* 000. */
12944 thumb_record_add_sub_cmp_mov, /* 001. */
12945 thumb_record_ld_st_reg_offset, /* 010. */
12946 thumb_record_ld_st_imm_offset, /* 011. */
12947 thumb_record_ld_st_stack, /* 100. */
12948 thumb_record_misc, /* 101. */
12949 thumb_record_ldm_stm_swi, /* 110. */
12950 thumb_record_branch /* 111. */
12951 };
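/* Likewise (illustrative encoding): the Thumb insn 0xB580 (PUSH {r7, lr})
   has bits 13-15 == 0b101, so it is dispatched to thumb_record_misc.  */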
12952
12953  uint32_t ret = 0;   /* Return value: negative on failure, 0 on success. */
12954 uint32_t insn_id = 0;
12955
12956 if (extract_arm_insn (arm_record, insn_size))
12957 {
12958 if (record_debug)
12959 {
12960 printf_unfiltered (_("Process record: error reading memory at "
12961 "addr %s len = %d.\n"),
12962 paddress (arm_record->gdbarch,
12963 arm_record->this_addr), insn_size);
12964 }
12965 return -1;
12966 }
12967 else if (ARM_RECORD == record_type)
12968 {
12969 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12970 insn_id = bits (arm_record->arm_insn, 25, 27);
12971
12972 if (arm_record->cond == 0xf)
12973 ret = arm_record_extension_space (arm_record);
12974 else
01e57735 12975 {
12976 /* If this insn has fallen into extension space
12977 then we need not decode it anymore. */
12978 ret = arm_handle_insn[insn_id] (arm_record);
12979 }
12980 if (ret != ARM_RECORD_SUCCESS)
12981 {
12982 arm_record_unsupported_insn (arm_record);
12983 ret = -1;
12984 }
12985 }
12986 else if (THUMB_RECORD == record_type)
12987 {
12988      /* As Thumb does not have condition codes, set cond to -1. */
12989 arm_record->cond = -1;
12990 insn_id = bits (arm_record->arm_insn, 13, 15);
12991 ret = thumb_handle_insn[insn_id] (arm_record);
12992 if (ret != ARM_RECORD_SUCCESS)
12993 {
12994 arm_record_unsupported_insn (arm_record);
12995 ret = -1;
12996 }
12997 }
12998 else if (THUMB2_RECORD == record_type)
12999 {
13000      /* As Thumb does not have condition codes, set cond to -1. */
13001 arm_record->cond = -1;
13002
13003 /* Swap first half of 32bit thumb instruction with second half. */
13004 arm_record->arm_insn
01e57735 13005 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 13006
ca92db2d 13007 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 13008
ca92db2d 13009 if (ret != ARM_RECORD_SUCCESS)
13010 {
13011 arm_record_unsupported_insn (arm_record);
13012 ret = -1;
13013 }
13014 }
13015 else
13016 {
13017 /* Throw assertion. */
13018 gdb_assert_not_reached ("not a valid instruction, could not decode");
13019 }
13020
13021 return ret;
13022}
13023
13024
13025/* Cleans up local record registers and memory allocations. */
13026
13027static void
13028deallocate_reg_mem (insn_decode_record *record)
13029{
13030 xfree (record->arm_regs);
13031 xfree (record->arm_mems);
13032}
13033
13034
01e57735 13035/* Parse the current instruction and record the values of the registers and
13036   memory that will be changed by the current instruction into record_arch_list.
13037 Return -1 if something is wrong. */
13038
13039int
13040arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13041 CORE_ADDR insn_addr)
13042{
13043
13044 uint32_t no_of_rec = 0;
13045 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13046 ULONGEST t_bit = 0, insn_id = 0;
13047
13048 ULONGEST u_regval = 0;
13049
13050 insn_decode_record arm_record;
13051
13052 memset (&arm_record, 0, sizeof (insn_decode_record));
13053 arm_record.regcache = regcache;
13054 arm_record.this_addr = insn_addr;
13055 arm_record.gdbarch = gdbarch;
13056
13057
13058 if (record_debug > 1)
13059 {
13060 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 13061 "addr = %s\n",
13062 paddress (gdbarch, arm_record.this_addr));
13063 }
13064
13065 if (extract_arm_insn (&arm_record, 2))
13066 {
13067 if (record_debug)
13068 {
13069 printf_unfiltered (_("Process record: error reading memory at "
13070 "addr %s len = %d.\n"),
13071 paddress (arm_record.gdbarch,
13072 arm_record.this_addr), 2);
13073 }
13074 return -1;
13075 }
13076
13077  /* Check whether the insn is a Thumb or an ARM one. */
13078
13079 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13080 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13081
13082
13083 if (!(u_regval & t_bit))
13084 {
13085 /* We are decoding arm insn. */
13086 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13087 }
13088 else
13089 {
13090 insn_id = bits (arm_record.arm_insn, 11, 15);
13091      /* Is it a Thumb-2 insn? */
13092 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13093 {
13094 ret = decode_insn (&arm_record, THUMB2_RECORD,
13095 THUMB2_INSN_SIZE_BYTES);
13096 }
72508ac0 13097 else
13098 {
13099 /* We are decoding thumb insn. */
13100 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13101 }
13102 }
13103
13104 if (0 == ret)
13105 {
13106 /* Record registers. */
25ea693b 13107 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13108 if (arm_record.arm_regs)
13109 {
13110 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13111 {
13112 if (record_full_arch_list_add_reg
25ea693b 13113 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13114 ret = -1;
13115 }
13116 }
13117 /* Record memories. */
13118 if (arm_record.arm_mems)
13119 {
13120 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13121 {
13122 if (record_full_arch_list_add_mem
13123 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13124 arm_record.arm_mems[no_of_rec].len))
13125 ret = -1;
13126 }
13127 }
72508ac0 13128
25ea693b 13129 if (record_full_arch_list_add_end ())
01e57735 13130 ret = -1;
13131 }
13132
13133
13134 deallocate_reg_mem (&arm_record);
13135
13136 return ret;
13137}
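/* A minimal sketch of how this entry point is expected to be wired up:
   arm_gdbarch_init (elsewhere in this file) registers it with

     set_gdbarch_process_record (gdbarch, arm_process_record);

   so that "record full" can note which registers and memory locations the
   instruction at INSN_ADDR is about to change before it executes.  */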