ld: fix regressions with rett instructions in sparc tests.
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
CommitLineData
ed9a39eb 1/* Common target dependent code for GDB on ARM systems.
0fd88904 2
61baf725 3 Copyright (C) 1988-2017 Free Software Foundation, Inc.
c906108c 4
c5aa993b 5 This file is part of GDB.
c906108c 6
c5aa993b
JM
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
a9762ec7 9 the Free Software Foundation; either version 3 of the License, or
c5aa993b 10 (at your option) any later version.
c906108c 11
c5aa993b
JM
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
c906108c 16
c5aa993b 17 You should have received a copy of the GNU General Public License
a9762ec7 18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
c906108c 19
0baeab03
PA
20#include "defs.h"
21
0963b4bd 22#include <ctype.h> /* XXX for isupper (). */
34e8f22d 23
c906108c
SS
24#include "frame.h"
25#include "inferior.h"
45741a9c 26#include "infrun.h"
c906108c
SS
27#include "gdbcmd.h"
28#include "gdbcore.h"
0963b4bd 29#include "dis-asm.h" /* For register styles. */
e47ad6c0 30#include "disasm.h"
4e052eda 31#include "regcache.h"
54483882 32#include "reggroups.h"
d16aafd8 33#include "doublest.h"
fd0407d6 34#include "value.h"
34e8f22d 35#include "arch-utils.h"
4be87837 36#include "osabi.h"
eb5492fa
DJ
37#include "frame-unwind.h"
38#include "frame-base.h"
39#include "trad-frame.h"
842e1f1e
DJ
40#include "objfiles.h"
41#include "dwarf2-frame.h"
e4c16157 42#include "gdbtypes.h"
29d73ae4 43#include "prologue-value.h"
25f8c692 44#include "remote.h"
123dc839
DJ
45#include "target-descriptions.h"
46#include "user-regs.h"
0e9e9abd 47#include "observer.h"
34e8f22d 48
8689682c 49#include "arch/arm.h"
d9311bfa 50#include "arch/arm-get-next-pcs.h"
34e8f22d 51#include "arm-tdep.h"
26216b98 52#include "gdb/sim-arm.h"
34e8f22d 53
082fc60d
RE
54#include "elf-bfd.h"
55#include "coff/internal.h"
97e03143 56#include "elf/arm.h"
c906108c 57
60c5725c 58#include "vec.h"
26216b98 59
72508ac0 60#include "record.h"
d02ed0bb 61#include "record-full.h"
325fac50 62#include <algorithm>
72508ac0 63
0a69eedb
YQ
64#include "features/arm/arm-with-m.c"
65#include "features/arm/arm-with-m-fpa-layout.c"
66#include "features/arm/arm-with-m-vfp-d16.c"
67#include "features/arm/arm-with-iwmmxt.c"
68#include "features/arm/arm-with-vfpv2.c"
69#include "features/arm/arm-with-vfpv3.c"
70#include "features/arm/arm-with-neon.c"
9779414d 71
b121eeb9
YQ
72#if GDB_SELF_TEST
73#include "selftest.h"
74#endif
75
6529d2dd
AC
76static int arm_debug;
77
082fc60d
RE
78/* Macros for setting and testing a bit in a minimal symbol that marks
79 it as Thumb function. The MSB of the minimal symbol's "info" field
f594e5e9 80 is used for this purpose.
082fc60d
RE
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
f594e5e9 83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
082fc60d 84
0963b4bd 85#define MSYMBOL_SET_SPECIAL(msym) \
b887350f 86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
082fc60d
RE
87
88#define MSYMBOL_IS_SPECIAL(msym) \
b887350f 89 MSYMBOL_TARGET_FLAG_1 (msym)
082fc60d 90
60c5725c
DJ
91/* Per-objfile data used for mapping symbols. */
92static const struct objfile_data *arm_objfile_data_key;
93
94struct arm_mapping_symbol
95{
96 bfd_vma value;
97 char type;
98};
99typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100DEF_VEC_O(arm_mapping_symbol_s);
101
102struct arm_per_objfile
103{
104 VEC(arm_mapping_symbol_s) **section_maps;
105};
106
afd7eef0
RE
107/* The list of available "set arm ..." and "show arm ..." commands. */
108static struct cmd_list_element *setarmcmdlist = NULL;
109static struct cmd_list_element *showarmcmdlist = NULL;
110
fd50bc42
RE
111/* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
40478521 113static const char *const fp_model_strings[] =
fd50bc42
RE
114{
115 "auto",
116 "softfpa",
117 "fpa",
118 "softvfp",
28e97307
DJ
119 "vfp",
120 NULL
fd50bc42
RE
121};
122
123/* A variable that can be configured by the user. */
124static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125static const char *current_fp_model = "auto";
126
28e97307 127/* The ABI to use. Keep this in sync with arm_abi_kind. */
40478521 128static const char *const arm_abi_strings[] =
28e97307
DJ
129{
130 "auto",
131 "APCS",
132 "AAPCS",
133 NULL
134};
135
136/* A variable that can be configured by the user. */
137static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138static const char *arm_abi_string = "auto";
139
0428b8f5 140/* The execution mode to assume. */
40478521 141static const char *const arm_mode_strings[] =
0428b8f5
DJ
142 {
143 "auto",
144 "arm",
68770265
MGD
145 "thumb",
146 NULL
0428b8f5
DJ
147 };
148
149static const char *arm_fallback_mode_string = "auto";
150static const char *arm_force_mode_string = "auto";
151
f32bf4a4
YQ
152/* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
123dc839
DJ
156static const struct
157{
158 const char *name;
159 int regnum;
160} arm_register_aliases[] = {
161 /* Basic register numbers. */
162 { "r0", 0 },
163 { "r1", 1 },
164 { "r2", 2 },
165 { "r3", 3 },
166 { "r4", 4 },
167 { "r5", 5 },
168 { "r6", 6 },
169 { "r7", 7 },
170 { "r8", 8 },
171 { "r9", 9 },
172 { "r10", 10 },
173 { "r11", 11 },
174 { "r12", 12 },
175 { "r13", 13 },
176 { "r14", 14 },
177 { "r15", 15 },
178 /* Synonyms (argument and variable registers). */
179 { "a1", 0 },
180 { "a2", 1 },
181 { "a3", 2 },
182 { "a4", 3 },
183 { "v1", 4 },
184 { "v2", 5 },
185 { "v3", 6 },
186 { "v4", 7 },
187 { "v5", 8 },
188 { "v6", 9 },
189 { "v7", 10 },
190 { "v8", 11 },
191 /* Other platform-specific names for r9. */
192 { "sb", 9 },
193 { "tr", 9 },
194 /* Special names. */
195 { "ip", 12 },
123dc839 196 { "lr", 14 },
123dc839
DJ
197 /* Names used by GCC (not listed in the ARM EABI). */
198 { "sl", 10 },
123dc839
DJ
199 /* A special name from the older ATPCS. */
200 { "wr", 7 },
201};
bc90b915 202
123dc839 203static const char *const arm_register_names[] =
da59e081
JM
204{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
94c30b78 210 "fps", "cpsr" }; /* 24 25 */
ed9a39eb 211
65b48a81
PB
212/* Holds the current set of options to be passed to the disassembler. */
213static char *arm_disassembler_options;
214
afd7eef0
RE
215/* Valid register name styles. */
216static const char **valid_disassembly_styles;
ed9a39eb 217
afd7eef0
RE
218/* Disassembly style to use. Default to "std" register names. */
219static const char *disassembly_style;
96baa820 220
ed9a39eb 221/* This is used to keep the bfd arch_info in sync with the disassembly
afd7eef0
RE
222 style. */
223static void set_disassembly_style_sfunc(char *, int,
ed9a39eb 224 struct cmd_list_element *);
65b48a81
PB
225static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
227 const char *);
ed9a39eb 228
b508a996 229static void convert_from_extended (const struct floatformat *, const void *,
be8626e0 230 void *, int);
b508a996 231static void convert_to_extended (const struct floatformat *, void *,
be8626e0 232 const void *, int);
ed9a39eb 233
05d1431c
PA
234static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, gdb_byte *buf);
58d6951d
DJ
237static void arm_neon_quad_write (struct gdbarch *gdbarch,
238 struct regcache *regcache,
239 int regnum, const gdb_byte *buf);
240
e7cf25a8 241static CORE_ADDR
553cb527 242 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
e7cf25a8
YQ
243
244
d9311bfa
AT
245/* get_next_pcs operations. */
246static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
247 arm_get_next_pcs_read_memory_unsigned_integer,
248 arm_get_next_pcs_syscall_next_pc,
249 arm_get_next_pcs_addr_bits_remove,
ed443b61
YQ
250 arm_get_next_pcs_is_thumb,
251 NULL,
d9311bfa
AT
252};
253
9b8d791a 254struct arm_prologue_cache
c3b4394c 255{
eb5492fa
DJ
256 /* The stack pointer at the time this frame was created; i.e. the
257 caller's stack pointer when this function was called. It is used
258 to identify this frame. */
259 CORE_ADDR prev_sp;
260
4be43953
DJ
261 /* The frame base for this frame is just prev_sp - frame size.
262 FRAMESIZE is the distance from the frame pointer to the
263 initial stack pointer. */
eb5492fa 264
c3b4394c 265 int framesize;
eb5492fa
DJ
266
267 /* The register used to hold the frame pointer for this frame. */
c3b4394c 268 int framereg;
eb5492fa
DJ
269
270 /* Saved register offsets. */
271 struct trad_frame_saved_reg *saved_regs;
c3b4394c 272};
ed9a39eb 273
0d39a070
DJ
274static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
275 CORE_ADDR prologue_start,
276 CORE_ADDR prologue_end,
277 struct arm_prologue_cache *cache);
278
cca44b1b
JB
279/* Architecture version for displaced stepping. This effects the behaviour of
280 certain instructions, and really should not be hard-wired. */
281
282#define DISPLACED_STEPPING_ARCH_VERSION 5
283
94c30b78 284/* Set to true if the 32-bit mode is in use. */
c906108c
SS
285
286int arm_apcs_32 = 1;
287
9779414d
DJ
288/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
289
478fd957 290int
9779414d
DJ
291arm_psr_thumb_bit (struct gdbarch *gdbarch)
292{
293 if (gdbarch_tdep (gdbarch)->is_m)
294 return XPSR_T;
295 else
296 return CPSR_T;
297}
298
d0e59a68
AT
299/* Determine if the processor is currently executing in Thumb mode. */
300
301int
302arm_is_thumb (struct regcache *regcache)
303{
304 ULONGEST cpsr;
305 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
306
307 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
308
309 return (cpsr & t_bit) != 0;
310}
311
b39cc962
DJ
312/* Determine if FRAME is executing in Thumb mode. */
313
25b41d01 314int
b39cc962
DJ
315arm_frame_is_thumb (struct frame_info *frame)
316{
317 CORE_ADDR cpsr;
9779414d 318 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
b39cc962
DJ
319
320 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
321 directly (from a signal frame or dummy frame) or by interpreting
322 the saved LR (from a prologue or DWARF frame). So consult it and
323 trust the unwinders. */
324 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
325
9779414d 326 return (cpsr & t_bit) != 0;
b39cc962
DJ
327}
328
60c5725c
DJ
329/* Callback for VEC_lower_bound. */
330
331static inline int
332arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
333 const struct arm_mapping_symbol *rhs)
334{
335 return lhs->value < rhs->value;
336}
337
f9d67f43
DJ
338/* Search for the mapping symbol covering MEMADDR. If one is found,
339 return its type. Otherwise, return 0. If START is non-NULL,
340 set *START to the location of the mapping symbol. */
c906108c 341
f9d67f43
DJ
342static char
343arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
c906108c 344{
60c5725c 345 struct obj_section *sec;
0428b8f5 346
60c5725c
DJ
347 /* If there are mapping symbols, consult them. */
348 sec = find_pc_section (memaddr);
349 if (sec != NULL)
350 {
351 struct arm_per_objfile *data;
352 VEC(arm_mapping_symbol_s) *map;
aded6f54
PA
353 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
354 0 };
60c5725c
DJ
355 unsigned int idx;
356
9a3c8263
SM
357 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
358 arm_objfile_data_key);
60c5725c
DJ
359 if (data != NULL)
360 {
361 map = data->section_maps[sec->the_bfd_section->index];
362 if (!VEC_empty (arm_mapping_symbol_s, map))
363 {
364 struct arm_mapping_symbol *map_sym;
365
366 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
367 arm_compare_mapping_symbols);
368
369 /* VEC_lower_bound finds the earliest ordered insertion
370 point. If the following symbol starts at this exact
371 address, we use that; otherwise, the preceding
372 mapping symbol covers this address. */
373 if (idx < VEC_length (arm_mapping_symbol_s, map))
374 {
375 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
376 if (map_sym->value == map_key.value)
f9d67f43
DJ
377 {
378 if (start)
379 *start = map_sym->value + obj_section_addr (sec);
380 return map_sym->type;
381 }
60c5725c
DJ
382 }
383
384 if (idx > 0)
385 {
386 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
f9d67f43
DJ
387 if (start)
388 *start = map_sym->value + obj_section_addr (sec);
389 return map_sym->type;
60c5725c
DJ
390 }
391 }
392 }
393 }
394
f9d67f43
DJ
395 return 0;
396}
397
398/* Determine if the program counter specified in MEMADDR is in a Thumb
399 function. This function should be called for addresses unrelated to
400 any executing frame; otherwise, prefer arm_frame_is_thumb. */
401
e3039479 402int
9779414d 403arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
f9d67f43 404{
7cbd4a93 405 struct bound_minimal_symbol sym;
f9d67f43 406 char type;
a42244db
YQ
407 struct displaced_step_closure* dsc
408 = get_displaced_step_closure_by_addr(memaddr);
409
410 /* If checking the mode of displaced instruction in copy area, the mode
411 should be determined by instruction on the original address. */
412 if (dsc)
413 {
414 if (debug_displaced)
415 fprintf_unfiltered (gdb_stdlog,
416 "displaced: check mode of %.8lx instead of %.8lx\n",
417 (unsigned long) dsc->insn_addr,
418 (unsigned long) memaddr);
419 memaddr = dsc->insn_addr;
420 }
f9d67f43
DJ
421
422 /* If bit 0 of the address is set, assume this is a Thumb address. */
423 if (IS_THUMB_ADDR (memaddr))
424 return 1;
425
426 /* If the user wants to override the symbol table, let him. */
427 if (strcmp (arm_force_mode_string, "arm") == 0)
428 return 0;
429 if (strcmp (arm_force_mode_string, "thumb") == 0)
430 return 1;
431
9779414d
DJ
432 /* ARM v6-M and v7-M are always in Thumb mode. */
433 if (gdbarch_tdep (gdbarch)->is_m)
434 return 1;
435
f9d67f43
DJ
436 /* If there are mapping symbols, consult them. */
437 type = arm_find_mapping_symbol (memaddr, NULL);
438 if (type)
439 return type == 't';
440
ed9a39eb 441 /* Thumb functions have a "special" bit set in minimal symbols. */
c906108c 442 sym = lookup_minimal_symbol_by_pc (memaddr);
7cbd4a93
TT
443 if (sym.minsym)
444 return (MSYMBOL_IS_SPECIAL (sym.minsym));
0428b8f5
DJ
445
446 /* If the user wants to override the fallback mode, let them. */
447 if (strcmp (arm_fallback_mode_string, "arm") == 0)
448 return 0;
449 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
450 return 1;
451
452 /* If we couldn't find any symbol, but we're talking to a running
453 target, then trust the current value of $cpsr. This lets
454 "display/i $pc" always show the correct mode (though if there is
455 a symbol table we will not reach here, so it still may not be
18819fa6 456 displayed in the mode it will be executed). */
0428b8f5 457 if (target_has_registers)
18819fa6 458 return arm_frame_is_thumb (get_current_frame ());
0428b8f5
DJ
459
460 /* Otherwise we're out of luck; we assume ARM. */
461 return 0;
c906108c
SS
462}
463
ca90e760
FH
464/* Determine if the address specified equals any of these magic return
465 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
466 architectures.
467
468 From ARMv6-M Reference Manual B1.5.8
469 Table B1-5 Exception return behavior
470
471 EXC_RETURN Return To Return Stack
472 0xFFFFFFF1 Handler mode Main
473 0xFFFFFFF9 Thread mode Main
474 0xFFFFFFFD Thread mode Process
475
476 From ARMv7-M Reference Manual B1.5.8
477 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
478
479 EXC_RETURN Return To Return Stack
480 0xFFFFFFF1 Handler mode Main
481 0xFFFFFFF9 Thread mode Main
482 0xFFFFFFFD Thread mode Process
483
484 Table B1-9 EXC_RETURN definition of exception return behavior, with
485 FP
486
487 EXC_RETURN Return To Return Stack Frame Type
488 0xFFFFFFE1 Handler mode Main Extended
489 0xFFFFFFE9 Thread mode Main Extended
490 0xFFFFFFED Thread mode Process Extended
491 0xFFFFFFF1 Handler mode Main Basic
492 0xFFFFFFF9 Thread mode Main Basic
493 0xFFFFFFFD Thread mode Process Basic
494
495 For more details see "B1.5.8 Exception return behavior"
496 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
497
498static int
499arm_m_addr_is_magic (CORE_ADDR addr)
500{
501 switch (addr)
502 {
503 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
504 the exception return behavior. */
505 case 0xffffffe1:
506 case 0xffffffe9:
507 case 0xffffffed:
508 case 0xfffffff1:
509 case 0xfffffff9:
510 case 0xfffffffd:
511 /* Address is magic. */
512 return 1;
513
514 default:
515 /* Address is not magic. */
516 return 0;
517 }
518}
519
181c1381 520/* Remove useless bits from addresses in a running program. */
34e8f22d 521static CORE_ADDR
24568a2c 522arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
c906108c 523{
2ae28aa9
YQ
524 /* On M-profile devices, do not strip the low bit from EXC_RETURN
525 (the magic exception return address). */
526 if (gdbarch_tdep (gdbarch)->is_m
ca90e760 527 && arm_m_addr_is_magic (val))
2ae28aa9
YQ
528 return val;
529
a3a2ee65 530 if (arm_apcs_32)
dd6be234 531 return UNMAKE_THUMB_ADDR (val);
c906108c 532 else
a3a2ee65 533 return (val & 0x03fffffc);
c906108c
SS
534}
535
0d39a070 536/* Return 1 if PC is the start of a compiler helper function which
e0634ccf
UW
537 can be safely ignored during prologue skipping. IS_THUMB is true
538 if the function is known to be a Thumb function due to the way it
539 is being called. */
0d39a070 540static int
e0634ccf 541skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
0d39a070 542{
e0634ccf 543 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7cbd4a93 544 struct bound_minimal_symbol msym;
0d39a070
DJ
545
546 msym = lookup_minimal_symbol_by_pc (pc);
7cbd4a93 547 if (msym.minsym != NULL
77e371c0 548 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
efd66ac6 549 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
e0634ccf 550 {
efd66ac6 551 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
0d39a070 552
e0634ccf
UW
553 /* The GNU linker's Thumb call stub to foo is named
554 __foo_from_thumb. */
555 if (strstr (name, "_from_thumb") != NULL)
556 name += 2;
0d39a070 557
e0634ccf
UW
558 /* On soft-float targets, __truncdfsf2 is called to convert promoted
559 arguments to their argument types in non-prototyped
560 functions. */
61012eef 561 if (startswith (name, "__truncdfsf2"))
e0634ccf 562 return 1;
61012eef 563 if (startswith (name, "__aeabi_d2f"))
e0634ccf 564 return 1;
0d39a070 565
e0634ccf 566 /* Internal functions related to thread-local storage. */
61012eef 567 if (startswith (name, "__tls_get_addr"))
e0634ccf 568 return 1;
61012eef 569 if (startswith (name, "__aeabi_read_tp"))
e0634ccf
UW
570 return 1;
571 }
572 else
573 {
574 /* If we run against a stripped glibc, we may be unable to identify
575 special functions by name. Check for one important case,
576 __aeabi_read_tp, by comparing the *code* against the default
577 implementation (this is hand-written ARM assembler in glibc). */
578
579 if (!is_thumb
198cd59d 580 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
e0634ccf 581 == 0xe3e00a0f /* mov r0, #0xffff0fff */
198cd59d 582 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
e0634ccf
UW
583 == 0xe240f01f) /* sub pc, r0, #31 */
584 return 1;
585 }
ec3d575a 586
0d39a070
DJ
587 return 0;
588}
589
621c6d5b
YQ
590/* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
591 the first 16-bit of instruction, and INSN2 is the second 16-bit of
592 instruction. */
593#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
594 ((bits ((insn1), 0, 3) << 12) \
595 | (bits ((insn1), 10, 10) << 11) \
596 | (bits ((insn2), 12, 14) << 8) \
597 | bits ((insn2), 0, 7))
598
599/* Extract the immediate from instruction movw/movt of encoding A. INSN is
600 the 32-bit instruction. */
601#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
602 ((bits ((insn), 16, 19) << 12) \
603 | bits ((insn), 0, 11))
604
ec3d575a
UW
605/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
606
607static unsigned int
608thumb_expand_immediate (unsigned int imm)
609{
610 unsigned int count = imm >> 7;
611
612 if (count < 8)
613 switch (count / 2)
614 {
615 case 0:
616 return imm & 0xff;
617 case 1:
618 return (imm & 0xff) | ((imm & 0xff) << 16);
619 case 2:
620 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
621 case 3:
622 return (imm & 0xff) | ((imm & 0xff) << 8)
623 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
624 }
625
626 return (0x80 | (imm & 0x7f)) << (32 - count);
627}
628
540314bd
YQ
629/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
630 epilogue, 0 otherwise. */
631
632static int
633thumb_instruction_restores_sp (unsigned short insn)
634{
635 return (insn == 0x46bd /* mov sp, r7 */
636 || (insn & 0xff80) == 0xb000 /* add sp, imm */
637 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
638}
639
29d73ae4
DJ
640/* Analyze a Thumb prologue, looking for a recognizable stack frame
641 and frame pointer. Scan until we encounter a store that could
0d39a070
DJ
642 clobber the stack frame unexpectedly, or an unknown instruction.
643 Return the last address which is definitely safe to skip for an
644 initial breakpoint. */
c906108c
SS
645
646static CORE_ADDR
29d73ae4
DJ
647thumb_analyze_prologue (struct gdbarch *gdbarch,
648 CORE_ADDR start, CORE_ADDR limit,
649 struct arm_prologue_cache *cache)
c906108c 650{
0d39a070 651 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 652 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
29d73ae4
DJ
653 int i;
654 pv_t regs[16];
655 struct pv_area *stack;
656 struct cleanup *back_to;
657 CORE_ADDR offset;
ec3d575a 658 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 659
29d73ae4
DJ
660 for (i = 0; i < 16; i++)
661 regs[i] = pv_register (i, 0);
55f960e1 662 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
29d73ae4
DJ
663 back_to = make_cleanup_free_pv_area (stack);
664
29d73ae4 665 while (start < limit)
c906108c 666 {
29d73ae4
DJ
667 unsigned short insn;
668
198cd59d 669 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 670
94c30b78 671 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 672 {
29d73ae4
DJ
673 int regno;
674 int mask;
4be43953
DJ
675
676 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
677 break;
29d73ae4
DJ
678
679 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
680 whether to save LR (R14). */
681 mask = (insn & 0xff) | ((insn & 0x100) << 6);
682
683 /* Calculate offsets of saved R0-R7 and LR. */
684 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
685 if (mask & (1 << regno))
686 {
29d73ae4
DJ
687 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
688 -4);
689 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
690 }
da59e081 691 }
1db01f22 692 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
da59e081 693 {
29d73ae4 694 offset = (insn & 0x7f) << 2; /* get scaled offset */
1db01f22
YQ
695 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
696 -offset);
da59e081 697 }
808f7ab1
YQ
698 else if (thumb_instruction_restores_sp (insn))
699 {
700 /* Don't scan past the epilogue. */
701 break;
702 }
0d39a070
DJ
703 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
704 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
705 (insn & 0xff) << 2);
706 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
707 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
708 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
709 bits (insn, 6, 8));
710 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
711 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
712 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
713 bits (insn, 0, 7));
714 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
715 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
716 && pv_is_constant (regs[bits (insn, 3, 5)]))
717 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
718 regs[bits (insn, 6, 8)]);
719 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
720 && pv_is_constant (regs[bits (insn, 3, 6)]))
721 {
722 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
723 int rm = bits (insn, 3, 6);
724 regs[rd] = pv_add (regs[rd], regs[rm]);
725 }
29d73ae4 726 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 727 {
29d73ae4
DJ
728 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
729 int src_reg = (insn & 0x78) >> 3;
730 regs[dst_reg] = regs[src_reg];
da59e081 731 }
29d73ae4 732 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 733 {
29d73ae4
DJ
734 /* Handle stores to the stack. Normally pushes are used,
735 but with GCC -mtpcs-frame, there may be other stores
736 in the prologue to create the frame. */
737 int regno = (insn >> 8) & 0x7;
738 pv_t addr;
739
740 offset = (insn & 0xff) << 2;
741 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
742
743 if (pv_area_store_would_trash (stack, addr))
744 break;
745
746 pv_area_store (stack, addr, 4, regs[regno]);
da59e081 747 }
0d39a070
DJ
748 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
749 {
750 int rd = bits (insn, 0, 2);
751 int rn = bits (insn, 3, 5);
752 pv_t addr;
753
754 offset = bits (insn, 6, 10) << 2;
755 addr = pv_add_constant (regs[rn], offset);
756
757 if (pv_area_store_would_trash (stack, addr))
758 break;
759
760 pv_area_store (stack, addr, 4, regs[rd]);
761 }
762 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
763 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 /* Ignore stores of argument registers to the stack. */
766 ;
767 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 /* Ignore block loads from the stack, potentially copying
770 parameters from memory. */
771 ;
772 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
773 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
774 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
775 /* Similarly ignore single loads from the stack. */
776 ;
777 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
778 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
779 /* Skip register copies, i.e. saves to another register
780 instead of the stack. */
781 ;
782 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
783 /* Recognize constant loads; even with small stacks these are necessary
784 on Thumb. */
785 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
786 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
787 {
788 /* Constant pool loads, for the same reason. */
789 unsigned int constant;
790 CORE_ADDR loc;
791
792 loc = start + 4 + bits (insn, 0, 7) * 4;
793 constant = read_memory_unsigned_integer (loc, 4, byte_order);
794 regs[bits (insn, 8, 10)] = pv_constant (constant);
795 }
db24da6d 796 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 797 {
0d39a070
DJ
798 unsigned short inst2;
799
198cd59d
YQ
800 inst2 = read_code_unsigned_integer (start + 2, 2,
801 byte_order_for_code);
0d39a070
DJ
802
803 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
804 {
805 /* BL, BLX. Allow some special function calls when
806 skipping the prologue; GCC generates these before
807 storing arguments to the stack. */
808 CORE_ADDR nextpc;
809 int j1, j2, imm1, imm2;
810
811 imm1 = sbits (insn, 0, 10);
812 imm2 = bits (inst2, 0, 10);
813 j1 = bit (inst2, 13);
814 j2 = bit (inst2, 11);
815
816 offset = ((imm1 << 12) + (imm2 << 1));
817 offset ^= ((!j2) << 22) | ((!j1) << 23);
818
819 nextpc = start + 4 + offset;
820 /* For BLX make sure to clear the low bits. */
821 if (bit (inst2, 12) == 0)
822 nextpc = nextpc & 0xfffffffc;
823
e0634ccf
UW
824 if (!skip_prologue_function (gdbarch, nextpc,
825 bit (inst2, 12) != 0))
0d39a070
DJ
826 break;
827 }
ec3d575a 828
0963b4bd
MS
829 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
830 { registers } */
ec3d575a
UW
831 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
832 {
833 pv_t addr = regs[bits (insn, 0, 3)];
834 int regno;
835
836 if (pv_area_store_would_trash (stack, addr))
837 break;
838
839 /* Calculate offsets of saved registers. */
840 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
841 if (inst2 & (1 << regno))
842 {
843 addr = pv_add_constant (addr, -4);
844 pv_area_store (stack, addr, 4, regs[regno]);
845 }
846
847 if (insn & 0x0020)
848 regs[bits (insn, 0, 3)] = addr;
849 }
850
0963b4bd
MS
851 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
852 [Rn, #+/-imm]{!} */
ec3d575a
UW
853 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
854 {
855 int regno1 = bits (inst2, 12, 15);
856 int regno2 = bits (inst2, 8, 11);
857 pv_t addr = regs[bits (insn, 0, 3)];
858
859 offset = inst2 & 0xff;
860 if (insn & 0x0080)
861 addr = pv_add_constant (addr, offset);
862 else
863 addr = pv_add_constant (addr, -offset);
864
865 if (pv_area_store_would_trash (stack, addr))
866 break;
867
868 pv_area_store (stack, addr, 4, regs[regno1]);
869 pv_area_store (stack, pv_add_constant (addr, 4),
870 4, regs[regno2]);
871
872 if (insn & 0x0020)
873 regs[bits (insn, 0, 3)] = addr;
874 }
875
876 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
877 && (inst2 & 0x0c00) == 0x0c00
878 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
879 {
880 int regno = bits (inst2, 12, 15);
881 pv_t addr = regs[bits (insn, 0, 3)];
882
883 offset = inst2 & 0xff;
884 if (inst2 & 0x0200)
885 addr = pv_add_constant (addr, offset);
886 else
887 addr = pv_add_constant (addr, -offset);
888
889 if (pv_area_store_would_trash (stack, addr))
890 break;
891
892 pv_area_store (stack, addr, 4, regs[regno]);
893
894 if (inst2 & 0x0100)
895 regs[bits (insn, 0, 3)] = addr;
896 }
897
898 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
899 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
900 {
901 int regno = bits (inst2, 12, 15);
902 pv_t addr;
903
904 offset = inst2 & 0xfff;
905 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
906
907 if (pv_area_store_would_trash (stack, addr))
908 break;
909
910 pv_area_store (stack, addr, 4, regs[regno]);
911 }
912
913 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 914 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 915 /* Ignore stores of argument registers to the stack. */
0d39a070 916 ;
ec3d575a
UW
917
918 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
919 && (inst2 & 0x0d00) == 0x0c00
0d39a070 920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 921 /* Ignore stores of argument registers to the stack. */
0d39a070 922 ;
ec3d575a 923
0963b4bd
MS
924 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
925 { registers } */
ec3d575a
UW
926 && (inst2 & 0x8000) == 0x0000
927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 /* Ignore block loads from the stack, potentially copying
929 parameters from memory. */
0d39a070 930 ;
ec3d575a 931
0963b4bd
MS
932 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
933 [Rn, #+/-imm] */
0d39a070 934 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 935 /* Similarly ignore dual loads from the stack. */
0d39a070 936 ;
ec3d575a
UW
937
938 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
939 && (inst2 & 0x0d00) == 0x0c00
0d39a070 940 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 941 /* Similarly ignore single loads from the stack. */
0d39a070 942 ;
ec3d575a
UW
943
944 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 945 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 946 /* Similarly ignore single loads from the stack. */
0d39a070 947 ;
ec3d575a
UW
948
949 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
950 && (inst2 & 0x8000) == 0x0000)
951 {
952 unsigned int imm = ((bits (insn, 10, 10) << 11)
953 | (bits (inst2, 12, 14) << 8)
954 | bits (inst2, 0, 7));
955
956 regs[bits (inst2, 8, 11)]
957 = pv_add_constant (regs[bits (insn, 0, 3)],
958 thumb_expand_immediate (imm));
959 }
960
961 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
962 && (inst2 & 0x8000) == 0x0000)
0d39a070 963 {
ec3d575a
UW
964 unsigned int imm = ((bits (insn, 10, 10) << 11)
965 | (bits (inst2, 12, 14) << 8)
966 | bits (inst2, 0, 7));
967
968 regs[bits (inst2, 8, 11)]
969 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
970 }
971
972 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
973 && (inst2 & 0x8000) == 0x0000)
974 {
975 unsigned int imm = ((bits (insn, 10, 10) << 11)
976 | (bits (inst2, 12, 14) << 8)
977 | bits (inst2, 0, 7));
978
979 regs[bits (inst2, 8, 11)]
980 = pv_add_constant (regs[bits (insn, 0, 3)],
981 - (CORE_ADDR) thumb_expand_immediate (imm));
982 }
983
984 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
985 && (inst2 & 0x8000) == 0x0000)
986 {
987 unsigned int imm = ((bits (insn, 10, 10) << 11)
988 | (bits (inst2, 12, 14) << 8)
989 | bits (inst2, 0, 7));
990
991 regs[bits (inst2, 8, 11)]
992 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
993 }
994
995 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
996 {
997 unsigned int imm = ((bits (insn, 10, 10) << 11)
998 | (bits (inst2, 12, 14) << 8)
999 | bits (inst2, 0, 7));
1000
1001 regs[bits (inst2, 8, 11)]
1002 = pv_constant (thumb_expand_immediate (imm));
1003 }
1004
1005 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1006 {
621c6d5b
YQ
1007 unsigned int imm
1008 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
ec3d575a
UW
1009
1010 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1011 }
1012
1013 else if (insn == 0xea5f /* mov.w Rd,Rm */
1014 && (inst2 & 0xf0f0) == 0)
1015 {
1016 int dst_reg = (inst2 & 0x0f00) >> 8;
1017 int src_reg = inst2 & 0xf;
1018 regs[dst_reg] = regs[src_reg];
1019 }
1020
1021 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1022 {
1023 /* Constant pool loads. */
1024 unsigned int constant;
1025 CORE_ADDR loc;
1026
cac395ea 1027 offset = bits (inst2, 0, 11);
ec3d575a
UW
1028 if (insn & 0x0080)
1029 loc = start + 4 + offset;
1030 else
1031 loc = start + 4 - offset;
1032
1033 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1034 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1035 }
1036
1037 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1038 {
1039 /* Constant pool loads. */
1040 unsigned int constant;
1041 CORE_ADDR loc;
1042
cac395ea 1043 offset = bits (inst2, 0, 7) << 2;
ec3d575a
UW
1044 if (insn & 0x0080)
1045 loc = start + 4 + offset;
1046 else
1047 loc = start + 4 - offset;
1048
1049 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1050 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1051
1052 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1053 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1054 }
1055
1056 else if (thumb2_instruction_changes_pc (insn, inst2))
1057 {
1058 /* Don't scan past anything that might change control flow. */
0d39a070
DJ
1059 break;
1060 }
ec3d575a
UW
1061 else
1062 {
1063 /* The optimizer might shove anything into the prologue,
1064 so we just skip what we don't recognize. */
1065 unrecognized_pc = start;
1066 }
0d39a070
DJ
1067
1068 start += 2;
1069 }
ec3d575a 1070 else if (thumb_instruction_changes_pc (insn))
3d74b771 1071 {
ec3d575a 1072 /* Don't scan past anything that might change control flow. */
da3c6d4a 1073 break;
3d74b771 1074 }
ec3d575a
UW
1075 else
1076 {
1077 /* The optimizer might shove anything into the prologue,
1078 so we just skip what we don't recognize. */
1079 unrecognized_pc = start;
1080 }
29d73ae4
DJ
1081
1082 start += 2;
c906108c
SS
1083 }
1084
0d39a070
DJ
1085 if (arm_debug)
1086 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1087 paddress (gdbarch, start));
1088
ec3d575a
UW
1089 if (unrecognized_pc == 0)
1090 unrecognized_pc = start;
1091
29d73ae4
DJ
1092 if (cache == NULL)
1093 {
1094 do_cleanups (back_to);
ec3d575a 1095 return unrecognized_pc;
29d73ae4
DJ
1096 }
1097
29d73ae4
DJ
1098 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1099 {
1100 /* Frame pointer is fp. Frame size is constant. */
1101 cache->framereg = ARM_FP_REGNUM;
1102 cache->framesize = -regs[ARM_FP_REGNUM].k;
1103 }
1104 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1105 {
1106 /* Frame pointer is r7. Frame size is constant. */
1107 cache->framereg = THUMB_FP_REGNUM;
1108 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1109 }
72a2e3dc 1110 else
29d73ae4
DJ
1111 {
1112 /* Try the stack pointer... this is a bit desperate. */
1113 cache->framereg = ARM_SP_REGNUM;
1114 cache->framesize = -regs[ARM_SP_REGNUM].k;
1115 }
29d73ae4
DJ
1116
1117 for (i = 0; i < 16; i++)
1118 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1119 cache->saved_regs[i].addr = offset;
1120
1121 do_cleanups (back_to);
ec3d575a 1122 return unrecognized_pc;
c906108c
SS
1123}
1124
621c6d5b
YQ
1125
1126/* Try to analyze the instructions starting from PC, which load symbol
1127 __stack_chk_guard. Return the address of instruction after loading this
1128 symbol, set the dest register number to *BASEREG, and set the size of
1129 instructions for loading symbol in OFFSET. Return 0 if instructions are
1130 not recognized. */
1131
1132static CORE_ADDR
1133arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1134 unsigned int *destreg, int *offset)
1135{
1136 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1137 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1138 unsigned int low, high, address;
1139
1140 address = 0;
1141 if (is_thumb)
1142 {
1143 unsigned short insn1
198cd59d 1144 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
621c6d5b
YQ
1145
1146 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1147 {
1148 *destreg = bits (insn1, 8, 10);
1149 *offset = 2;
6ae274b7
YQ
1150 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1151 address = read_memory_unsigned_integer (address, 4,
1152 byte_order_for_code);
621c6d5b
YQ
1153 }
1154 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1155 {
1156 unsigned short insn2
198cd59d 1157 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
621c6d5b
YQ
1158
1159 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1160
1161 insn1
198cd59d 1162 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
621c6d5b 1163 insn2
198cd59d 1164 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
621c6d5b
YQ
1165
1166 /* movt Rd, #const */
1167 if ((insn1 & 0xfbc0) == 0xf2c0)
1168 {
1169 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1170 *destreg = bits (insn2, 8, 11);
1171 *offset = 8;
1172 address = (high << 16 | low);
1173 }
1174 }
1175 }
1176 else
1177 {
2e9e421f 1178 unsigned int insn
198cd59d 1179 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
2e9e421f 1180
6ae274b7 1181 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
2e9e421f 1182 {
6ae274b7
YQ
1183 address = bits (insn, 0, 11) + pc + 8;
1184 address = read_memory_unsigned_integer (address, 4,
1185 byte_order_for_code);
1186
2e9e421f
UW
1187 *destreg = bits (insn, 12, 15);
1188 *offset = 4;
1189 }
1190 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1191 {
1192 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1193
1194 insn
198cd59d 1195 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
2e9e421f
UW
1196
1197 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1198 {
1199 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1200 *destreg = bits (insn, 12, 15);
1201 *offset = 8;
1202 address = (high << 16 | low);
1203 }
1204 }
621c6d5b
YQ
1205 }
1206
1207 return address;
1208}
1209
1210/* Try to skip a sequence of instructions used for stack protector. If PC
0963b4bd
MS
1211 points to the first instruction of this sequence, return the address of
1212 first instruction after this sequence, otherwise, return original PC.
621c6d5b
YQ
1213
1214 On arm, this sequence of instructions is composed of mainly three steps,
1215 Step 1: load symbol __stack_chk_guard,
1216 Step 2: load from address of __stack_chk_guard,
1217 Step 3: store it to somewhere else.
1218
1219 Usually, instructions on step 2 and step 3 are the same on various ARM
1220 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1221 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1222 instructions in step 1 vary from different ARM architectures. On ARMv7,
1223 they are,
1224
1225 movw Rn, #:lower16:__stack_chk_guard
1226 movt Rn, #:upper16:__stack_chk_guard
1227
1228 On ARMv5t, it is,
1229
1230 ldr Rn, .Label
1231 ....
1232 .Lable:
1233 .word __stack_chk_guard
1234
1235 Since ldr/str is a very popular instruction, we can't use them as
1236 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1237 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1238 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1239
1240static CORE_ADDR
1241arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1242{
1243 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
22e048c9 1244 unsigned int basereg;
7cbd4a93 1245 struct bound_minimal_symbol stack_chk_guard;
621c6d5b
YQ
1246 int offset;
1247 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1248 CORE_ADDR addr;
1249
1250 /* Try to parse the instructions in Step 1. */
1251 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1252 &basereg, &offset);
1253 if (!addr)
1254 return pc;
1255
1256 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
6041179a
JB
1257 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1258 Otherwise, this sequence cannot be for stack protector. */
1259 if (stack_chk_guard.minsym == NULL
61012eef 1260 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
621c6d5b
YQ
1261 return pc;
1262
1263 if (is_thumb)
1264 {
1265 unsigned int destreg;
1266 unsigned short insn
198cd59d 1267 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
621c6d5b
YQ
1268
1269 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1270 if ((insn & 0xf800) != 0x6800)
1271 return pc;
1272 if (bits (insn, 3, 5) != basereg)
1273 return pc;
1274 destreg = bits (insn, 0, 2);
1275
198cd59d
YQ
1276 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1277 byte_order_for_code);
621c6d5b
YQ
1278 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1279 if ((insn & 0xf800) != 0x6000)
1280 return pc;
1281 if (destreg != bits (insn, 0, 2))
1282 return pc;
1283 }
1284 else
1285 {
1286 unsigned int destreg;
1287 unsigned int insn
198cd59d 1288 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
621c6d5b
YQ
1289
1290 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1291 if ((insn & 0x0e500000) != 0x04100000)
1292 return pc;
1293 if (bits (insn, 16, 19) != basereg)
1294 return pc;
1295 destreg = bits (insn, 12, 15);
1296 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
198cd59d 1297 insn = read_code_unsigned_integer (pc + offset + 4,
621c6d5b
YQ
1298 4, byte_order_for_code);
1299 if ((insn & 0x0e500000) != 0x04000000)
1300 return pc;
1301 if (bits (insn, 12, 15) != destreg)
1302 return pc;
1303 }
1304 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1305 on arm. */
1306 if (is_thumb)
1307 return pc + offset + 4;
1308 else
1309 return pc + offset + 8;
1310}
1311
da3c6d4a
MS
1312/* Advance the PC across any function entry prologue instructions to
1313 reach some "real" code.
34e8f22d
RE
1314
1315 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1316 prologue:
c906108c 1317
c5aa993b
JM
1318 mov ip, sp
1319 [stmfd sp!, {a1,a2,a3,a4}]
1320 stmfd sp!, {...,fp,ip,lr,pc}
ed9a39eb
JM
1321 [stfe f7, [sp, #-12]!]
1322 [stfe f6, [sp, #-12]!]
1323 [stfe f5, [sp, #-12]!]
1324 [stfe f4, [sp, #-12]!]
0963b4bd 1325 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1326
34e8f22d 1327static CORE_ADDR
6093d2eb 1328arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1329{
a89fea3c 1330 CORE_ADDR func_addr, limit_pc;
c906108c 1331
a89fea3c
JL
1332 /* See if we can determine the end of the prologue via the symbol table.
1333 If so, then return either PC, or the PC after the prologue, whichever
1334 is greater. */
1335 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1336 {
d80b854b
UW
1337 CORE_ADDR post_prologue_pc
1338 = skip_prologue_using_sal (gdbarch, func_addr);
43f3e411 1339 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
0d39a070 1340
621c6d5b
YQ
1341 if (post_prologue_pc)
1342 post_prologue_pc
1343 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1344
1345
0d39a070
DJ
1346 /* GCC always emits a line note before the prologue and another
1347 one after, even if the two are at the same address or on the
1348 same line. Take advantage of this so that we do not need to
1349 know every instruction that might appear in the prologue. We
1350 will have producer information for most binaries; if it is
1351 missing (e.g. for -gstabs), assuming the GNU tools. */
1352 if (post_prologue_pc
43f3e411
DE
1353 && (cust == NULL
1354 || COMPUNIT_PRODUCER (cust) == NULL
61012eef
GB
1355 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1356 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
0d39a070
DJ
1357 return post_prologue_pc;
1358
a89fea3c 1359 if (post_prologue_pc != 0)
0d39a070
DJ
1360 {
1361 CORE_ADDR analyzed_limit;
1362
1363 /* For non-GCC compilers, make sure the entire line is an
1364 acceptable prologue; GDB will round this function's
1365 return value up to the end of the following line so we
1366 can not skip just part of a line (and we do not want to).
1367
1368 RealView does not treat the prologue specially, but does
1369 associate prologue code with the opening brace; so this
1370 lets us skip the first line if we think it is the opening
1371 brace. */
9779414d 1372 if (arm_pc_is_thumb (gdbarch, func_addr))
0d39a070
DJ
1373 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1374 post_prologue_pc, NULL);
1375 else
1376 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1377 post_prologue_pc, NULL);
1378
1379 if (analyzed_limit != post_prologue_pc)
1380 return func_addr;
1381
1382 return post_prologue_pc;
1383 }
c906108c
SS
1384 }
1385
a89fea3c
JL
1386 /* Can't determine prologue from the symbol table, need to examine
1387 instructions. */
c906108c 1388
a89fea3c
JL
1389 /* Find an upper limit on the function prologue using the debug
1390 information. If the debug information could not be used to provide
1391 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1392 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1393 limit_pc = skip_prologue_using_sal (gdbarch, pc);
a89fea3c
JL
1394 if (limit_pc == 0)
1395 limit_pc = pc + 64; /* Magic. */
1396
c906108c 1397
29d73ae4 1398 /* Check if this is Thumb code. */
9779414d 1399 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1400 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
21daaaaf
YQ
1401 else
1402 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
c906108c 1403}
94c30b78 1404
c5aa993b 1405/* *INDENT-OFF* */
c906108c
SS
1406/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1407 This function decodes a Thumb function prologue to determine:
1408 1) the size of the stack frame
1409 2) which registers are saved on it
1410 3) the offsets of saved regs
1411 4) the offset from the stack pointer to the frame pointer
c906108c 1412
da59e081
JM
1413 A typical Thumb function prologue would create this stack frame
1414 (offsets relative to FP)
c906108c
SS
1415 old SP -> 24 stack parameters
1416 20 LR
1417 16 R7
1418 R7 -> 0 local variables (16 bytes)
1419 SP -> -12 additional stack space (12 bytes)
1420 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1421 12 bytes. The frame register is R7.
da59e081 1422
da3c6d4a
MS
1423 The comments for thumb_skip_prolog() describe the algorithm we use
1424 to detect the end of the prolog. */
c5aa993b
JM
1425/* *INDENT-ON* */
1426
c906108c 1427static void
be8626e0 1428thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1429 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
c906108c
SS
1430{
1431 CORE_ADDR prologue_start;
1432 CORE_ADDR prologue_end;
c906108c 1433
b39cc962
DJ
1434 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1435 &prologue_end))
c906108c 1436 {
ec3d575a
UW
1437 /* See comment in arm_scan_prologue for an explanation of
1438 this heuristics. */
1439 if (prologue_end > prologue_start + 64)
1440 {
1441 prologue_end = prologue_start + 64;
1442 }
c906108c
SS
1443 }
1444 else
f7060f85
DJ
1445 /* We're in the boondocks: we have no idea where the start of the
1446 function is. */
1447 return;
c906108c 1448
325fac50 1449 prologue_end = std::min (prologue_end, prev_pc);
c906108c 1450
be8626e0 1451 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1452}
1453
f303bc3e
YQ
1454/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1455 otherwise. */
1456
1457static int
1458arm_instruction_restores_sp (unsigned int insn)
1459{
1460 if (bits (insn, 28, 31) != INST_NV)
1461 {
1462 if ((insn & 0x0df0f000) == 0x0080d000
1463 /* ADD SP (register or immediate). */
1464 || (insn & 0x0df0f000) == 0x0040d000
1465 /* SUB SP (register or immediate). */
1466 || (insn & 0x0ffffff0) == 0x01a0d000
1467 /* MOV SP. */
1468 || (insn & 0x0fff0000) == 0x08bd0000
1469 /* POP (LDMIA). */
1470 || (insn & 0x0fff0000) == 0x049d0000)
1471 /* POP of a single register. */
1472 return 1;
1473 }
1474
1475 return 0;
1476}
1477
0d39a070
DJ
1478/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1479 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1480 fill it in. Return the first address not recognized as a prologue
1481 instruction.
eb5492fa 1482
0d39a070
DJ
1483 We recognize all the instructions typically found in ARM prologues,
1484 plus harmless instructions which can be skipped (either for analysis
1485 purposes, or a more restrictive set that can be skipped when finding
1486 the end of the prologue). */
1487
1488static CORE_ADDR
1489arm_analyze_prologue (struct gdbarch *gdbarch,
1490 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1491 struct arm_prologue_cache *cache)
1492{
0d39a070
DJ
1493 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1494 int regno;
1495 CORE_ADDR offset, current_pc;
1496 pv_t regs[ARM_FPS_REGNUM];
1497 struct pv_area *stack;
1498 struct cleanup *back_to;
0d39a070
DJ
1499 CORE_ADDR unrecognized_pc = 0;
1500
1501 /* Search the prologue looking for instructions that set up the
96baa820 1502 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1503
96baa820
JM
1504 Be careful, however, and if it doesn't look like a prologue,
1505 don't try to scan it. If, for instance, a frameless function
1506 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1507 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1508 and other operations that rely on a knowledge of the stack
0d39a070 1509 traceback. */
d4473757 1510
4be43953
DJ
1511 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1512 regs[regno] = pv_register (regno, 0);
55f960e1 1513 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
4be43953
DJ
1514 back_to = make_cleanup_free_pv_area (stack);
1515
94c30b78
MS
1516 for (current_pc = prologue_start;
1517 current_pc < prologue_end;
f43845b3 1518 current_pc += 4)
96baa820 1519 {
e17a4113 1520 unsigned int insn
198cd59d 1521 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1522
94c30b78 1523 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1524 {
4be43953 1525 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
28cd8767
JG
1526 continue;
1527 }
0d39a070
DJ
1528 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1529 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1530 {
1531 unsigned imm = insn & 0xff; /* immediate value */
1532 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1533 int rd = bits (insn, 12, 15);
28cd8767 1534 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1535 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1536 continue;
1537 }
0d39a070
DJ
1538 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1539 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1540 {
1541 unsigned imm = insn & 0xff; /* immediate value */
1542 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1543 int rd = bits (insn, 12, 15);
28cd8767 1544 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1545 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
f43845b3
MS
1546 continue;
1547 }
0963b4bd
MS
1548 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1549 [sp, #-4]! */
f43845b3 1550 {
4be43953
DJ
1551 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1552 break;
1553 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
0d39a070
DJ
1554 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1555 regs[bits (insn, 12, 15)]);
f43845b3
MS
1556 continue;
1557 }
1558 else if ((insn & 0xffff0000) == 0xe92d0000)
d4473757
KB
1559 /* stmfd sp!, {..., fp, ip, lr, pc}
1560 or
1561 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1562 {
d4473757 1563 int mask = insn & 0xffff;
ed9a39eb 1564
4be43953
DJ
1565 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1566 break;
1567
94c30b78 1568 /* Calculate offsets of saved registers. */
34e8f22d 1569 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
d4473757
KB
1570 if (mask & (1 << regno))
1571 {
0963b4bd
MS
1572 regs[ARM_SP_REGNUM]
1573 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1574 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
d4473757
KB
1575 }
1576 }
0d39a070
DJ
1577 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1578 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1579 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
b8d5e71d
MS
1580 {
1581 /* No need to add this to saved_regs -- it's just an arg reg. */
1582 continue;
1583 }
0d39a070
DJ
1584 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1585 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1586 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
f43845b3
MS
1587 {
1588 /* No need to add this to saved_regs -- it's just an arg reg. */
1589 continue;
1590 }
0963b4bd
MS
1591 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1592 { registers } */
0d39a070
DJ
1593 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1594 {
1595 /* No need to add this to saved_regs -- it's just arg regs. */
1596 continue;
1597 }
d4473757
KB
1598 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1599 {
94c30b78
MS
1600 unsigned imm = insn & 0xff; /* immediate value */
1601 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1602 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1603 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
d4473757
KB
1604 }
1605 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1606 {
94c30b78
MS
1607 unsigned imm = insn & 0xff; /* immediate value */
1608 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1609 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1610 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1611 }
0963b4bd
MS
1612 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1613 [sp, -#c]! */
2af46ca0 1614 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1615 {
4be43953
DJ
1616 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1617 break;
1618
1619 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1620 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1621 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1622 }
0963b4bd
MS
1623 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1624 [sp!] */
2af46ca0 1625 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757
KB
1626 {
1627 int n_saved_fp_regs;
1628 unsigned int fp_start_reg, fp_bound_reg;
1629
4be43953
DJ
1630 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1631 break;
1632
94c30b78 1633 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1634 {
d4473757
KB
1635 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1636 n_saved_fp_regs = 3;
1637 else
1638 n_saved_fp_regs = 1;
96baa820 1639 }
d4473757 1640 else
96baa820 1641 {
d4473757
KB
1642 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1643 n_saved_fp_regs = 2;
1644 else
1645 n_saved_fp_regs = 4;
96baa820 1646 }
d4473757 1647
34e8f22d 1648 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
d4473757
KB
1649 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1650 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1651 {
4be43953
DJ
1652 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1653 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1654 regs[fp_start_reg++]);
96baa820 1655 }
c906108c 1656 }
0d39a070
DJ
1657 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1658 {
1659 /* Allow some special function calls when skipping the
1660 prologue; GCC generates these before storing arguments to
1661 the stack. */
1662 CORE_ADDR dest = BranchDest (current_pc, insn);
1663
e0634ccf 1664 if (skip_prologue_function (gdbarch, dest, 0))
0d39a070
DJ
1665 continue;
1666 else
1667 break;
1668 }
d4473757 1669 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1670 break; /* Condition not true, exit early. */
0d39a070
DJ
1671 else if (arm_instruction_changes_pc (insn))
1672 /* Don't scan past anything that might change control flow. */
1673 break;
f303bc3e
YQ
1674 else if (arm_instruction_restores_sp (insn))
1675 {
1676 /* Don't scan past the epilogue. */
1677 break;
1678 }
d19f7eee
UW
1679 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1680 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1681 /* Ignore block loads from the stack, potentially copying
1682 parameters from memory. */
1683 continue;
1684 else if ((insn & 0xfc500000) == 0xe4100000
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1686 /* Similarly ignore single loads from the stack. */
1687 continue;
0d39a070
DJ
1688 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1689 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1690 register instead of the stack. */
d4473757 1691 continue;
0d39a070
DJ
1692 else
1693 {
21daaaaf
YQ
 1694	  /* The optimizer might shove anything into the prologue.  If we
 1695	     are building up the cache (cache != NULL) from scanning the
 1696	     prologue, just skip what we don't recognize and scan further,
 1697	     to make the cache as complete as possible.  However, if we are
 1698	     just skipping the prologue, stop immediately at the first
 1699	     unrecognized instruction.  */
0d39a070 1700 unrecognized_pc = current_pc;
21daaaaf
YQ
1701 if (cache != NULL)
1702 continue;
1703 else
1704 break;
0d39a070 1705 }
c906108c
SS
1706 }
1707
0d39a070
DJ
1708 if (unrecognized_pc == 0)
1709 unrecognized_pc = current_pc;
1710
0d39a070
DJ
1711 if (cache)
1712 {
4072f920
YQ
1713 int framereg, framesize;
1714
1715 /* The frame size is just the distance from the frame register
1716 to the original stack pointer. */
1717 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1718 {
1719 /* Frame pointer is fp. */
1720 framereg = ARM_FP_REGNUM;
1721 framesize = -regs[ARM_FP_REGNUM].k;
1722 }
1723 else
1724 {
1725 /* Try the stack pointer... this is a bit desperate. */
1726 framereg = ARM_SP_REGNUM;
1727 framesize = -regs[ARM_SP_REGNUM].k;
1728 }
1729
0d39a070
DJ
1730 cache->framereg = framereg;
1731 cache->framesize = framesize;
1732
1733 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1734 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1735 cache->saved_regs[regno].addr = offset;
1736 }
1737
1738 if (arm_debug)
1739 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1740 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1741
1742 do_cleanups (back_to);
0d39a070
DJ
1743 return unrecognized_pc;
1744}
1745
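/* For illustration: the "sub sp/fp, ..., #imm" cases above decode the ARM
   modified immediate, an 8-bit value rotated right by twice the 4-bit
   rotate field.  A hypothetical stand-alone helper (not part of GDB)
   would look like this; e.g. insn 0xe24ddb01 ("sub sp, sp, #1024") has
   imm8 = 0x01 and rotate amount 22, giving (1 << 10) = 1024.  */

static unsigned int
example_decode_arm_immediate (unsigned int insn)
{
  unsigned int imm = insn & 0xff;		/* Immediate value.  */
  unsigned int rot = (insn & 0xf00) >> 7;	/* Twice the rotate field.  */

  if (rot == 0)
    return imm;
  return (imm >> rot) | (imm << (32 - rot));
}
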
1746static void
1747arm_scan_prologue (struct frame_info *this_frame,
1748 struct arm_prologue_cache *cache)
1749{
1750 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1751 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1752 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1753 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1754 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1755
1756 /* Assume there is no frame until proven otherwise. */
1757 cache->framereg = ARM_SP_REGNUM;
1758 cache->framesize = 0;
1759
1760 /* Check for Thumb prologue. */
1761 if (arm_frame_is_thumb (this_frame))
1762 {
1763 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1764 return;
1765 }
1766
1767 /* Find the function prologue. If we can't find the function in
1768 the symbol table, peek in the stack frame to find the PC. */
1769 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1770 &prologue_end))
1771 {
1772 /* One way to find the end of the prologue (which works well
1773 for unoptimized code) is to do the following:
1774
1775 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1776
1777 if (sal.line == 0)
1778 prologue_end = prev_pc;
1779 else if (sal.end < prologue_end)
1780 prologue_end = sal.end;
1781
1782 This mechanism is very accurate so long as the optimizer
1783 doesn't move any instructions from the function body into the
1784 prologue. If this happens, sal.end will be the last
1785 instruction in the first hunk of prologue code just before
1786 the first instruction that the scheduler has moved from
1787 the body to the prologue.
1788
1789 In order to make sure that we scan all of the prologue
1790 instructions, we use a slightly less accurate mechanism which
1791 may scan more than necessary. To help compensate for this
1792 lack of accuracy, the prologue scanning loop below contains
1793 several clauses which'll cause the loop to terminate early if
1794 an implausible prologue instruction is encountered.
1795
1796 The expression
1797
1798 prologue_start + 64
1799
1800 is a suitable endpoint since it accounts for the largest
1801 possible prologue plus up to five instructions inserted by
1802 the scheduler. */
1803
1804 if (prologue_end > prologue_start + 64)
1805 {
1806 prologue_end = prologue_start + 64; /* See above. */
1807 }
1808 }
1809 else
1810 {
1811 /* We have no symbol information. Our only option is to assume this
1812 function has a standard stack frame and the normal frame register.
1813 Then, we can find the value of our frame pointer on entrance to
1814 the callee (or at the present moment if this is the innermost frame).
1815 The value stored there should be the address of the stmfd + 8. */
1816 CORE_ADDR frame_loc;
7913a64c 1817 ULONGEST return_value;
0d39a070
DJ
1818
1819 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
7913a64c
YQ
1820 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1821 &return_value))
0d39a070
DJ
1822 return;
1823 else
1824 {
1825 prologue_start = gdbarch_addr_bits_remove
1826 (gdbarch, return_value) - 8;
1827 prologue_end = prologue_start + 64; /* See above. */
1828 }
1829 }
1830
1831 if (prev_pc < prologue_end)
1832 prologue_end = prev_pc;
1833
1834 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1835}
1836
eb5492fa 1837static struct arm_prologue_cache *
a262aec2 1838arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1839{
eb5492fa
DJ
1840 int reg;
1841 struct arm_prologue_cache *cache;
1842 CORE_ADDR unwound_fp;
c5aa993b 1843
35d5d4ee 1844 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1845 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1846
a262aec2 1847 arm_scan_prologue (this_frame, cache);
848cfffb 1848
a262aec2 1849 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1850 if (unwound_fp == 0)
1851 return cache;
c906108c 1852
4be43953 1853 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1854
eb5492fa
DJ
1855 /* Calculate actual addresses of saved registers using offsets
1856 determined by arm_scan_prologue. */
a262aec2 1857 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1858 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1859 cache->saved_regs[reg].addr += cache->prev_sp;
1860
1861 return cache;
c906108c
SS
1862}
1863
c1ee9414
LM
1864/* Implementation of the stop_reason hook for arm_prologue frames. */
1865
1866static enum unwind_stop_reason
1867arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1868 void **this_cache)
1869{
1870 struct arm_prologue_cache *cache;
1871 CORE_ADDR pc;
1872
1873 if (*this_cache == NULL)
1874 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1875 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1876
1877 /* This is meant to halt the backtrace at "_start". */
1878 pc = get_frame_pc (this_frame);
1879 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1880 return UNWIND_OUTERMOST;
1881
1882 /* If we've hit a wall, stop. */
1883 if (cache->prev_sp == 0)
1884 return UNWIND_OUTERMOST;
1885
1886 return UNWIND_NO_REASON;
1887}
1888
eb5492fa
DJ
1889/* Our frame ID for a normal frame is the current function's starting PC
1890 and the caller's SP when we were called. */
c906108c 1891
148754e5 1892static void
a262aec2 1893arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1894 void **this_cache,
1895 struct frame_id *this_id)
c906108c 1896{
eb5492fa
DJ
1897 struct arm_prologue_cache *cache;
1898 struct frame_id id;
2c404490 1899 CORE_ADDR pc, func;
f079148d 1900
eb5492fa 1901 if (*this_cache == NULL)
a262aec2 1902 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1903 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1904
0e9e9abd
UW
1905 /* Use function start address as part of the frame ID. If we cannot
1906 identify the start address (due to missing symbol information),
1907 fall back to just using the current PC. */
c1ee9414 1908 pc = get_frame_pc (this_frame);
2c404490 1909 func = get_frame_func (this_frame);
0e9e9abd
UW
1910 if (!func)
1911 func = pc;
1912
eb5492fa 1913 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1914 *this_id = id;
c906108c
SS
1915}
1916
a262aec2
DJ
1917static struct value *
1918arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1919 void **this_cache,
a262aec2 1920 int prev_regnum)
24de872b 1921{
24568a2c 1922 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1923 struct arm_prologue_cache *cache;
1924
eb5492fa 1925 if (*this_cache == NULL)
a262aec2 1926 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1927 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1928
eb5492fa 1929 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1930 instead. The prologue may save PC, but it will point into this
1931 frame's prologue, not the next frame's resume location. Also
1932 strip the saved T bit. A valid LR may have the low bit set, but
1933 a valid PC never does. */
eb5492fa 1934 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1935 {
1936 CORE_ADDR lr;
1937
1938 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1939 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1940 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1941 }
24de872b 1942
eb5492fa 1943 /* SP is generally not saved to the stack, but this frame is
a262aec2 1944 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1945 The value was already reconstructed into PREV_SP. */
1946 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1947 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1948
b39cc962
DJ
1949 /* The CPSR may have been changed by the call instruction and by the
1950 called function. The only bit we can reconstruct is the T bit,
1951 by checking the low bit of LR as of the call. This is a reliable
1952 indicator of Thumb-ness except for some ARM v4T pre-interworking
1953 Thumb code, which could get away with a clear low bit as long as
1954 the called function did not use bx. Guess that all other
1955 bits are unchanged; the condition flags are presumably lost,
1956 but the processor status is likely valid. */
1957 if (prev_regnum == ARM_PS_REGNUM)
1958 {
1959 CORE_ADDR lr, cpsr;
9779414d 1960 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1961
1962 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1963 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1964 if (IS_THUMB_ADDR (lr))
9779414d 1965 cpsr |= t_bit;
b39cc962 1966 else
9779414d 1967 cpsr &= ~t_bit;
b39cc962
DJ
1968 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1969 }
1970
a262aec2
DJ
1971 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1972 prev_regnum);
eb5492fa
DJ
1973}
1974
1975struct frame_unwind arm_prologue_unwind = {
1976 NORMAL_FRAME,
c1ee9414 1977 arm_prologue_unwind_stop_reason,
eb5492fa 1978 arm_prologue_this_id,
a262aec2
DJ
1979 arm_prologue_prev_register,
1980 NULL,
1981 default_frame_sniffer
eb5492fa
DJ
1982};
1983
0e9e9abd
UW
1984/* Maintain a list of ARM exception table entries per objfile, similar to the
1985 list of mapping symbols. We only cache entries for standard ARM-defined
1986 personality routines; the cache will contain only the frame unwinding
1987 instructions associated with the entry (not the descriptors). */
1988
1989static const struct objfile_data *arm_exidx_data_key;
1990
1991struct arm_exidx_entry
1992{
1993 bfd_vma addr;
1994 gdb_byte *entry;
1995};
1996typedef struct arm_exidx_entry arm_exidx_entry_s;
1997DEF_VEC_O(arm_exidx_entry_s);
1998
1999struct arm_exidx_data
2000{
2001 VEC(arm_exidx_entry_s) **section_maps;
2002};
2003
2004static void
2005arm_exidx_data_free (struct objfile *objfile, void *arg)
2006{
9a3c8263 2007 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
2008 unsigned int i;
2009
2010 for (i = 0; i < objfile->obfd->section_count; i++)
2011 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2012}
2013
2014static inline int
2015arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2016 const struct arm_exidx_entry *rhs)
2017{
2018 return lhs->addr < rhs->addr;
2019}
2020
2021static struct obj_section *
2022arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2023{
2024 struct obj_section *osect;
2025
2026 ALL_OBJFILE_OSECTIONS (objfile, osect)
2027 if (bfd_get_section_flags (objfile->obfd,
2028 osect->the_bfd_section) & SEC_ALLOC)
2029 {
2030 bfd_vma start, size;
2031 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2032 size = bfd_get_section_size (osect->the_bfd_section);
2033
2034 if (start <= vma && vma < start + size)
2035 return osect;
2036 }
2037
2038 return NULL;
2039}
2040
2041/* Parse contents of exception table and exception index sections
2042 of OBJFILE, and fill in the exception table entry cache.
2043
2044 For each entry that refers to a standard ARM-defined personality
2045 routine, extract the frame unwinding instructions (from either
2046 the index or the table section). The unwinding instructions
2047 are normalized by:
2048 - extracting them from the rest of the table data
2049 - converting to host endianness
2050 - appending the implicit 0xb0 ("Finish") code
2051
2052 The extracted and normalized instructions are stored for later
2053 retrieval by the arm_find_exidx_entry routine. */
2054
2055static void
2056arm_exidx_new_objfile (struct objfile *objfile)
2057{
3bb47e8b 2058 struct cleanup *cleanups;
0e9e9abd
UW
2059 struct arm_exidx_data *data;
2060 asection *exidx, *extab;
2061 bfd_vma exidx_vma = 0, extab_vma = 0;
2062 bfd_size_type exidx_size = 0, extab_size = 0;
2063 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2064 LONGEST i;
2065
2066 /* If we've already touched this file, do nothing. */
2067 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2068 return;
3bb47e8b 2069 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2070
2071 /* Read contents of exception table and index. */
a5eda10c 2072 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2073 if (exidx)
2074 {
2075 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2076 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2077 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2078 make_cleanup (xfree, exidx_data);
2079
2080 if (!bfd_get_section_contents (objfile->obfd, exidx,
2081 exidx_data, 0, exidx_size))
2082 {
2083 do_cleanups (cleanups);
2084 return;
2085 }
2086 }
2087
2088 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2089 if (extab)
2090 {
2091 extab_vma = bfd_section_vma (objfile->obfd, extab);
2092 extab_size = bfd_get_section_size (extab);
224c3ddb 2093 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2094 make_cleanup (xfree, extab_data);
2095
2096 if (!bfd_get_section_contents (objfile->obfd, extab,
2097 extab_data, 0, extab_size))
2098 {
2099 do_cleanups (cleanups);
2100 return;
2101 }
2102 }
2103
2104 /* Allocate exception table data structure. */
2105 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2106 set_objfile_data (objfile, arm_exidx_data_key, data);
2107 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2108 objfile->obfd->section_count,
2109 VEC(arm_exidx_entry_s) *);
2110
2111 /* Fill in exception table. */
2112 for (i = 0; i < exidx_size / 8; i++)
2113 {
2114 struct arm_exidx_entry new_exidx_entry;
2115 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2116 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2117 bfd_vma addr = 0, word = 0;
2118 int n_bytes = 0, n_words = 0;
2119 struct obj_section *sec;
2120 gdb_byte *entry = NULL;
2121
2122 /* Extract address of start of function. */
2123 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2124 idx += exidx_vma + i * 8;
2125
2126 /* Find section containing function and compute section offset. */
2127 sec = arm_obj_section_from_vma (objfile, idx);
2128 if (sec == NULL)
2129 continue;
2130 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2131
2132 /* Determine address of exception table entry. */
2133 if (val == 1)
2134 {
2135 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2136 }
2137 else if ((val & 0xff000000) == 0x80000000)
2138 {
2139 /* Exception table entry embedded in .ARM.exidx
2140 -- must be short form. */
2141 word = val;
2142 n_bytes = 3;
2143 }
2144 else if (!(val & 0x80000000))
2145 {
2146 /* Exception table entry in .ARM.extab. */
2147 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2148 addr += exidx_vma + i * 8 + 4;
2149
2150 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2151 {
2152 word = bfd_h_get_32 (objfile->obfd,
2153 extab_data + addr - extab_vma);
2154 addr += 4;
2155
2156 if ((word & 0xff000000) == 0x80000000)
2157 {
2158 /* Short form. */
2159 n_bytes = 3;
2160 }
2161 else if ((word & 0xff000000) == 0x81000000
2162 || (word & 0xff000000) == 0x82000000)
2163 {
2164 /* Long form. */
2165 n_bytes = 2;
2166 n_words = ((word >> 16) & 0xff);
2167 }
2168 else if (!(word & 0x80000000))
2169 {
2170 bfd_vma pers;
2171 struct obj_section *pers_sec;
2172 int gnu_personality = 0;
2173
2174 /* Custom personality routine. */
2175 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2176 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2177
2178 /* Check whether we've got one of the variants of the
2179 GNU personality routines. */
2180 pers_sec = arm_obj_section_from_vma (objfile, pers);
2181 if (pers_sec)
2182 {
2183 static const char *personality[] =
2184 {
2185 "__gcc_personality_v0",
2186 "__gxx_personality_v0",
2187 "__gcj_personality_v0",
2188 "__gnu_objc_personality_v0",
2189 NULL
2190 };
2191
2192 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2193 int k;
2194
2195 for (k = 0; personality[k]; k++)
2196 if (lookup_minimal_symbol_by_pc_name
2197 (pc, personality[k], objfile))
2198 {
2199 gnu_personality = 1;
2200 break;
2201 }
2202 }
2203
2204 /* If so, the next word contains a word count in the high
2205 byte, followed by the same unwind instructions as the
2206 pre-defined forms. */
2207 if (gnu_personality
2208 && addr + 4 <= extab_vma + extab_size)
2209 {
2210 word = bfd_h_get_32 (objfile->obfd,
2211 extab_data + addr - extab_vma);
2212 addr += 4;
2213 n_bytes = 3;
2214 n_words = ((word >> 24) & 0xff);
2215 }
2216 }
2217 }
2218 }
2219
2220 /* Sanity check address. */
2221 if (n_words)
2222 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2223 n_words = n_bytes = 0;
2224
2225 /* The unwind instructions reside in WORD (only the N_BYTES least
2226 significant bytes are valid), followed by N_WORDS words in the
2227 extab section starting at ADDR. */
2228 if (n_bytes || n_words)
2229 {
224c3ddb
SM
2230 gdb_byte *p = entry
2231 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2232 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2233
2234 while (n_bytes--)
2235 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2236
2237 while (n_words--)
2238 {
2239 word = bfd_h_get_32 (objfile->obfd,
2240 extab_data + addr - extab_vma);
2241 addr += 4;
2242
2243 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2244 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2245 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2246 *p++ = (gdb_byte) (word & 0xff);
2247 }
2248
2249 /* Implied "Finish" to terminate the list. */
2250 *p++ = 0xb0;
2251 }
2252
 2253       /* Push the entry onto the vector.  Entries are guaranteed to
 2254	  always appear in order of increasing address.  */
2255 new_exidx_entry.addr = idx;
2256 new_exidx_entry.entry = entry;
2257 VEC_safe_push (arm_exidx_entry_s,
2258 data->section_maps[sec->the_bfd_section->index],
2259 &new_exidx_entry);
2260 }
2261
2262 do_cleanups (cleanups);
2263}
2264
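/* For illustration: the ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000
   expressions above sign-extend a "prel31" field, the 31-bit signed
   PC-relative offset used by .ARM.exidx entries.  A hypothetical helper
   (not part of GDB) making that explicit; e.g. 0x7ffffffc decodes
   to -4.  */

static bfd_signed_vma
example_exidx_prel31 (bfd_vma val)
{
  /* Isolate the low 31 bits, then sign-extend from bit 30.  */
  return (bfd_signed_vma) ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
}
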
2265/* Search for the exception table entry covering MEMADDR. If one is found,
2266 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2267 set *START to the start of the region covered by this entry. */
2268
2269static gdb_byte *
2270arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2271{
2272 struct obj_section *sec;
2273
2274 sec = find_pc_section (memaddr);
2275 if (sec != NULL)
2276 {
2277 struct arm_exidx_data *data;
2278 VEC(arm_exidx_entry_s) *map;
2279 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2280 unsigned int idx;
2281
9a3c8263
SM
2282 data = ((struct arm_exidx_data *)
2283 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2284 if (data != NULL)
2285 {
2286 map = data->section_maps[sec->the_bfd_section->index];
2287 if (!VEC_empty (arm_exidx_entry_s, map))
2288 {
2289 struct arm_exidx_entry *map_sym;
2290
2291 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2292 arm_compare_exidx_entries);
2293
2294 /* VEC_lower_bound finds the earliest ordered insertion
2295 point. If the following symbol starts at this exact
2296 address, we use that; otherwise, the preceding
2297 exception table entry covers this address. */
2298 if (idx < VEC_length (arm_exidx_entry_s, map))
2299 {
2300 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2301 if (map_sym->addr == map_key.addr)
2302 {
2303 if (start)
2304 *start = map_sym->addr + obj_section_addr (sec);
2305 return map_sym->entry;
2306 }
2307 }
2308
2309 if (idx > 0)
2310 {
2311 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2312 if (start)
2313 *start = map_sym->addr + obj_section_addr (sec);
2314 return map_sym->entry;
2315 }
2316 }
2317 }
2318 }
2319
2320 return NULL;
2321}
2322
2323/* Given the current frame THIS_FRAME, and its associated frame unwinding
2324 instruction list from the ARM exception table entry ENTRY, allocate and
2325 return a prologue cache structure describing how to unwind this frame.
2326
2327 Return NULL if the unwinding instruction list contains a "spare",
2328 "reserved" or "refuse to unwind" instruction as defined in section
2329 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2330 for the ARM Architecture" document. */
2331
2332static struct arm_prologue_cache *
2333arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2334{
2335 CORE_ADDR vsp = 0;
2336 int vsp_valid = 0;
2337
2338 struct arm_prologue_cache *cache;
2339 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2340 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2341
2342 for (;;)
2343 {
2344 gdb_byte insn;
2345
 2346       /* Whenever we reload SP, we have to retrieve its actual
 2347	  value in the current frame.  */
2348 if (!vsp_valid)
2349 {
2350 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2351 {
2352 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2353 vsp = get_frame_register_unsigned (this_frame, reg);
2354 }
2355 else
2356 {
2357 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2358 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2359 }
2360
2361 vsp_valid = 1;
2362 }
2363
2364 /* Decode next unwind instruction. */
2365 insn = *entry++;
2366
2367 if ((insn & 0xc0) == 0)
2368 {
2369 int offset = insn & 0x3f;
2370 vsp += (offset << 2) + 4;
2371 }
2372 else if ((insn & 0xc0) == 0x40)
2373 {
2374 int offset = insn & 0x3f;
2375 vsp -= (offset << 2) + 4;
2376 }
2377 else if ((insn & 0xf0) == 0x80)
2378 {
2379 int mask = ((insn & 0xf) << 8) | *entry++;
2380 int i;
2381
2382 /* The special case of an all-zero mask identifies
2383 "Refuse to unwind". We return NULL to fall back
2384 to the prologue analyzer. */
2385 if (mask == 0)
2386 return NULL;
2387
2388 /* Pop registers r4..r15 under mask. */
2389 for (i = 0; i < 12; i++)
2390 if (mask & (1 << i))
2391 {
2392 cache->saved_regs[4 + i].addr = vsp;
2393 vsp += 4;
2394 }
2395
2396 /* Special-case popping SP -- we need to reload vsp. */
2397 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2398 vsp_valid = 0;
2399 }
2400 else if ((insn & 0xf0) == 0x90)
2401 {
2402 int reg = insn & 0xf;
2403
2404 /* Reserved cases. */
2405 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2406 return NULL;
2407
2408 /* Set SP from another register and mark VSP for reload. */
2409 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2410 vsp_valid = 0;
2411 }
2412 else if ((insn & 0xf0) == 0xa0)
2413 {
2414 int count = insn & 0x7;
2415 int pop_lr = (insn & 0x8) != 0;
2416 int i;
2417
2418 /* Pop r4..r[4+count]. */
2419 for (i = 0; i <= count; i++)
2420 {
2421 cache->saved_regs[4 + i].addr = vsp;
2422 vsp += 4;
2423 }
2424
2425 /* If indicated by flag, pop LR as well. */
2426 if (pop_lr)
2427 {
2428 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2429 vsp += 4;
2430 }
2431 }
2432 else if (insn == 0xb0)
2433 {
2434 /* We could only have updated PC by popping into it; if so, it
 2435	     will show up as an address.  Otherwise, copy LR into PC.  */
2436 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2437 cache->saved_regs[ARM_PC_REGNUM]
2438 = cache->saved_regs[ARM_LR_REGNUM];
2439
2440 /* We're done. */
2441 break;
2442 }
2443 else if (insn == 0xb1)
2444 {
2445 int mask = *entry++;
2446 int i;
2447
 2448	  /* An all-zero mask or a mask >= 16 is "spare".  */
2449 if (mask == 0 || mask >= 16)
2450 return NULL;
2451
2452 /* Pop r0..r3 under mask. */
2453 for (i = 0; i < 4; i++)
2454 if (mask & (1 << i))
2455 {
2456 cache->saved_regs[i].addr = vsp;
2457 vsp += 4;
2458 }
2459 }
2460 else if (insn == 0xb2)
2461 {
2462 ULONGEST offset = 0;
2463 unsigned shift = 0;
2464
2465 do
2466 {
2467 offset |= (*entry & 0x7f) << shift;
2468 shift += 7;
2469 }
2470 while (*entry++ & 0x80);
2471
2472 vsp += 0x204 + (offset << 2);
2473 }
2474 else if (insn == 0xb3)
2475 {
2476 int start = *entry >> 4;
2477 int count = (*entry++) & 0xf;
2478 int i;
2479
2480 /* Only registers D0..D15 are valid here. */
2481 if (start + count >= 16)
2482 return NULL;
2483
2484 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2485 for (i = 0; i <= count; i++)
2486 {
2487 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2488 vsp += 8;
2489 }
2490
2491 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2492 vsp += 4;
2493 }
2494 else if ((insn & 0xf8) == 0xb8)
2495 {
2496 int count = insn & 0x7;
2497 int i;
2498
2499 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2500 for (i = 0; i <= count; i++)
2501 {
2502 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2503 vsp += 8;
2504 }
2505
2506 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2507 vsp += 4;
2508 }
2509 else if (insn == 0xc6)
2510 {
2511 int start = *entry >> 4;
2512 int count = (*entry++) & 0xf;
2513 int i;
2514
2515 /* Only registers WR0..WR15 are valid. */
2516 if (start + count >= 16)
2517 return NULL;
2518
2519 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2520 for (i = 0; i <= count; i++)
2521 {
2522 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2523 vsp += 8;
2524 }
2525 }
2526 else if (insn == 0xc7)
2527 {
2528 int mask = *entry++;
2529 int i;
2530
 2531	  /* An all-zero mask or a mask >= 16 is "spare".  */
2532 if (mask == 0 || mask >= 16)
2533 return NULL;
2534
2535 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2536 for (i = 0; i < 4; i++)
2537 if (mask & (1 << i))
2538 {
2539 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2540 vsp += 4;
2541 }
2542 }
2543 else if ((insn & 0xf8) == 0xc0)
2544 {
2545 int count = insn & 0x7;
2546 int i;
2547
2548 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2549 for (i = 0; i <= count; i++)
2550 {
2551 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2552 vsp += 8;
2553 }
2554 }
2555 else if (insn == 0xc8)
2556 {
2557 int start = *entry >> 4;
2558 int count = (*entry++) & 0xf;
2559 int i;
2560
2561 /* Only registers D0..D31 are valid. */
2562 if (start + count >= 16)
2563 return NULL;
2564
2565 /* Pop VFP double-precision registers
2566 D[16+start]..D[16+start+count]. */
2567 for (i = 0; i <= count; i++)
2568 {
2569 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2570 vsp += 8;
2571 }
2572 }
2573 else if (insn == 0xc9)
2574 {
2575 int start = *entry >> 4;
2576 int count = (*entry++) & 0xf;
2577 int i;
2578
2579 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2580 for (i = 0; i <= count; i++)
2581 {
2582 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2583 vsp += 8;
2584 }
2585 }
2586 else if ((insn & 0xf8) == 0xd0)
2587 {
2588 int count = insn & 0x7;
2589 int i;
2590
2591 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2592 for (i = 0; i <= count; i++)
2593 {
2594 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2595 vsp += 8;
2596 }
2597 }
2598 else
2599 {
2600 /* Everything else is "spare". */
2601 return NULL;
2602 }
2603 }
2604
2605 /* If we restore SP from a register, assume this was the frame register.
2606 Otherwise just fall back to SP as frame register. */
2607 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2608 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2609 else
2610 cache->framereg = ARM_SP_REGNUM;
2611
2612 /* Determine offset to previous frame. */
2613 cache->framesize
2614 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2615
2616 /* We already got the previous SP. */
2617 cache->prev_sp = vsp;
2618
2619 return cache;
2620}
2621
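/* For illustration: the 0xb2 ("vsp = vsp + 0x204 + (uleb128 << 2)") opcode
   above reads a ULEB128-encoded operand: each byte supplies seven bits,
   least-significant group first, and the top bit marks a continuation.
   A hypothetical helper (not part of GDB) reading such an operand:  */

static ULONGEST
example_read_uleb128 (const gdb_byte **entry)
{
  ULONGEST result = 0;
  unsigned int shift = 0;
  gdb_byte byte;

  do
    {
      byte = *(*entry)++;
      result |= (ULONGEST) (byte & 0x7f) << shift;
      shift += 7;
    }
  while ((byte & 0x80) != 0);

  return result;
}
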
2622/* Unwinding via ARM exception table entries. Note that the sniffer
2623 already computes a filled-in prologue cache, which is then used
2624 with the same arm_prologue_this_id and arm_prologue_prev_register
2625 routines also used for prologue-parsing based unwinding. */
2626
2627static int
2628arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2629 struct frame_info *this_frame,
2630 void **this_prologue_cache)
2631{
2632 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2633 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2634 CORE_ADDR addr_in_block, exidx_region, func_start;
2635 struct arm_prologue_cache *cache;
2636 gdb_byte *entry;
2637
2638 /* See if we have an ARM exception table entry covering this address. */
2639 addr_in_block = get_frame_address_in_block (this_frame);
2640 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2641 if (!entry)
2642 return 0;
2643
2644 /* The ARM exception table does not describe unwind information
2645 for arbitrary PC values, but is guaranteed to be correct only
2646 at call sites. We have to decide here whether we want to use
2647 ARM exception table information for this frame, or fall back
2648 to using prologue parsing. (Note that if we have DWARF CFI,
2649 this sniffer isn't even called -- CFI is always preferred.)
2650
2651 Before we make this decision, however, we check whether we
2652 actually have *symbol* information for the current frame.
2653 If not, prologue parsing would not work anyway, so we might
2654 as well use the exception table and hope for the best. */
2655 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2656 {
2657 int exc_valid = 0;
2658
2659 /* If the next frame is "normal", we are at a call site in this
2660 frame, so exception information is guaranteed to be valid. */
2661 if (get_next_frame (this_frame)
2662 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2663 exc_valid = 1;
2664
2665 /* We also assume exception information is valid if we're currently
2666 blocked in a system call. The system library is supposed to
d9311bfa
AT
2667 ensure this, so that e.g. pthread cancellation works. */
2668 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2669 {
7913a64c 2670 ULONGEST insn;
416dc9c6 2671
7913a64c
YQ
2672 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2673 2, byte_order_for_code, &insn)
d9311bfa
AT
2674 && (insn & 0xff00) == 0xdf00 /* svc */)
2675 exc_valid = 1;
0e9e9abd 2676 }
d9311bfa
AT
2677 else
2678 {
7913a64c 2679 ULONGEST insn;
416dc9c6 2680
7913a64c
YQ
2681 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2682 4, byte_order_for_code, &insn)
d9311bfa
AT
2683 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2684 exc_valid = 1;
2685 }
2686
0e9e9abd
UW
2687 /* Bail out if we don't know that exception information is valid. */
2688 if (!exc_valid)
2689 return 0;
2690
2691 /* The ARM exception index does not mark the *end* of the region
2692 covered by the entry, and some functions will not have any entry.
2693 To correctly recognize the end of the covered region, the linker
2694 should have inserted dummy records with a CANTUNWIND marker.
2695
2696 Unfortunately, current versions of GNU ld do not reliably do
2697 this, and thus we may have found an incorrect entry above.
2698 As a (temporary) sanity check, we only use the entry if it
2699 lies *within* the bounds of the function. Note that this check
2700 might reject perfectly valid entries that just happen to cover
2701 multiple functions; therefore this check ought to be removed
2702 once the linker is fixed. */
2703 if (func_start > exidx_region)
2704 return 0;
2705 }
2706
2707 /* Decode the list of unwinding instructions into a prologue cache.
2708 Note that this may fail due to e.g. a "refuse to unwind" code. */
2709 cache = arm_exidx_fill_cache (this_frame, entry);
2710 if (!cache)
2711 return 0;
2712
2713 *this_prologue_cache = cache;
2714 return 1;
2715}
2716
2717struct frame_unwind arm_exidx_unwind = {
2718 NORMAL_FRAME,
8fbca658 2719 default_frame_unwind_stop_reason,
0e9e9abd
UW
2720 arm_prologue_this_id,
2721 arm_prologue_prev_register,
2722 NULL,
2723 arm_exidx_unwind_sniffer
2724};
2725
779aa56f
YQ
2726static struct arm_prologue_cache *
2727arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2728{
2729 struct arm_prologue_cache *cache;
779aa56f
YQ
2730 int reg;
2731
2732 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2733 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2734
 2735  /* Still rely on the offsets calculated from the prologue.  */
2736 arm_scan_prologue (this_frame, cache);
2737
 2738  /* Since we are in the epilogue, the SP has been restored.  */
2739 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2740
2741 /* Calculate actual addresses of saved registers using offsets
2742 determined by arm_scan_prologue. */
2743 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2744 if (trad_frame_addr_p (cache->saved_regs, reg))
2745 cache->saved_regs[reg].addr += cache->prev_sp;
2746
2747 return cache;
2748}
2749
2750/* Implementation of function hook 'this_id' in
 2751   'struct frame_unwind' for epilogue unwinder.  */
2752
2753static void
2754arm_epilogue_frame_this_id (struct frame_info *this_frame,
2755 void **this_cache,
2756 struct frame_id *this_id)
2757{
2758 struct arm_prologue_cache *cache;
2759 CORE_ADDR pc, func;
2760
2761 if (*this_cache == NULL)
2762 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2763 cache = (struct arm_prologue_cache *) *this_cache;
2764
2765 /* Use function start address as part of the frame ID. If we cannot
2766 identify the start address (due to missing symbol information),
2767 fall back to just using the current PC. */
2768 pc = get_frame_pc (this_frame);
2769 func = get_frame_func (this_frame);
fb3f3d25 2770 if (func == 0)
779aa56f
YQ
2771 func = pc;
2772
2773 (*this_id) = frame_id_build (cache->prev_sp, pc);
2774}
2775
2776/* Implementation of function hook 'prev_register' in
 2777   'struct frame_unwind' for epilogue unwinder.  */
2778
2779static struct value *
2780arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2781 void **this_cache, int regnum)
2782{
779aa56f
YQ
2783 if (*this_cache == NULL)
2784 *this_cache = arm_make_epilogue_frame_cache (this_frame);
779aa56f
YQ
2785
2786 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2787}
2788
2789static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2790 CORE_ADDR pc);
2791static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2792 CORE_ADDR pc);
2793
2794/* Implementation of function hook 'sniffer' in
 2795   'struct frame_unwind' for epilogue unwinder.  */
2796
2797static int
2798arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2799 struct frame_info *this_frame,
2800 void **this_prologue_cache)
2801{
2802 if (frame_relative_level (this_frame) == 0)
2803 {
2804 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2805 CORE_ADDR pc = get_frame_pc (this_frame);
2806
2807 if (arm_frame_is_thumb (this_frame))
2808 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2809 else
2810 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2811 }
2812 else
2813 return 0;
2814}
2815
2816/* Frame unwinder from epilogue. */
2817
2818static const struct frame_unwind arm_epilogue_frame_unwind =
2819{
2820 NORMAL_FRAME,
2821 default_frame_unwind_stop_reason,
2822 arm_epilogue_frame_this_id,
2823 arm_epilogue_frame_prev_register,
2824 NULL,
2825 arm_epilogue_frame_sniffer,
2826};
2827
80d8d390
YQ
2828/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2829 trampoline, return the target PC. Otherwise return 0.
2830
2831 void call0a (char c, short s, int i, long l) {}
2832
2833 int main (void)
2834 {
2835 (*pointer_to_call0a) (c, s, i, l);
2836 }
2837
2838 Instead of calling a stub library function _call_via_xx (xx is
2839 the register name), GCC may inline the trampoline in the object
2840 file as below (register r2 has the address of call0a).
2841
2842 .global main
2843 .type main, %function
2844 ...
2845 bl .L1
2846 ...
2847 .size main, .-main
2848
2849 .L1:
2850 bx r2
2851
2852 The trampoline 'bx r2' doesn't belong to main. */
2853
2854static CORE_ADDR
2855arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2856{
 2857  /* The heuristic for recognizing such a trampoline is that FRAME is
 2858     executing in Thumb mode and the instruction at PC is 'bx Rm'.  */
2859 if (arm_frame_is_thumb (frame))
2860 {
2861 gdb_byte buf[2];
2862
2863 if (target_read_memory (pc, buf, 2) == 0)
2864 {
2865 struct gdbarch *gdbarch = get_frame_arch (frame);
2866 enum bfd_endian byte_order_for_code
2867 = gdbarch_byte_order_for_code (gdbarch);
2868 uint16_t insn
2869 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2870
2871 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2872 {
2873 CORE_ADDR dest
2874 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2875
2876 /* Clear the LSB so that gdb core sets step-resume
2877 breakpoint at the right address. */
2878 return UNMAKE_THUMB_ADDR (dest);
2879 }
2880 }
2881 }
2882
2883 return 0;
2884}
2885
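/* For illustration: the 16-bit Thumb "bx <Rm>" encoding tested above is
   0x4700 | (Rm << 3), so e.g. 0x4710 is "bx r2".  A hypothetical helper
   (not part of GDB) extracting Rm, or -1 if the opcode does not match:  */

static int
example_thumb_bx_reg_operand (unsigned int insn)
{
  if ((insn & 0xff80) == 0x4700)	/* bx <Rm> */
    return (insn >> 3) & 0xf;
  return -1;
}
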
909cf6ea 2886static struct arm_prologue_cache *
a262aec2 2887arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2888{
909cf6ea 2889 struct arm_prologue_cache *cache;
909cf6ea 2890
35d5d4ee 2891 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2892 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2893
a262aec2 2894 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2895
2896 return cache;
2897}
2898
2899/* Our frame ID for a stub frame is the current SP and LR. */
2900
2901static void
a262aec2 2902arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2903 void **this_cache,
2904 struct frame_id *this_id)
2905{
2906 struct arm_prologue_cache *cache;
2907
2908 if (*this_cache == NULL)
a262aec2 2909 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2910 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2911
a262aec2 2912 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2913}
2914
a262aec2
DJ
2915static int
2916arm_stub_unwind_sniffer (const struct frame_unwind *self,
2917 struct frame_info *this_frame,
2918 void **this_prologue_cache)
909cf6ea 2919{
93d42b30 2920 CORE_ADDR addr_in_block;
948f8e3d 2921 gdb_byte dummy[4];
18d18ac8
YQ
2922 CORE_ADDR pc, start_addr;
2923 const char *name;
909cf6ea 2924
a262aec2 2925 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2926 pc = get_frame_pc (this_frame);
3e5d3a5a 2927 if (in_plt_section (addr_in_block)
fc36e839
DE
 2928      /* We also use the stub unwinder if the target memory is unreadable
2929 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2930 || target_read_memory (pc, dummy, 4) != 0)
2931 return 1;
2932
2933 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2934 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2935 return 1;
909cf6ea 2936
a262aec2 2937 return 0;
909cf6ea
DJ
2938}
2939
a262aec2
DJ
2940struct frame_unwind arm_stub_unwind = {
2941 NORMAL_FRAME,
8fbca658 2942 default_frame_unwind_stop_reason,
a262aec2
DJ
2943 arm_stub_this_id,
2944 arm_prologue_prev_register,
2945 NULL,
2946 arm_stub_unwind_sniffer
2947};
2948
2ae28aa9
YQ
2949/* Store, into CACHE->saved_regs, the addresses of the saved
2950   registers of the frame described by THIS_FRAME.  CACHE is
2951   returned.  */
2952
2953static struct arm_prologue_cache *
2954arm_m_exception_cache (struct frame_info *this_frame)
2955{
2956 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2957 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2958 struct arm_prologue_cache *cache;
2959 CORE_ADDR unwound_sp;
2960 LONGEST xpsr;
2961
2962 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2963 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2964
2965 unwound_sp = get_frame_register_unsigned (this_frame,
2966 ARM_SP_REGNUM);
2967
2968 /* The hardware saves eight 32-bit words, comprising xPSR,
2969 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2970 "B1.5.6 Exception entry behavior" in
2971 "ARMv7-M Architecture Reference Manual". */
2972 cache->saved_regs[0].addr = unwound_sp;
2973 cache->saved_regs[1].addr = unwound_sp + 4;
2974 cache->saved_regs[2].addr = unwound_sp + 8;
2975 cache->saved_regs[3].addr = unwound_sp + 12;
2976 cache->saved_regs[12].addr = unwound_sp + 16;
2977 cache->saved_regs[14].addr = unwound_sp + 20;
2978 cache->saved_regs[15].addr = unwound_sp + 24;
2979 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2980
2981 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2982 aligner between the top of the 32-byte stack frame and the
2983 previous context's stack pointer. */
2984 cache->prev_sp = unwound_sp + 32;
2985 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2986 && (xpsr & (1 << 9)) != 0)
2987 cache->prev_sp += 4;
2988
2989 return cache;
2990}
2991
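/* For illustration: the eight-word record laid out by the hardware on
   M-profile exception entry, in the order unwound above.  This type is
   purely illustrative and is not used by GDB itself.  */

struct example_m_profile_exception_frame
{
  unsigned int r0;
  unsigned int r1;
  unsigned int r2;
  unsigned int r3;
  unsigned int r12;
  unsigned int lr;		/* R14.  */
  unsigned int return_address;
  unsigned int xpsr;		/* Bit 9 set if a 4-byte aligner follows.  */
};
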
2992/* Implementation of function hook 'this_id' in
 2993   'struct frame_unwind'.  */
2994
2995static void
2996arm_m_exception_this_id (struct frame_info *this_frame,
2997 void **this_cache,
2998 struct frame_id *this_id)
2999{
3000 struct arm_prologue_cache *cache;
3001
3002 if (*this_cache == NULL)
3003 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3004 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3005
 3006  /* Our frame ID for an exception frame is the previous SP and the current PC.  */
3007 *this_id = frame_id_build (cache->prev_sp,
3008 get_frame_pc (this_frame));
3009}
3010
3011/* Implementation of function hook 'prev_register' in
 3012   'struct frame_unwind'.  */
3013
3014static struct value *
3015arm_m_exception_prev_register (struct frame_info *this_frame,
3016 void **this_cache,
3017 int prev_regnum)
3018{
2ae28aa9
YQ
3019 struct arm_prologue_cache *cache;
3020
3021 if (*this_cache == NULL)
3022 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 3023 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
3024
3025 /* The value was already reconstructed into PREV_SP. */
3026 if (prev_regnum == ARM_SP_REGNUM)
3027 return frame_unwind_got_constant (this_frame, prev_regnum,
3028 cache->prev_sp);
3029
3030 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3031 prev_regnum);
3032}
3033
3034/* Implementation of function hook 'sniffer' in
 3035   'struct frame_unwind'.  */
3036
3037static int
3038arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3039 struct frame_info *this_frame,
3040 void **this_prologue_cache)
3041{
3042 CORE_ADDR this_pc = get_frame_pc (this_frame);
3043
3044 /* No need to check is_m; this sniffer is only registered for
3045 M-profile architectures. */
3046
ca90e760
FH
3047 /* Check if exception frame returns to a magic PC value. */
3048 return arm_m_addr_is_magic (this_pc);
2ae28aa9
YQ
3049}
3050
3051/* Frame unwinder for M-profile exceptions. */
3052
3053struct frame_unwind arm_m_exception_unwind =
3054{
3055 SIGTRAMP_FRAME,
3056 default_frame_unwind_stop_reason,
3057 arm_m_exception_this_id,
3058 arm_m_exception_prev_register,
3059 NULL,
3060 arm_m_exception_unwind_sniffer
3061};
3062
24de872b 3063static CORE_ADDR
a262aec2 3064arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3065{
3066 struct arm_prologue_cache *cache;
3067
eb5492fa 3068 if (*this_cache == NULL)
a262aec2 3069 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3070 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3071
4be43953 3072 return cache->prev_sp - cache->framesize;
24de872b
DJ
3073}
3074
eb5492fa
DJ
3075struct frame_base arm_normal_base = {
3076 &arm_prologue_unwind,
3077 arm_normal_frame_base,
3078 arm_normal_frame_base,
3079 arm_normal_frame_base
3080};
3081
a262aec2 3082/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3083 dummy frame. The frame ID's base needs to match the TOS value
3084 saved by save_dummy_frame_tos() and returned from
3085 arm_push_dummy_call, and the PC needs to match the dummy frame's
3086 breakpoint. */
c906108c 3087
eb5492fa 3088static struct frame_id
a262aec2 3089arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3090{
0963b4bd
MS
3091 return frame_id_build (get_frame_register_unsigned (this_frame,
3092 ARM_SP_REGNUM),
a262aec2 3093 get_frame_pc (this_frame));
eb5492fa 3094}
c3b4394c 3095
eb5492fa
DJ
3096/* Given THIS_FRAME, find the previous frame's resume PC (which will
3097 be used to construct the previous frame's ID, after looking up the
3098 containing function). */
c3b4394c 3099
eb5492fa
DJ
3100static CORE_ADDR
3101arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3102{
3103 CORE_ADDR pc;
3104 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3105 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3106}
3107
3108static CORE_ADDR
3109arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3110{
3111 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3112}
3113
b39cc962
DJ
3114static struct value *
3115arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3116 int regnum)
3117{
24568a2c 3118 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3119 CORE_ADDR lr, cpsr;
9779414d 3120 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3121
3122 switch (regnum)
3123 {
3124 case ARM_PC_REGNUM:
3125 /* The PC is normally copied from the return column, which
3126 describes saves of LR. However, that version may have an
3127 extra bit set to indicate Thumb state. The bit is not
3128 part of the PC. */
3129 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3130 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3131 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3132
3133 case ARM_PS_REGNUM:
3134 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3135 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3136 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3137 if (IS_THUMB_ADDR (lr))
9779414d 3138 cpsr |= t_bit;
b39cc962 3139 else
9779414d 3140 cpsr &= ~t_bit;
ca38c58e 3141 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3142
3143 default:
3144 internal_error (__FILE__, __LINE__,
3145 _("Unexpected register %d"), regnum);
3146 }
3147}
3148
3149static void
3150arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3151 struct dwarf2_frame_state_reg *reg,
3152 struct frame_info *this_frame)
3153{
3154 switch (regnum)
3155 {
3156 case ARM_PC_REGNUM:
3157 case ARM_PS_REGNUM:
3158 reg->how = DWARF2_FRAME_REG_FN;
3159 reg->loc.fn = arm_dwarf2_prev_register;
3160 break;
3161 case ARM_SP_REGNUM:
3162 reg->how = DWARF2_FRAME_REG_CFA;
3163 break;
3164 }
3165}
3166
c9cf6e20 3167/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3168
3169static int
c9cf6e20 3170thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3171{
3172 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3173 unsigned int insn, insn2;
3174 int found_return = 0, found_stack_adjust = 0;
3175 CORE_ADDR func_start, func_end;
3176 CORE_ADDR scan_pc;
3177 gdb_byte buf[4];
3178
3179 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3180 return 0;
3181
3182 /* The epilogue is a sequence of instructions along the following lines:
3183
3184 - add stack frame size to SP or FP
3185 - [if frame pointer used] restore SP from FP
3186 - restore registers from SP [may include PC]
3187 - a return-type instruction [if PC wasn't already restored]
3188
3189 In a first pass, we scan forward from the current PC and verify the
3190 instructions we find as compatible with this sequence, ending in a
3191 return instruction.
3192
3193 However, this is not sufficient to distinguish indirect function calls
3194 within a function from indirect tail calls in the epilogue in some cases.
3195 Therefore, if we didn't already find any SP-changing instruction during
3196 forward scan, we add a backward scanning heuristic to ensure we actually
3197 are in the epilogue. */
3198
3199 scan_pc = pc;
3200 while (scan_pc < func_end && !found_return)
3201 {
3202 if (target_read_memory (scan_pc, buf, 2))
3203 break;
3204
3205 scan_pc += 2;
3206 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3207
3208 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3209 found_return = 1;
3210 else if (insn == 0x46f7) /* mov pc, lr */
3211 found_return = 1;
540314bd 3212 else if (thumb_instruction_restores_sp (insn))
4024ca99 3213 {
b7576e5c 3214 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3215 found_return = 1;
3216 }
db24da6d 3217 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3218 {
3219 if (target_read_memory (scan_pc, buf, 2))
3220 break;
3221
3222 scan_pc += 2;
3223 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3224
3225 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3226 {
4024ca99
UW
3227 if (insn2 & 0x8000) /* <registers> include PC. */
3228 found_return = 1;
3229 }
3230 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3231 && (insn2 & 0x0fff) == 0x0b04)
3232 {
4024ca99
UW
3233 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3234 found_return = 1;
3235 }
3236 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3237 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3238 ;
4024ca99
UW
3239 else
3240 break;
3241 }
3242 else
3243 break;
3244 }
3245
3246 if (!found_return)
3247 return 0;
3248
3249 /* Since any instruction in the epilogue sequence, with the possible
3250 exception of return itself, updates the stack pointer, we need to
3251 scan backwards for at most one instruction. Try either a 16-bit or
3252 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3253 too much about false positives. */
4024ca99 3254
6b65d1b6
YQ
3255 if (pc - 4 < func_start)
3256 return 0;
3257 if (target_read_memory (pc - 4, buf, 4))
3258 return 0;
4024ca99 3259
6b65d1b6
YQ
3260 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3261 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3262
3263 if (thumb_instruction_restores_sp (insn2))
3264 found_stack_adjust = 1;
3265 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3266 found_stack_adjust = 1;
3267 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3268 && (insn2 & 0x0fff) == 0x0b04)
3269 found_stack_adjust = 1;
3270 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3271 && (insn2 & 0x0e00) == 0x0a00)
3272 found_stack_adjust = 1;
4024ca99
UW
3273
3274 return found_stack_adjust;
3275}
3276
4024ca99 3277static int
c58b006a 3278arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3279{
3280 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3281 unsigned int insn;
f303bc3e 3282 int found_return;
4024ca99
UW
3283 CORE_ADDR func_start, func_end;
3284
4024ca99
UW
3285 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3286 return 0;
3287
3288 /* We are in the epilogue if the previous instruction was a stack
3289 adjustment and the next instruction is a possible return (bx, mov
3290 pc, or pop). We could have to scan backwards to find the stack
3291 adjustment, or forwards to find the return, but this is a decent
3292 approximation. First scan forwards. */
3293
3294 found_return = 0;
3295 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3296 if (bits (insn, 28, 31) != INST_NV)
3297 {
3298 if ((insn & 0x0ffffff0) == 0x012fff10)
3299 /* BX. */
3300 found_return = 1;
3301 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3302 /* MOV PC. */
3303 found_return = 1;
3304 else if ((insn & 0x0fff0000) == 0x08bd0000
3305 && (insn & 0x0000c000) != 0)
3306 /* POP (LDMIA), including PC or LR. */
3307 found_return = 1;
3308 }
3309
3310 if (!found_return)
3311 return 0;
3312
3313 /* Scan backwards. This is just a heuristic, so do not worry about
3314 false positives from mode changes. */
3315
3316 if (pc < func_start + 4)
3317 return 0;
3318
3319 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3320 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3321 return 1;
3322
3323 return 0;
3324}
3325
c58b006a
YQ
3326/* Implement the stack_frame_destroyed_p gdbarch method. */
3327
3328static int
3329arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3330{
3331 if (arm_pc_is_thumb (gdbarch, pc))
3332 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3333 else
3334 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3335}
4024ca99 3336
2dd604e7
RE
3337/* When arguments must be pushed onto the stack, they go on in reverse
3338 order. The code below implements a FILO (stack) to do this. */
3339
3340struct stack_item
3341{
3342 int len;
3343 struct stack_item *prev;
7c543f7b 3344 gdb_byte *data;
2dd604e7
RE
3345};
3346
3347static struct stack_item *
df3b6708 3348push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3349{
3350 struct stack_item *si;
8d749320 3351 si = XNEW (struct stack_item);
7c543f7b 3352 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3353 si->len = len;
3354 si->prev = prev;
3355 memcpy (si->data, contents, len);
3356 return si;
3357}
3358
3359static struct stack_item *
3360pop_stack_item (struct stack_item *si)
3361{
3362 struct stack_item *dead = si;
3363 si = si->prev;
3364 xfree (dead->data);
3365 xfree (dead);
3366 return si;
3367}
3368
2af48f68
PB
3369
3370/* Return the alignment (in bytes) of the given type. */
3371
3372static int
3373arm_type_align (struct type *t)
3374{
3375 int n;
3376 int align;
3377 int falign;
3378
3379 t = check_typedef (t);
3380 switch (TYPE_CODE (t))
3381 {
3382 default:
3383 /* Should never happen. */
3384 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3385 return 4;
3386
3387 case TYPE_CODE_PTR:
3388 case TYPE_CODE_ENUM:
3389 case TYPE_CODE_INT:
3390 case TYPE_CODE_FLT:
3391 case TYPE_CODE_SET:
3392 case TYPE_CODE_RANGE:
2af48f68 3393 case TYPE_CODE_REF:
aa006118 3394 case TYPE_CODE_RVALUE_REF:
2af48f68
PB
3395 case TYPE_CODE_CHAR:
3396 case TYPE_CODE_BOOL:
3397 return TYPE_LENGTH (t);
3398
3399 case TYPE_CODE_ARRAY:
c4312b19
YQ
3400 if (TYPE_VECTOR (t))
3401 {
 3402	    /* Use the natural alignment for vector types (the same as for
 3403	       the scalar type), but cap the alignment at 64 bits.  */
3404 if (TYPE_LENGTH (t) > 8)
3405 return 8;
3406 else
3407 return TYPE_LENGTH (t);
3408 }
3409 else
3410 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3411 case TYPE_CODE_COMPLEX:
2af48f68
PB
3412 return arm_type_align (TYPE_TARGET_TYPE (t));
3413
3414 case TYPE_CODE_STRUCT:
3415 case TYPE_CODE_UNION:
3416 align = 1;
3417 for (n = 0; n < TYPE_NFIELDS (t); n++)
3418 {
3419 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3420 if (falign > align)
3421 align = falign;
3422 }
3423 return align;
3424 }
3425}
3426
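/* For example (an illustrative sketch, not a full statement of the AAPCS
   rules): for "struct { char c; double d; }" the loop above returns the
   largest field alignment, 8, while a plain "char[16]" array inherits the
   1-byte alignment of its element type.  */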
90445bd3
DJ
3427/* Possible base types for a candidate for passing and returning in
3428 VFP registers. */
3429
3430enum arm_vfp_cprc_base_type
3431{
3432 VFP_CPRC_UNKNOWN,
3433 VFP_CPRC_SINGLE,
3434 VFP_CPRC_DOUBLE,
3435 VFP_CPRC_VEC64,
3436 VFP_CPRC_VEC128
3437};
3438
3439/* The length of one element of base type B. */
3440
3441static unsigned
3442arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3443{
3444 switch (b)
3445 {
3446 case VFP_CPRC_SINGLE:
3447 return 4;
3448 case VFP_CPRC_DOUBLE:
3449 return 8;
3450 case VFP_CPRC_VEC64:
3451 return 8;
3452 case VFP_CPRC_VEC128:
3453 return 16;
3454 default:
3455 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3456 (int) b);
3457 }
3458}
3459
3460/* The character ('s', 'd' or 'q') for the type of VFP register used
3461 for passing base type B. */
3462
3463static int
3464arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3465{
3466 switch (b)
3467 {
3468 case VFP_CPRC_SINGLE:
3469 return 's';
3470 case VFP_CPRC_DOUBLE:
3471 return 'd';
3472 case VFP_CPRC_VEC64:
3473 return 'd';
3474 case VFP_CPRC_VEC128:
3475 return 'q';
3476 default:
3477 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3478 (int) b);
3479 }
3480}
3481
3482/* Determine whether T may be part of a candidate for passing and
3483 returning in VFP registers, ignoring the limit on the total number
3484 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3485 classification of the first valid component found; if it is not
3486 VFP_CPRC_UNKNOWN, all components must have the same classification
3487 as *BASE_TYPE. If it is found that T contains a type not permitted
3488 for passing and returning in VFP registers, a type differently
3489 classified from *BASE_TYPE, or two types differently classified
3490 from each other, return -1, otherwise return the total number of
3491 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3492 array). Vector types are not currently supported, matching the
3493 generic AAPCS support. */
90445bd3
DJ
3494
3495static int
3496arm_vfp_cprc_sub_candidate (struct type *t,
3497 enum arm_vfp_cprc_base_type *base_type)
3498{
3499 t = check_typedef (t);
3500 switch (TYPE_CODE (t))
3501 {
3502 case TYPE_CODE_FLT:
3503 switch (TYPE_LENGTH (t))
3504 {
3505 case 4:
3506 if (*base_type == VFP_CPRC_UNKNOWN)
3507 *base_type = VFP_CPRC_SINGLE;
3508 else if (*base_type != VFP_CPRC_SINGLE)
3509 return -1;
3510 return 1;
3511
3512 case 8:
3513 if (*base_type == VFP_CPRC_UNKNOWN)
3514 *base_type = VFP_CPRC_DOUBLE;
3515 else if (*base_type != VFP_CPRC_DOUBLE)
3516 return -1;
3517 return 1;
3518
3519 default:
3520 return -1;
3521 }
3522 break;
3523
817e0957
YQ
3524 case TYPE_CODE_COMPLEX:
3525 /* Arguments of complex T where T is one of the types float or
3526 double get treated as if they are implemented as:
3527
3528 struct complexT
3529 {
3530 T real;
3531 T imag;
5f52445b
YQ
3532 };
3533
3534 */
817e0957
YQ
3535 switch (TYPE_LENGTH (t))
3536 {
3537 case 8:
3538 if (*base_type == VFP_CPRC_UNKNOWN)
3539 *base_type = VFP_CPRC_SINGLE;
3540 else if (*base_type != VFP_CPRC_SINGLE)
3541 return -1;
3542 return 2;
3543
3544 case 16:
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_DOUBLE;
3547 else if (*base_type != VFP_CPRC_DOUBLE)
3548 return -1;
3549 return 2;
3550
3551 default:
3552 return -1;
3553 }
3554 break;
3555
90445bd3
DJ
3556 case TYPE_CODE_ARRAY:
3557 {
c4312b19 3558 if (TYPE_VECTOR (t))
90445bd3 3559 {
c4312b19
YQ
 3560	    /* 64-bit and 128-bit containerized vector types are VFP
 3561	       CPRCs.  */
3562 switch (TYPE_LENGTH (t))
3563 {
3564 case 8:
3565 if (*base_type == VFP_CPRC_UNKNOWN)
3566 *base_type = VFP_CPRC_VEC64;
3567 return 1;
3568 case 16:
3569 if (*base_type == VFP_CPRC_UNKNOWN)
3570 *base_type = VFP_CPRC_VEC128;
3571 return 1;
3572 default:
3573 return -1;
3574 }
3575 }
3576 else
3577 {
3578 int count;
3579 unsigned unitlen;
3580
3581 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3582 base_type);
3583 if (count == -1)
3584 return -1;
3585 if (TYPE_LENGTH (t) == 0)
3586 {
3587 gdb_assert (count == 0);
3588 return 0;
3589 }
3590 else if (count == 0)
3591 return -1;
3592 unitlen = arm_vfp_cprc_unit_length (*base_type);
3593 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3594 return TYPE_LENGTH (t) / unitlen;
90445bd3 3595 }
90445bd3
DJ
3596 }
3597 break;
3598
3599 case TYPE_CODE_STRUCT:
3600 {
3601 int count = 0;
3602 unsigned unitlen;
3603 int i;
3604 for (i = 0; i < TYPE_NFIELDS (t); i++)
3605 {
1040b979
YQ
3606 int sub_count = 0;
3607
3608 if (!field_is_static (&TYPE_FIELD (t, i)))
3609 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3610 base_type);
90445bd3
DJ
3611 if (sub_count == -1)
3612 return -1;
3613 count += sub_count;
3614 }
3615 if (TYPE_LENGTH (t) == 0)
3616 {
3617 gdb_assert (count == 0);
3618 return 0;
3619 }
3620 else if (count == 0)
3621 return -1;
3622 unitlen = arm_vfp_cprc_unit_length (*base_type);
3623 if (TYPE_LENGTH (t) != unitlen * count)
3624 return -1;
3625 return count;
3626 }
3627
3628 case TYPE_CODE_UNION:
3629 {
3630 int count = 0;
3631 unsigned unitlen;
3632 int i;
3633 for (i = 0; i < TYPE_NFIELDS (t); i++)
3634 {
3635 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3636 base_type);
3637 if (sub_count == -1)
3638 return -1;
3639 count = (count > sub_count ? count : sub_count);
3640 }
3641 if (TYPE_LENGTH (t) == 0)
3642 {
3643 gdb_assert (count == 0);
3644 return 0;
3645 }
3646 else if (count == 0)
3647 return -1;
3648 unitlen = arm_vfp_cprc_unit_length (*base_type);
3649 if (TYPE_LENGTH (t) != unitlen * count)
3650 return -1;
3651 return count;
3652 }
3653
3654 default:
3655 break;
3656 }
3657
3658 return -1;
3659}
3660
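/* A worked example (for illustration only): for
   "struct { float x; float y; float z; }" every field classifies as
   VFP_CPRC_SINGLE, so this function returns 3 with *BASE_TYPE set to
   VFP_CPRC_SINGLE, and arm_vfp_call_candidate below accepts it because
   the count is between 1 and 4.  For "struct { float x; double y; }"
   the double conflicts with the SINGLE classification already recorded,
   so the result is -1 and the value is passed under the base ABI.  */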
3661/* Determine whether T is a VFP co-processor register candidate (CPRC)
3662 if passed to or returned from a non-variadic function with the VFP
3663 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3664 *BASE_TYPE to the base type for T and *COUNT to the number of
3665 elements of that base type before returning. */
3666
3667static int
3668arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3669 int *count)
3670{
3671 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3672 int c = arm_vfp_cprc_sub_candidate (t, &b);
3673 if (c <= 0 || c > 4)
3674 return 0;
3675 *base_type = b;
3676 *count = c;
3677 return 1;
3678}
3679
3680/* Return 1 if the VFP ABI should be used for passing arguments to and
3681 returning values from a function of type FUNC_TYPE, 0
3682 otherwise. */
3683
3684static int
3685arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3686{
3687 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3688 /* Variadic functions always use the base ABI. Assume that functions
3689 without debug info are not variadic. */
3690 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3691 return 0;
3692 /* The VFP ABI is only supported as a variant of AAPCS. */
3693 if (tdep->arm_abi != ARM_ABI_AAPCS)
3694 return 0;
3695 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3696}
3697
3698/* We currently only support passing parameters in integer registers, which
3699 conforms with GCC's default model, and VFP argument passing following
3700 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3701 we should probably support some of them based on the selected ABI. */
3702
3703static CORE_ADDR
7d9b040b 3704arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3705 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3706 struct value **args, CORE_ADDR sp, int struct_return,
3707 CORE_ADDR struct_addr)
2dd604e7 3708{
e17a4113 3709 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3710 int argnum;
3711 int argreg;
3712 int nstack;
3713 struct stack_item *si = NULL;
90445bd3
DJ
3714 int use_vfp_abi;
3715 struct type *ftype;
3716 unsigned vfp_regs_free = (1 << 16) - 1;
3717
3718 /* Determine the type of this function and whether the VFP ABI
3719 applies. */
3720 ftype = check_typedef (value_type (function));
3721 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3722 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3723 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3724
6a65450a
AC
3725 /* Set the return address. For the ARM, the return breakpoint is
3726 always at BP_ADDR. */
9779414d 3727 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3728 bp_addr |= 1;
6a65450a 3729 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3730
3731 /* Walk through the list of args and determine how large a temporary
3732 stack is required. Need to take care here as structs may be
7a9dd1b2 3733 passed on the stack, and we have to push them. */
2dd604e7
RE
3734 nstack = 0;
3735
3736 argreg = ARM_A1_REGNUM;
3737 nstack = 0;
3738
2dd604e7
RE
3739 /* The struct_return pointer occupies the first parameter
3740 passing register. */
3741 if (struct_return)
3742 {
3743 if (arm_debug)
5af949e3 3744 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3745 gdbarch_register_name (gdbarch, argreg),
5af949e3 3746 paddress (gdbarch, struct_addr));
2dd604e7
RE
3747 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3748 argreg++;
3749 }
3750
3751 for (argnum = 0; argnum < nargs; argnum++)
3752 {
3753 int len;
3754 struct type *arg_type;
3755 struct type *target_type;
3756 enum type_code typecode;
8c6363cf 3757 const bfd_byte *val;
2af48f68 3758 int align;
90445bd3
DJ
3759 enum arm_vfp_cprc_base_type vfp_base_type;
3760 int vfp_base_count;
3761 int may_use_core_reg = 1;
2dd604e7 3762
df407dfe 3763 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3764 len = TYPE_LENGTH (arg_type);
3765 target_type = TYPE_TARGET_TYPE (arg_type);
3766 typecode = TYPE_CODE (arg_type);
8c6363cf 3767 val = value_contents (args[argnum]);
2dd604e7 3768
2af48f68
PB
3769 align = arm_type_align (arg_type);
3770 /* Round alignment up to a whole number of words. */
3771 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3772 /* Different ABIs have different maximum alignments. */
3773 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3774 {
3775 /* The APCS ABI only requires word alignment. */
3776 align = INT_REGISTER_SIZE;
3777 }
3778 else
3779 {
3780 /* The AAPCS requires at most doubleword alignment. */
3781 if (align > INT_REGISTER_SIZE * 2)
3782 align = INT_REGISTER_SIZE * 2;
3783 }
3784
90445bd3
DJ
3785 if (use_vfp_abi
3786 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3787 &vfp_base_count))
3788 {
3789 int regno;
3790 int unit_length;
3791 int shift;
3792 unsigned mask;
3793
3794 /* Because this is a CPRC it cannot go in a core register or
3795 cause a core register to be skipped for alignment.
3796 Either it goes in VFP registers and the rest of this loop
3797 iteration is skipped for this argument, or it goes on the
3798 stack (and the stack alignment code is correct for this
3799 case). */
3800 may_use_core_reg = 0;
3801
3802 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3803 shift = unit_length / 4;
3804 mask = (1 << (shift * vfp_base_count)) - 1;
3805 for (regno = 0; regno < 16; regno += shift)
3806 if (((vfp_regs_free >> regno) & mask) == mask)
3807 break;
3808
3809 if (regno < 16)
3810 {
3811 int reg_char;
3812 int reg_scaled;
3813 int i;
3814
3815 vfp_regs_free &= ~(mask << regno);
3816 reg_scaled = regno / shift;
3817 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3818 for (i = 0; i < vfp_base_count; i++)
3819 {
3820 char name_buf[4];
3821 int regnum;
58d6951d
DJ
3822 if (reg_char == 'q')
3823 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3824 val + i * unit_length);
58d6951d
DJ
3825 else
3826 {
8c042590
PM
3827 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3828 reg_char, reg_scaled + i);
58d6951d
DJ
3829 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3830 strlen (name_buf));
3831 regcache_cooked_write (regcache, regnum,
3832 val + i * unit_length);
3833 }
90445bd3
DJ
3834 }
3835 continue;
3836 }
3837 else
3838 {
3839 /* This CPRC could not go in VFP registers, so all VFP
3840 registers are now marked as used. */
3841 vfp_regs_free = 0;
3842 }
3843 }
3844
2af48f68
PB
 3845       /* Push stack padding for doubleword alignment.  */
3846 if (nstack & (align - 1))
3847 {
3848 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3849 nstack += INT_REGISTER_SIZE;
3850 }
3851
3852 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3853 if (may_use_core_reg
3854 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3855 && align > INT_REGISTER_SIZE
3856 && argreg & 1)
3857 argreg++;
3858
2dd604e7
RE
3859 /* If the argument is a pointer to a function, and it is a
3860 Thumb function, create a LOCAL copy of the value and set
3861 the THUMB bit in it. */
3862 if (TYPE_CODE_PTR == typecode
3863 && target_type != NULL
f96b8fa0 3864 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3865 {
e17a4113 3866 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3867 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3868 {
224c3ddb 3869 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3870 store_unsigned_integer (copy, len, byte_order,
e17a4113 3871 MAKE_THUMB_ADDR (regval));
8c6363cf 3872 val = copy;
2dd604e7
RE
3873 }
3874 }
3875
3876 /* Copy the argument to general registers or the stack in
3877 register-sized pieces. Large arguments are split between
3878 registers and stack. */
3879 while (len > 0)
3880 {
f0c9063c 3881 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3882 CORE_ADDR regval
3883 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3884
90445bd3 3885 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3886 {
3887 /* The argument is being passed in a general purpose
3888 register. */
e17a4113 3889 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3890 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3891 if (arm_debug)
3892 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3893 argnum,
3894 gdbarch_register_name
2af46ca0 3895 (gdbarch, argreg),
f0c9063c 3896 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3897 regcache_cooked_write_unsigned (regcache, argreg, regval);
3898 argreg++;
3899 }
3900 else
3901 {
ef9bd0b8
YQ
3902 gdb_byte buf[INT_REGISTER_SIZE];
3903
3904 memset (buf, 0, sizeof (buf));
3905 store_unsigned_integer (buf, partial_len, byte_order, regval);
3906
2dd604e7
RE
3907 /* Push the arguments onto the stack. */
3908 if (arm_debug)
3909 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3910 argnum, nstack);
ef9bd0b8 3911 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3912 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3913 }
3914
3915 len -= partial_len;
3916 val += partial_len;
3917 }
3918 }
3919 /* If we have an odd number of words to push, then decrement the stack
3920 by one word now, so first stack argument will be dword aligned. */
3921 if (nstack & 4)
3922 sp -= 4;
3923
3924 while (si)
3925 {
3926 sp -= si->len;
3927 write_memory (sp, si->data, si->len);
3928 si = pop_stack_item (si);
3929 }
3930
 3931   /* Finally, update the SP register.  */
3932 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3933
3934 return sp;
3935}
3936
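/* To illustrate the argument-passing code above (a hedged sketch, assuming
   no struct return and a simple prototype "void f (int a, double d)"):
   under the base AAPCS, A goes in r0 and D, being doubleword aligned, skips
   r1 and is split across the even register pair r2/r3; under the VFP
   variant of the ABI, D is a VFP CPRC and is instead written to d0, while
   A still goes in r0.  */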
f53f0d0b
PB
3937
3938/* Always align the frame to an 8-byte boundary. This is required on
3939 some platforms and harmless on the rest. */
3940
3941static CORE_ADDR
3942arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3943{
3944 /* Align the stack to eight bytes. */
3945 return sp & ~ (CORE_ADDR) 7;
3946}
3947
c906108c 3948static void
12b27276 3949print_fpu_flags (struct ui_file *file, int flags)
c906108c 3950{
c5aa993b 3951 if (flags & (1 << 0))
12b27276 3952 fputs_filtered ("IVO ", file);
c5aa993b 3953 if (flags & (1 << 1))
12b27276 3954 fputs_filtered ("DVZ ", file);
c5aa993b 3955 if (flags & (1 << 2))
12b27276 3956 fputs_filtered ("OFL ", file);
c5aa993b 3957 if (flags & (1 << 3))
12b27276 3958 fputs_filtered ("UFL ", file);
c5aa993b 3959 if (flags & (1 << 4))
12b27276
WN
3960 fputs_filtered ("INX ", file);
3961 fputc_filtered ('\n', file);
c906108c
SS
3962}
3963
5e74b15c
RE
3964/* Print interesting information about the floating point processor
3965 (if present) or emulator. */
34e8f22d 3966static void
d855c300 3967arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3968 struct frame_info *frame, const char *args)
c906108c 3969{
9c9acae0 3970 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3971 int type;
3972
3973 type = (status >> 24) & 127;
edefbb7c 3974 if (status & (1 << 31))
12b27276 3975 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3976 else
12b27276 3977 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3978 /* i18n: [floating point unit] mask */
12b27276
WN
3979 fputs_filtered (_("mask: "), file);
3980 print_fpu_flags (file, status >> 16);
edefbb7c 3981 /* i18n: [floating point unit] flags */
12b27276
WN
3982 fputs_filtered (_("flags: "), file);
3983 print_fpu_flags (file, status);
c906108c
SS
3984}
3985
27067745
UW
3986/* Construct the ARM extended floating point type. */
3987static struct type *
3988arm_ext_type (struct gdbarch *gdbarch)
3989{
3990 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3991
3992 if (!tdep->arm_ext_type)
3993 tdep->arm_ext_type
e9bb382b 3994 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3995 floatformats_arm_ext);
3996
3997 return tdep->arm_ext_type;
3998}
3999
58d6951d
DJ
4000static struct type *
4001arm_neon_double_type (struct gdbarch *gdbarch)
4002{
4003 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4004
4005 if (tdep->neon_double_type == NULL)
4006 {
4007 struct type *t, *elem;
4008
4009 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4010 TYPE_CODE_UNION);
4011 elem = builtin_type (gdbarch)->builtin_uint8;
4012 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4013 elem = builtin_type (gdbarch)->builtin_uint16;
4014 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4015 elem = builtin_type (gdbarch)->builtin_uint32;
4016 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4017 elem = builtin_type (gdbarch)->builtin_uint64;
4018 append_composite_type_field (t, "u64", elem);
4019 elem = builtin_type (gdbarch)->builtin_float;
4020 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4021 elem = builtin_type (gdbarch)->builtin_double;
4022 append_composite_type_field (t, "f64", elem);
4023
4024 TYPE_VECTOR (t) = 1;
4025 TYPE_NAME (t) = "neon_d";
4026 tdep->neon_double_type = t;
4027 }
4028
4029 return tdep->neon_double_type;
4030}
4031
4032/* FIXME: The vector types are not correctly ordered on big-endian
4033 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4034 bits of d0 - regardless of what unit size is being held in d0. So
4035 the offset of the first uint8 in d0 is 7, but the offset of the
4036 first float is 4. This code works as-is for little-endian
4037 targets. */
4038
4039static struct type *
4040arm_neon_quad_type (struct gdbarch *gdbarch)
4041{
4042 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4043
4044 if (tdep->neon_quad_type == NULL)
4045 {
4046 struct type *t, *elem;
4047
4048 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4049 TYPE_CODE_UNION);
4050 elem = builtin_type (gdbarch)->builtin_uint8;
4051 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4052 elem = builtin_type (gdbarch)->builtin_uint16;
4053 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4054 elem = builtin_type (gdbarch)->builtin_uint32;
4055 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4056 elem = builtin_type (gdbarch)->builtin_uint64;
4057 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4058 elem = builtin_type (gdbarch)->builtin_float;
4059 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4060 elem = builtin_type (gdbarch)->builtin_double;
4061 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4062
4063 TYPE_VECTOR (t) = 1;
4064 TYPE_NAME (t) = "neon_q";
4065 tdep->neon_quad_type = t;
4066 }
4067
4068 return tdep->neon_quad_type;
4069}
4070
34e8f22d
RE
4071/* Return the GDB type object for the "standard" data type of data in
4072 register N. */
4073
4074static struct type *
7a5ea0d4 4075arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4076{
58d6951d
DJ
4077 int num_regs = gdbarch_num_regs (gdbarch);
4078
4079 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4080 && regnum >= num_regs && regnum < num_regs + 32)
4081 return builtin_type (gdbarch)->builtin_float;
4082
4083 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4084 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4085 return arm_neon_quad_type (gdbarch);
4086
4087 /* If the target description has register information, we are only
4088 in this function so that we can override the types of
4089 double-precision registers for NEON. */
4090 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4091 {
4092 struct type *t = tdesc_register_type (gdbarch, regnum);
4093
4094 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4095 && TYPE_CODE (t) == TYPE_CODE_FLT
4096 && gdbarch_tdep (gdbarch)->have_neon)
4097 return arm_neon_double_type (gdbarch);
4098 else
4099 return t;
4100 }
4101
34e8f22d 4102 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4103 {
4104 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4105 return builtin_type (gdbarch)->builtin_void;
4106
4107 return arm_ext_type (gdbarch);
4108 }
e4c16157 4109 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4110 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4111 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4112 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4113 else if (regnum >= ARRAY_SIZE (arm_register_names))
4114 /* These registers are only supported on targets which supply
4115 an XML description. */
df4df182 4116 return builtin_type (gdbarch)->builtin_int0;
032758dc 4117 else
df4df182 4118 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4119}
4120
ff6f572f
DJ
4121/* Map a DWARF register REGNUM onto the appropriate GDB register
4122 number. */
4123
4124static int
d3f73121 4125arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4126{
4127 /* Core integer regs. */
4128 if (reg >= 0 && reg <= 15)
4129 return reg;
4130
4131 /* Legacy FPA encoding. These were once used in a way which
4132 overlapped with VFP register numbering, so their use is
4133 discouraged, but GDB doesn't support the ARM toolchain
4134 which used them for VFP. */
4135 if (reg >= 16 && reg <= 23)
4136 return ARM_F0_REGNUM + reg - 16;
4137
4138 /* New assignments for the FPA registers. */
4139 if (reg >= 96 && reg <= 103)
4140 return ARM_F0_REGNUM + reg - 96;
4141
4142 /* WMMX register assignments. */
4143 if (reg >= 104 && reg <= 111)
4144 return ARM_WCGR0_REGNUM + reg - 104;
4145
4146 if (reg >= 112 && reg <= 127)
4147 return ARM_WR0_REGNUM + reg - 112;
4148
4149 if (reg >= 192 && reg <= 199)
4150 return ARM_WC0_REGNUM + reg - 192;
4151
58d6951d
DJ
4152 /* VFP v2 registers. A double precision value is actually
4153 in d1 rather than s2, but the ABI only defines numbering
4154 for the single precision registers. This will "just work"
4155 in GDB for little endian targets (we'll read eight bytes,
4156 starting in s0 and then progressing to s1), but will be
4157 reversed on big endian targets with VFP. This won't
4158 be a problem for the new Neon quad registers; you're supposed
4159 to use DW_OP_piece for those. */
4160 if (reg >= 64 && reg <= 95)
4161 {
4162 char name_buf[4];
4163
8c042590 4164 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4165 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4166 strlen (name_buf));
4167 }
4168
4169 /* VFP v3 / Neon registers. This range is also used for VFP v2
4170 registers, except that it now describes d0 instead of s0. */
4171 if (reg >= 256 && reg <= 287)
4172 {
4173 char name_buf[4];
4174
8c042590 4175 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4176 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4177 strlen (name_buf));
4178 }
4179
ff6f572f
DJ
4180 return -1;
4181}
4182
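/* For example (illustrative only): DWARF register 0 maps directly to r0,
   DWARF register 64 is looked up as "s0" and DWARF register 256 as "d0"
   via user_reg_map_name_to_regnum, and a number outside all of the ranges
   above, such as 300, yields -1.  */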
26216b98
AC
4183/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4184static int
e7faf938 4185arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4186{
4187 int reg = regnum;
e7faf938 4188 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4189
ff6f572f
DJ
4190 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4191 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4192
4193 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4194 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4195
4196 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4197 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4198
26216b98
AC
4199 if (reg < NUM_GREGS)
4200 return SIM_ARM_R0_REGNUM + reg;
4201 reg -= NUM_GREGS;
4202
4203 if (reg < NUM_FREGS)
4204 return SIM_ARM_FP0_REGNUM + reg;
4205 reg -= NUM_FREGS;
4206
4207 if (reg < NUM_SREGS)
4208 return SIM_ARM_FPS_REGNUM + reg;
4209 reg -= NUM_SREGS;
4210
edefbb7c 4211 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4212}
34e8f22d 4213
a37b3cc0
AC
4214/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4215 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
 4216   It is thought that this is the floating-point register format on
4217 little-endian systems. */
c906108c 4218
ed9a39eb 4219static void
b508a996 4220convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4221 void *dbl, int endianess)
c906108c 4222{
a37b3cc0 4223 DOUBLEST d;
be8626e0
MD
4224
4225 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4226 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4227 else
4228 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4229 ptr, &d);
b508a996 4230 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4231}
4232
34e8f22d 4233static void
be8626e0
MD
4234convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4235 int endianess)
c906108c 4236{
a37b3cc0 4237 DOUBLEST d;
be8626e0 4238
b508a996 4239 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4240 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4241 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4242 else
4243 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4244 &d, dbl);
c906108c 4245}
ed9a39eb 4246
d9311bfa
AT
4247/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4248 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4249 NULL if an error occurs. BUF is freed. */
c906108c 4250
d9311bfa
AT
4251static gdb_byte *
4252extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4253 int old_len, int new_len)
4254{
4255 gdb_byte *new_buf;
4256 int bytes_to_read = new_len - old_len;
c906108c 4257
d9311bfa
AT
4258 new_buf = (gdb_byte *) xmalloc (new_len);
4259 memcpy (new_buf + bytes_to_read, buf, old_len);
4260 xfree (buf);
198cd59d 4261 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
d9311bfa
AT
4262 {
4263 xfree (new_buf);
4264 return NULL;
c906108c 4265 }
d9311bfa 4266 return new_buf;
c906108c
SS
4267}
4268
d9311bfa
AT
4269/* An IT block is at most the 2-byte IT instruction followed by
4270 four 4-byte instructions. The furthest back we must search to
4271 find an IT block that affects the current instruction is thus
4272 2 + 3 * 4 == 14 bytes. */
4273#define MAX_IT_BLOCK_PREFIX 14
177321bd 4274
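/* To make the arithmetic above concrete (an illustrative worst case): the
   breakpointed instruction may be the fourth and last instruction covered
   by an IT block, with the three instructions between it and the IT
   instruction all being 32-bit Thumb-2 instructions, i.e. 2 + 4 + 4 + 4
   == 14 bytes from the start of the IT instruction to the breakpoint
   address.  */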
d9311bfa
AT
4275/* Use a quick scan if there are more than this many bytes of
4276 code. */
4277#define IT_SCAN_THRESHOLD 32
177321bd 4278
d9311bfa
AT
4279/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4280 A breakpoint in an IT block may not be hit, depending on the
4281 condition flags. */
ad527d2e 4282static CORE_ADDR
d9311bfa 4283arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4284{
d9311bfa
AT
4285 gdb_byte *buf;
4286 char map_type;
4287 CORE_ADDR boundary, func_start;
4288 int buf_len;
4289 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4290 int i, any, last_it, last_it_count;
177321bd 4291
d9311bfa
AT
4292 /* If we are using BKPT breakpoints, none of this is necessary. */
4293 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4294 return bpaddr;
177321bd 4295
d9311bfa
AT
4296 /* ARM mode does not have this problem. */
4297 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4298 return bpaddr;
177321bd 4299
d9311bfa
AT
4300 /* We are setting a breakpoint in Thumb code that could potentially
4301 contain an IT block. The first step is to find how much Thumb
4302 code there is; we do not need to read outside of known Thumb
4303 sequences. */
4304 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4305 if (map_type == 0)
4306 /* Thumb-2 code must have mapping symbols to have a chance. */
4307 return bpaddr;
9dca5578 4308
d9311bfa 4309 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4310
d9311bfa
AT
4311 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4312 && func_start > boundary)
4313 boundary = func_start;
9dca5578 4314
d9311bfa
AT
4315 /* Search for a candidate IT instruction. We have to do some fancy
4316 footwork to distinguish a real IT instruction from the second
4317 half of a 32-bit instruction, but there is no need for that if
4318 there's no candidate. */
325fac50 4319 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
d9311bfa
AT
4320 if (buf_len == 0)
4321 /* No room for an IT instruction. */
4322 return bpaddr;
c906108c 4323
d9311bfa 4324 buf = (gdb_byte *) xmalloc (buf_len);
198cd59d 4325 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
d9311bfa
AT
4326 return bpaddr;
4327 any = 0;
4328 for (i = 0; i < buf_len; i += 2)
c906108c 4329 {
d9311bfa
AT
4330 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4331 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4332 {
d9311bfa
AT
4333 any = 1;
4334 break;
25b41d01 4335 }
c906108c 4336 }
d9311bfa
AT
4337
4338 if (any == 0)
c906108c 4339 {
d9311bfa
AT
4340 xfree (buf);
4341 return bpaddr;
f9d67f43
DJ
4342 }
4343
4344 /* OK, the code bytes before this instruction contain at least one
4345 halfword which resembles an IT instruction. We know that it's
4346 Thumb code, but there are still two possibilities. Either the
4347 halfword really is an IT instruction, or it is the second half of
4348 a 32-bit Thumb instruction. The only way we can tell is to
4349 scan forwards from a known instruction boundary. */
4350 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4351 {
4352 int definite;
4353
4354 /* There's a lot of code before this instruction. Start with an
4355 optimistic search; it's easy to recognize halfwords that can
4356 not be the start of a 32-bit instruction, and use that to
4357 lock on to the instruction boundaries. */
4358 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4359 if (buf == NULL)
4360 return bpaddr;
4361 buf_len = IT_SCAN_THRESHOLD;
4362
4363 definite = 0;
4364 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4365 {
4366 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4367 if (thumb_insn_size (inst1) == 2)
4368 {
4369 definite = 1;
4370 break;
4371 }
4372 }
4373
4374 /* At this point, if DEFINITE, BUF[I] is the first place we
4375 are sure that we know the instruction boundaries, and it is far
4376 enough from BPADDR that we could not miss an IT instruction
4377 affecting BPADDR. If ! DEFINITE, give up - start from a
4378 known boundary. */
4379 if (! definite)
4380 {
0963b4bd
MS
4381 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4382 bpaddr - boundary);
f9d67f43
DJ
4383 if (buf == NULL)
4384 return bpaddr;
4385 buf_len = bpaddr - boundary;
4386 i = 0;
4387 }
4388 }
4389 else
4390 {
4391 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4392 if (buf == NULL)
4393 return bpaddr;
4394 buf_len = bpaddr - boundary;
4395 i = 0;
4396 }
4397
4398 /* Scan forwards. Find the last IT instruction before BPADDR. */
4399 last_it = -1;
4400 last_it_count = 0;
4401 while (i < buf_len)
4402 {
4403 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4404 last_it_count--;
4405 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4406 {
4407 last_it = i;
4408 if (inst1 & 0x0001)
4409 last_it_count = 4;
4410 else if (inst1 & 0x0002)
4411 last_it_count = 3;
4412 else if (inst1 & 0x0004)
4413 last_it_count = 2;
4414 else
4415 last_it_count = 1;
4416 }
4417 i += thumb_insn_size (inst1);
4418 }
4419
4420 xfree (buf);
4421
4422 if (last_it == -1)
4423 /* There wasn't really an IT instruction after all. */
4424 return bpaddr;
4425
4426 if (last_it_count < 1)
4427 /* It was too far away. */
4428 return bpaddr;
4429
4430 /* This really is a trouble spot. Move the breakpoint to the IT
4431 instruction. */
4432 return bpaddr - buf_len + last_it;
4433}
4434
cca44b1b 4435/* ARM displaced stepping support.
c906108c 4436
cca44b1b 4437 Generally ARM displaced stepping works as follows:
c906108c 4438
cca44b1b 4439 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4440 arm_process_displaced_insn. Depending on the type of instruction, it is
4441 then copied to a scratch location, possibly in a modified form. The
4442 copy_* set of functions performs such modification, as necessary. A
4443 breakpoint is placed after the modified instruction in the scratch space
4444 to return control to GDB. Note in particular that instructions which
4445 modify the PC will no longer do so after modification.
c5aa993b 4446
cca44b1b
JB
4447 2. The instruction is single-stepped, by setting the PC to the scratch
4448 location address, and resuming. Control returns to GDB when the
4449 breakpoint is hit.
c5aa993b 4450
cca44b1b
JB
4451 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4452 function used for the current instruction. This function's job is to
4453 put the CPU/memory state back to what it would have been if the
4454 instruction had been executed unmodified in its original location. */
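/* As a concrete (and deliberately simplified) example of the scheme
   described above: a conditional branch such as "blne <label>" is copied
   into the scratch area as a NOP (see arm_copy_b_bl_blx below); the NOP is
   single-stepped there, and cleanup_branch then tests the condition flags
   and, if the branch would have been taken, writes the return address into
   LR and the real destination into the PC.  */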
c5aa993b 4455
cca44b1b
JB
4456/* NOP instruction (mov r0, r0). */
4457#define ARM_NOP 0xe1a00000
34518530 4458#define THUMB_NOP 0x4600
cca44b1b
JB
4459
4460/* Helper for register reads for displaced stepping. In particular, this
4461 returns the PC as it would be seen by the instruction at its original
4462 location. */
4463
4464ULONGEST
36073a92
YQ
4465displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4466 int regno)
cca44b1b
JB
4467{
4468 ULONGEST ret;
36073a92 4469 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4470
bf9f652a 4471 if (regno == ARM_PC_REGNUM)
cca44b1b 4472 {
4db71c0b
YQ
4473 /* Compute pipeline offset:
4474 - When executing an ARM instruction, PC reads as the address of the
4475 current instruction plus 8.
4476 - When executing a Thumb instruction, PC reads as the address of the
4477 current instruction plus 4. */
4478
36073a92 4479 if (!dsc->is_thumb)
4db71c0b
YQ
4480 from += 8;
4481 else
4482 from += 4;
4483
cca44b1b
JB
4484 if (debug_displaced)
4485 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4486 (unsigned long) from);
4487 return (ULONGEST) from;
cca44b1b 4488 }
c906108c 4489 else
cca44b1b
JB
4490 {
4491 regcache_cooked_read_unsigned (regs, regno, &ret);
4492 if (debug_displaced)
4493 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4494 regno, (unsigned long) ret);
4495 return ret;
4496 }
c906108c
SS
4497}
4498
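/* For example (an illustrative sketch): when an ARM-mode instruction
   originally at 0x8000 is being stepped, displaced_read_reg for
   ARM_PC_REGNUM returns 0x8008; for a Thumb instruction at the same
   address it would return 0x8004.  All other registers are simply read
   from the register cache.  */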
cca44b1b
JB
4499static int
4500displaced_in_arm_mode (struct regcache *regs)
4501{
4502 ULONGEST ps;
9779414d 4503 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4504
cca44b1b 4505 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4506
9779414d 4507 return (ps & t_bit) == 0;
cca44b1b 4508}
66e810cd 4509
cca44b1b 4510/* Write to the PC as from a branch instruction. */
c906108c 4511
cca44b1b 4512static void
36073a92
YQ
4513branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4514 ULONGEST val)
c906108c 4515{
36073a92 4516 if (!dsc->is_thumb)
cca44b1b
JB
4517 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4518 architecture versions < 6. */
0963b4bd
MS
4519 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4520 val & ~(ULONGEST) 0x3);
cca44b1b 4521 else
0963b4bd
MS
4522 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4523 val & ~(ULONGEST) 0x1);
cca44b1b 4524}
66e810cd 4525
cca44b1b
JB
4526/* Write to the PC as from a branch-exchange instruction. */
4527
4528static void
4529bx_write_pc (struct regcache *regs, ULONGEST val)
4530{
4531 ULONGEST ps;
9779414d 4532 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4533
4534 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4535
4536 if ((val & 1) == 1)
c906108c 4537 {
9779414d 4538 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4539 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4540 }
4541 else if ((val & 2) == 0)
4542 {
9779414d 4543 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4544 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4545 }
4546 else
4547 {
cca44b1b
JB
4548 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4549 mode, align dest to 4 bytes). */
4550 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4551 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4552 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4553 }
4554}
ed9a39eb 4555
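/* For example (illustrative only): bx_write_pc with VAL == 0x8001 sets
   the Thumb bit in the CPSR and writes 0x8000 to the PC, whereas
   VAL == 0x8000 clears the Thumb bit and writes 0x8000 unchanged.  */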
cca44b1b 4556/* Write to the PC as if from a load instruction. */
ed9a39eb 4557
34e8f22d 4558static void
36073a92
YQ
4559load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4560 ULONGEST val)
ed9a39eb 4561{
cca44b1b
JB
4562 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4563 bx_write_pc (regs, val);
4564 else
36073a92 4565 branch_write_pc (regs, dsc, val);
cca44b1b 4566}
be8626e0 4567
cca44b1b
JB
4568/* Write to the PC as if from an ALU instruction. */
4569
4570static void
36073a92
YQ
4571alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4572 ULONGEST val)
cca44b1b 4573{
36073a92 4574 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4575 bx_write_pc (regs, val);
4576 else
36073a92 4577 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4578}
4579
4580/* Helper for writing to registers for displaced stepping. Writing to the PC
 4581   has varying effects depending on the instruction which does the write:
4582 this is controlled by the WRITE_PC argument. */
4583
4584void
4585displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4586 int regno, ULONGEST val, enum pc_write_style write_pc)
4587{
bf9f652a 4588 if (regno == ARM_PC_REGNUM)
08216dd7 4589 {
cca44b1b
JB
4590 if (debug_displaced)
4591 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4592 (unsigned long) val);
4593 switch (write_pc)
08216dd7 4594 {
cca44b1b 4595 case BRANCH_WRITE_PC:
36073a92 4596 branch_write_pc (regs, dsc, val);
08216dd7
RE
4597 break;
4598
cca44b1b
JB
4599 case BX_WRITE_PC:
4600 bx_write_pc (regs, val);
4601 break;
4602
4603 case LOAD_WRITE_PC:
36073a92 4604 load_write_pc (regs, dsc, val);
cca44b1b
JB
4605 break;
4606
4607 case ALU_WRITE_PC:
36073a92 4608 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4609 break;
4610
4611 case CANNOT_WRITE_PC:
4612 warning (_("Instruction wrote to PC in an unexpected way when "
4613 "single-stepping"));
08216dd7
RE
4614 break;
4615
4616 default:
97b9747c
JB
4617 internal_error (__FILE__, __LINE__,
4618 _("Invalid argument to displaced_write_reg"));
08216dd7 4619 }
b508a996 4620
cca44b1b 4621 dsc->wrote_to_pc = 1;
b508a996 4622 }
ed9a39eb 4623 else
b508a996 4624 {
cca44b1b
JB
4625 if (debug_displaced)
4626 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4627 regno, (unsigned long) val);
4628 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4629 }
34e8f22d
RE
4630}
4631
cca44b1b
JB
4632/* This function is used to concisely determine if an instruction INSN
4633 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4634 corresponding fields of BITMASK set to 0b1111. The function
4635 returns return 1 if any of these fields in INSN reference the PC
4636 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4637
4638static int
cca44b1b 4639insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4640{
cca44b1b 4641 uint32_t lowbit = 1;
67255d04 4642
cca44b1b
JB
4643 while (bitmask != 0)
4644 {
4645 uint32_t mask;
44e1a9eb 4646
cca44b1b
JB
4647 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4648 ;
67255d04 4649
cca44b1b
JB
4650 if (!lowbit)
4651 break;
67255d04 4652
cca44b1b 4653 mask = lowbit * 0xf;
67255d04 4654
cca44b1b
JB
4655 if ((insn & mask) == mask)
4656 return 1;
4657
4658 bitmask &= ~mask;
67255d04
RE
4659 }
4660
cca44b1b
JB
4661 return 0;
4662}
2af48f68 4663
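/* For instance (illustrative only): arm_copy_preload below calls
   insn_references_pc (insn, 0x000f0000ul), which checks whether the Rn
   field in bits 16-19 of the instruction is 0b1111, i.e. whether the
   preload addresses memory relative to the PC.  */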
cca44b1b
JB
4664/* The simplest copy function. Many instructions have the same effect no
4665 matter what address they are executed at: in those cases, use this. */
67255d04 4666
cca44b1b 4667static int
7ff120b4
YQ
4668arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4669 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4670{
4671 if (debug_displaced)
4672 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4673 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4674 iname);
67255d04 4675
cca44b1b 4676 dsc->modinsn[0] = insn;
67255d04 4677
cca44b1b
JB
4678 return 0;
4679}
4680
34518530
YQ
4681static int
4682thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4683 uint16_t insn2, const char *iname,
4684 struct displaced_step_closure *dsc)
4685{
4686 if (debug_displaced)
4687 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4688 "opcode/class '%s' unmodified\n", insn1, insn2,
4689 iname);
4690
4691 dsc->modinsn[0] = insn1;
4692 dsc->modinsn[1] = insn2;
4693 dsc->numinsns = 2;
4694
4695 return 0;
4696}
4697
 4698/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4699 modification. */
4700static int
615234c1 4701thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4702 const char *iname,
4703 struct displaced_step_closure *dsc)
4704{
4705 if (debug_displaced)
4706 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4707 "opcode/class '%s' unmodified\n", insn,
4708 iname);
4709
4710 dsc->modinsn[0] = insn;
4711
4712 return 0;
4713}
4714
cca44b1b
JB
4715/* Preload instructions with immediate offset. */
4716
4717static void
6e39997a 4718cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4719 struct regcache *regs, struct displaced_step_closure *dsc)
4720{
4721 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4722 if (!dsc->u.preload.immed)
4723 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4724}
4725
7ff120b4
YQ
4726static void
4727install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4728 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4729{
cca44b1b 4730 ULONGEST rn_val;
cca44b1b
JB
4731 /* Preload instructions:
4732
4733 {pli/pld} [rn, #+/-imm]
4734 ->
4735 {pli/pld} [r0, #+/-imm]. */
4736
36073a92
YQ
4737 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4738 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4739 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4740 dsc->u.preload.immed = 1;
4741
cca44b1b 4742 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4743}
4744
cca44b1b 4745static int
7ff120b4 4746arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4747 struct displaced_step_closure *dsc)
4748{
4749 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4750
7ff120b4
YQ
4751 if (!insn_references_pc (insn, 0x000f0000ul))
4752 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4753
4754 if (debug_displaced)
4755 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4756 (unsigned long) insn);
4757
7ff120b4
YQ
4758 dsc->modinsn[0] = insn & 0xfff0ffff;
4759
4760 install_preload (gdbarch, regs, dsc, rn);
4761
4762 return 0;
4763}
4764
34518530
YQ
4765static int
4766thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4767 struct regcache *regs, struct displaced_step_closure *dsc)
4768{
4769 unsigned int rn = bits (insn1, 0, 3);
4770 unsigned int u_bit = bit (insn1, 7);
4771 int imm12 = bits (insn2, 0, 11);
4772 ULONGEST pc_val;
4773
4774 if (rn != ARM_PC_REGNUM)
4775 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4776
 4777  /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3,
 4778     and PLD (literal) Encoding T1.  */
4779 if (debug_displaced)
4780 fprintf_unfiltered (gdb_stdlog,
4781 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4782 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4783 imm12);
4784
4785 if (!u_bit)
4786 imm12 = -1 * imm12;
4787
4788 /* Rewrite instruction {pli/pld} PC imm12 into:
4789 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4790
4791 {pli/pld} [r0, r1]
4792
4793 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4794
4795 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4796 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4797
4798 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4799
4800 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4801 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4802 dsc->u.preload.immed = 0;
4803
4804 /* {pli/pld} [r0, r1] */
4805 dsc->modinsn[0] = insn1 & 0xfff0;
4806 dsc->modinsn[1] = 0xf001;
4807 dsc->numinsns = 2;
4808
4809 dsc->cleanup = &cleanup_preload;
4810 return 0;
4811}
4812
7ff120b4
YQ
4813/* Preload instructions with register offset. */
4814
4815static void
 4816install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4817 struct displaced_step_closure *dsc, unsigned int rn,
4818 unsigned int rm)
4819{
4820 ULONGEST rn_val, rm_val;
4821
cca44b1b
JB
4822 /* Preload register-offset instructions:
4823
4824 {pli/pld} [rn, rm {, shift}]
4825 ->
4826 {pli/pld} [r0, r1 {, shift}]. */
4827
36073a92
YQ
4828 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4829 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4830 rn_val = displaced_read_reg (regs, dsc, rn);
4831 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4832 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4833 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4834 dsc->u.preload.immed = 0;
4835
cca44b1b 4836 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4837}
4838
4839static int
4840arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4841 struct regcache *regs,
4842 struct displaced_step_closure *dsc)
4843{
4844 unsigned int rn = bits (insn, 16, 19);
4845 unsigned int rm = bits (insn, 0, 3);
4846
4847
4848 if (!insn_references_pc (insn, 0x000f000ful))
4849 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4850
4851 if (debug_displaced)
4852 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4853 (unsigned long) insn);
4854
4855 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4856
7ff120b4 4857 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4858 return 0;
4859}
4860
4861/* Copy/cleanup coprocessor load and store instructions. */
4862
4863static void
6e39997a 4864cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4865 struct regcache *regs,
4866 struct displaced_step_closure *dsc)
4867{
36073a92 4868 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4869
4870 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4871
4872 if (dsc->u.ldst.writeback)
4873 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4874}
4875
7ff120b4
YQ
4876static void
4877install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4878 struct displaced_step_closure *dsc,
4879 int writeback, unsigned int rn)
cca44b1b 4880{
cca44b1b 4881 ULONGEST rn_val;
cca44b1b 4882
cca44b1b
JB
4883 /* Coprocessor load/store instructions:
4884
4885 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4886 ->
4887 {stc/stc2} [r0, #+/-imm].
4888
4889 ldc/ldc2 are handled identically. */
4890
36073a92
YQ
4891 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4892 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4893 /* PC should be 4-byte aligned. */
4894 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4895 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4896
7ff120b4 4897 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4898 dsc->u.ldst.rn = rn;
4899
7ff120b4
YQ
4900 dsc->cleanup = &cleanup_copro_load_store;
4901}
4902
4903static int
4904arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4905 struct regcache *regs,
4906 struct displaced_step_closure *dsc)
4907{
4908 unsigned int rn = bits (insn, 16, 19);
4909
4910 if (!insn_references_pc (insn, 0x000f0000ul))
4911 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4912
4913 if (debug_displaced)
4914 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4915 "load/store insn %.8lx\n", (unsigned long) insn);
4916
cca44b1b
JB
4917 dsc->modinsn[0] = insn & 0xfff0ffff;
4918
7ff120b4 4919 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4920
4921 return 0;
4922}
4923
34518530
YQ
4924static int
4925thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4926 uint16_t insn2, struct regcache *regs,
4927 struct displaced_step_closure *dsc)
4928{
4929 unsigned int rn = bits (insn1, 0, 3);
4930
4931 if (rn != ARM_PC_REGNUM)
4932 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4933 "copro load/store", dsc);
4934
4935 if (debug_displaced)
4936 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4937 "load/store insn %.4x%.4x\n", insn1, insn2);
4938
4939 dsc->modinsn[0] = insn1 & 0xfff0;
4940 dsc->modinsn[1] = insn2;
4941 dsc->numinsns = 2;
4942
4943 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4944 doesn't support writeback, so pass 0. */
4945 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4946
4947 return 0;
4948}
4949
cca44b1b
JB
4950/* Clean up branch instructions (actually perform the branch, by setting
4951 PC). */
4952
4953static void
6e39997a 4954cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4955 struct displaced_step_closure *dsc)
4956{
36073a92 4957 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4958 int branch_taken = condition_true (dsc->u.branch.cond, status);
4959 enum pc_write_style write_pc = dsc->u.branch.exchange
4960 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4961
4962 if (!branch_taken)
4963 return;
4964
4965 if (dsc->u.branch.link)
4966 {
8c8dba6d
YQ
 4967      /* The value of LR should be the address of the insn after the current
 4968	  one.  In order not to confuse logic handling a later `bx lr' insn, if
 4969	  the current insn mode is Thumb, bit 0 of the LR value should be set to 1.  */
4970 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4971
4972 if (dsc->is_thumb)
4973 next_insn_addr |= 0x1;
4974
4975 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4976 CANNOT_WRITE_PC);
cca44b1b
JB
4977 }
4978
bf9f652a 4979 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4980}
4981
4982/* Copy B/BL/BLX instructions with immediate destinations. */
4983
7ff120b4
YQ
4984static void
4985install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4986 struct displaced_step_closure *dsc,
4987 unsigned int cond, int exchange, int link, long offset)
4988{
4989 /* Implement "BL<cond> <label>" as:
4990
4991 Preparation: cond <- instruction condition
4992 Insn: mov r0, r0 (nop)
4993 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4994
4995 B<cond> similar, but don't set r14 in cleanup. */
4996
4997 dsc->u.branch.cond = cond;
4998 dsc->u.branch.link = link;
4999 dsc->u.branch.exchange = exchange;
5000
2b16b2e3
YQ
5001 dsc->u.branch.dest = dsc->insn_addr;
5002 if (link && exchange)
5003 /* For BLX, offset is computed from the Align (PC, 4). */
5004 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5005
7ff120b4 5006 if (dsc->is_thumb)
2b16b2e3 5007 dsc->u.branch.dest += 4 + offset;
7ff120b4 5008 else
2b16b2e3 5009 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
5010
5011 dsc->cleanup = &cleanup_branch;
5012}
cca44b1b 5013static int
7ff120b4
YQ
5014arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5015 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5016{
5017 unsigned int cond = bits (insn, 28, 31);
5018 int exchange = (cond == 0xf);
5019 int link = exchange || bit (insn, 24);
cca44b1b
JB
5020 long offset;
5021
5022 if (debug_displaced)
5023 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5024 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5025 (unsigned long) insn);
cca44b1b
JB
5026 if (exchange)
5027 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5028 then arrange the switch into Thumb mode. */
5029 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5030 else
5031 offset = bits (insn, 0, 23) << 2;
5032
5033 if (bit (offset, 25))
5034 offset = offset | ~0x3ffffff;
5035
cca44b1b
JB
5036 dsc->modinsn[0] = ARM_NOP;
5037
7ff120b4 5038 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5039 return 0;
5040}
5041
34518530
YQ
5042static int
5043thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5044 uint16_t insn2, struct regcache *regs,
5045 struct displaced_step_closure *dsc)
5046{
5047 int link = bit (insn2, 14);
5048 int exchange = link && !bit (insn2, 12);
5049 int cond = INST_AL;
5050 long offset = 0;
5051 int j1 = bit (insn2, 13);
5052 int j2 = bit (insn2, 11);
5053 int s = sbits (insn1, 10, 10);
5054 int i1 = !(j1 ^ bit (insn1, 10));
5055 int i2 = !(j2 ^ bit (insn1, 10));
5056
5057 if (!link && !exchange) /* B */
5058 {
5059 offset = (bits (insn2, 0, 10) << 1);
5060 if (bit (insn2, 12)) /* Encoding T4 */
5061 {
5062 offset |= (bits (insn1, 0, 9) << 12)
5063 | (i2 << 22)
5064 | (i1 << 23)
5065 | (s << 24);
5066 cond = INST_AL;
5067 }
5068 else /* Encoding T3 */
5069 {
5070 offset |= (bits (insn1, 0, 5) << 12)
5071 | (j1 << 18)
5072 | (j2 << 19)
5073 | (s << 20);
5074 cond = bits (insn1, 6, 9);
5075 }
5076 }
5077 else
5078 {
5079 offset = (bits (insn1, 0, 9) << 12);
5080 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5081 offset |= exchange ?
5082 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5083 }
5084
5085 if (debug_displaced)
5086 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5087 "%.4x %.4x with offset %.8lx\n",
5088 link ? (exchange) ? "blx" : "bl" : "b",
5089 insn1, insn2, offset);
5090
5091 dsc->modinsn[0] = THUMB_NOP;
5092
5093 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5094 return 0;
5095}
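/* Illustrative sketch (hypothetical helper): the I1/I2 recovery used by
   thumb2_copy_b_bl_blx above for the T4/BL/BLX encodings, where
   I1 = NOT (J1 XOR S) and I2 = NOT (J2 XOR S).  */
#include <stdint.h>

static void
example_bl_i_bits (uint16_t insn1, uint16_t insn2, int *i1, int *i2)
{
  int s  = (insn1 >> 10) & 1;
  int j1 = (insn2 >> 13) & 1;
  int j2 = (insn2 >> 11) & 1;

  *i1 = !(j1 ^ s);
  *i2 = !(j2 ^ s);
}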
5096
5097/* Copy Thumb B instructions. */
5098static int
615234c1 5099thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5100 struct displaced_step_closure *dsc)
5101{
5102 unsigned int cond = 0;
5103 int offset = 0;
5104 unsigned short bit_12_15 = bits (insn, 12, 15);
5105 CORE_ADDR from = dsc->insn_addr;
5106
5107 if (bit_12_15 == 0xd)
5108 {
5109 /* offset = SignExtend (imm8:0, 32) */
5110 offset = sbits ((insn << 1), 0, 8);
5111 cond = bits (insn, 8, 11);
5112 }
5113 else if (bit_12_15 == 0xe) /* Encoding T2 */
5114 {
5115 offset = sbits ((insn << 1), 0, 11);
5116 cond = INST_AL;
5117 }
5118
5119 if (debug_displaced)
5120 fprintf_unfiltered (gdb_stdlog,
5121 "displaced: copying b immediate insn %.4x "
5122 "with offset %d\n", insn, offset);
5123
5124 dsc->u.branch.cond = cond;
5125 dsc->u.branch.link = 0;
5126 dsc->u.branch.exchange = 0;
5127 dsc->u.branch.dest = from + 4 + offset;
5128
5129 dsc->modinsn[0] = THUMB_NOP;
5130
5131 dsc->cleanup = &cleanup_branch;
5132
5133 return 0;
5134}
5135
cca44b1b
JB
5136/* Copy BX/BLX with register-specified destinations. */
5137
7ff120b4
YQ
5138static void
5139install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5140 struct displaced_step_closure *dsc, int link,
5141 unsigned int cond, unsigned int rm)
cca44b1b 5142{
cca44b1b
JB
 5143 /* Implement "{BX,BLX}<cond> <reg>" as:
5144
5145 Preparation: cond <- instruction condition
5146 Insn: mov r0, r0 (nop)
5147 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5148
5149 Don't set r14 in cleanup for BX. */
5150
36073a92 5151 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5152
5153 dsc->u.branch.cond = cond;
5154 dsc->u.branch.link = link;
cca44b1b 5155
7ff120b4 5156 dsc->u.branch.exchange = 1;
cca44b1b
JB
5157
5158 dsc->cleanup = &cleanup_branch;
7ff120b4 5159}
cca44b1b 5160
7ff120b4
YQ
5161static int
5162arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5163 struct regcache *regs, struct displaced_step_closure *dsc)
5164{
5165 unsigned int cond = bits (insn, 28, 31);
5166 /* BX: x12xxx1x
5167 BLX: x12xxx3x. */
5168 int link = bit (insn, 5);
5169 unsigned int rm = bits (insn, 0, 3);
5170
5171 if (debug_displaced)
5172 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5173 (unsigned long) insn);
5174
5175 dsc->modinsn[0] = ARM_NOP;
5176
5177 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5178 return 0;
5179}
5180
34518530
YQ
5181static int
5182thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5183 struct regcache *regs,
5184 struct displaced_step_closure *dsc)
5185{
5186 int link = bit (insn, 7);
5187 unsigned int rm = bits (insn, 3, 6);
5188
5189 if (debug_displaced)
5190 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5191 (unsigned short) insn);
5192
5193 dsc->modinsn[0] = THUMB_NOP;
5194
5195 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5196
5197 return 0;
5198}
5199
5200
0963b4bd 5201/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5202
5203static void
6e39997a 5204cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5205 struct regcache *regs, struct displaced_step_closure *dsc)
5206{
36073a92 5207 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5208 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5209 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5210 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5211}
5212
5213static int
7ff120b4
YQ
5214arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5215 struct displaced_step_closure *dsc)
cca44b1b
JB
5216{
5217 unsigned int rn = bits (insn, 16, 19);
5218 unsigned int rd = bits (insn, 12, 15);
5219 unsigned int op = bits (insn, 21, 24);
5220 int is_mov = (op == 0xd);
5221 ULONGEST rd_val, rn_val;
cca44b1b
JB
5222
5223 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5224 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5225
5226 if (debug_displaced)
5227 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5228 "%.8lx\n", is_mov ? "move" : "ALU",
5229 (unsigned long) insn);
5230
5231 /* Instruction is of form:
5232
5233 <op><cond> rd, [rn,] #imm
5234
5235 Rewrite as:
5236
5237 Preparation: tmp1, tmp2 <- r0, r1;
5238 r0, r1 <- rd, rn
5239 Insn: <op><cond> r0, r1, #imm
5240 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5241 */
5242
36073a92
YQ
5243 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5244 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5245 rn_val = displaced_read_reg (regs, dsc, rn);
5246 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5247 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5248 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5249 dsc->rd = rd;
5250
5251 if (is_mov)
5252 dsc->modinsn[0] = insn & 0xfff00fff;
5253 else
5254 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5255
5256 dsc->cleanup = &cleanup_alu_imm;
5257
5258 return 0;
5259}
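/* Illustrative sketch (hypothetical helper, mirroring the modinsn rewrite in
   arm_copy_alu_imm above): the Rn (bits 19:16) and Rd (bits 15:12) fields are
   cleared and replaced with r1 and r0.  For example, 0xe28f3004
   ("add r3, pc, #4") becomes 0xe2810004 ("add r0, r1, #4"); the cleanup then
   copies r0 back into r3.  */
#include <stdint.h>

static uint32_t
example_rewrite_alu_imm (uint32_t insn, int is_mov)
{
  uint32_t rewritten = insn & 0xfff00fff;	/* Clear Rn and Rd.  */

  /* MOV has no Rn field, so only the Rd := r0 substitution applies.  */
  return is_mov ? rewritten : (rewritten | 0x10000);
}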
5260
34518530
YQ
5261static int
5262thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5263 uint16_t insn2, struct regcache *regs,
5264 struct displaced_step_closure *dsc)
5265{
5266 unsigned int op = bits (insn1, 5, 8);
5267 unsigned int rn, rm, rd;
5268 ULONGEST rd_val, rn_val;
5269
5270 rn = bits (insn1, 0, 3); /* Rn */
5271 rm = bits (insn2, 0, 3); /* Rm */
5272 rd = bits (insn2, 8, 11); /* Rd */
5273
 5274 /* This routine is only called for the MOV instruction. */
5275 gdb_assert (op == 0x2 && rn == 0xf);
5276
5277 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5278 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5279
5280 if (debug_displaced)
5281 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5282 "ALU", insn1, insn2);
5283
5284 /* Instruction is of form:
5285
5286 <op><cond> rd, [rn,] #imm
5287
5288 Rewrite as:
5289
5290 Preparation: tmp1, tmp2 <- r0, r1;
5291 r0, r1 <- rd, rn
5292 Insn: <op><cond> r0, r1, #imm
5293 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5294 */
5295
5296 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5297 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5298 rn_val = displaced_read_reg (regs, dsc, rn);
5299 rd_val = displaced_read_reg (regs, dsc, rd);
5300 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5301 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5302 dsc->rd = rd;
5303
5304 dsc->modinsn[0] = insn1;
5305 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5306 dsc->numinsns = 2;
5307
5308 dsc->cleanup = &cleanup_alu_imm;
5309
5310 return 0;
5311}
5312
cca44b1b
JB
5313/* Copy/cleanup arithmetic/logic insns with register RHS. */
5314
5315static void
6e39997a 5316cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5317 struct regcache *regs, struct displaced_step_closure *dsc)
5318{
5319 ULONGEST rd_val;
5320 int i;
5321
36073a92 5322 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5323
5324 for (i = 0; i < 3; i++)
5325 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5326
5327 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5328}
5329
7ff120b4
YQ
5330static void
5331install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5332 struct displaced_step_closure *dsc,
5333 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5334{
cca44b1b 5335 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5336
cca44b1b
JB
5337 /* Instruction is of form:
5338
5339 <op><cond> rd, [rn,] rm [, <shift>]
5340
5341 Rewrite as:
5342
5343 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5344 r0, r1, r2 <- rd, rn, rm
ef713951 5345 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5346 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5347 */
5348
36073a92
YQ
5349 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5350 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5351 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5352 rd_val = displaced_read_reg (regs, dsc, rd);
5353 rn_val = displaced_read_reg (regs, dsc, rn);
5354 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5355 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5356 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5357 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5358 dsc->rd = rd;
5359
7ff120b4
YQ
5360 dsc->cleanup = &cleanup_alu_reg;
5361}
5362
5363static int
5364arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5365 struct displaced_step_closure *dsc)
5366{
5367 unsigned int op = bits (insn, 21, 24);
5368 int is_mov = (op == 0xd);
5369
5370 if (!insn_references_pc (insn, 0x000ff00ful))
5371 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5372
5373 if (debug_displaced)
5374 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5375 is_mov ? "move" : "ALU", (unsigned long) insn);
5376
cca44b1b
JB
5377 if (is_mov)
5378 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5379 else
5380 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5381
7ff120b4
YQ
5382 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5383 bits (insn, 0, 3));
cca44b1b
JB
5384 return 0;
5385}
5386
34518530
YQ
5387static int
5388thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5389 struct regcache *regs,
5390 struct displaced_step_closure *dsc)
5391{
ef713951 5392 unsigned rm, rd;
34518530 5393
ef713951
YQ
5394 rm = bits (insn, 3, 6);
5395 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5396
ef713951 5397 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5398 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5399
5400 if (debug_displaced)
ef713951
YQ
5401 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5402 (unsigned short) insn);
34518530 5403
ef713951 5404 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5405
ef713951 5406 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5407
5408 return 0;
5409}
5410
cca44b1b
JB
5411/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5412
5413static void
6e39997a 5414cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5415 struct regcache *regs,
5416 struct displaced_step_closure *dsc)
5417{
36073a92 5418 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5419 int i;
5420
5421 for (i = 0; i < 4; i++)
5422 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5423
5424 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5425}
5426
7ff120b4
YQ
5427static void
5428install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5429 struct displaced_step_closure *dsc,
5430 unsigned int rd, unsigned int rn, unsigned int rm,
5431 unsigned rs)
cca44b1b 5432{
7ff120b4 5433 int i;
cca44b1b 5434 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5435
cca44b1b
JB
5436 /* Instruction is of form:
5437
5438 <op><cond> rd, [rn,] rm, <shift> rs
5439
5440 Rewrite as:
5441
5442 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5443 r0, r1, r2, r3 <- rd, rn, rm, rs
5444 Insn: <op><cond> r0, r1, r2, <shift> r3
5445 Cleanup: tmp5 <- r0
5446 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5447 rd <- tmp5
5448 */
5449
5450 for (i = 0; i < 4; i++)
36073a92 5451 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5452
36073a92
YQ
5453 rd_val = displaced_read_reg (regs, dsc, rd);
5454 rn_val = displaced_read_reg (regs, dsc, rn);
5455 rm_val = displaced_read_reg (regs, dsc, rm);
5456 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5457 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5458 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5459 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5460 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5461 dsc->rd = rd;
7ff120b4
YQ
5462 dsc->cleanup = &cleanup_alu_shifted_reg;
5463}
5464
5465static int
5466arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5467 struct regcache *regs,
5468 struct displaced_step_closure *dsc)
5469{
5470 unsigned int op = bits (insn, 21, 24);
5471 int is_mov = (op == 0xd);
5472 unsigned int rd, rn, rm, rs;
5473
5474 if (!insn_references_pc (insn, 0x000fff0ful))
5475 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5476
5477 if (debug_displaced)
5478 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5479 "%.8lx\n", is_mov ? "move" : "ALU",
5480 (unsigned long) insn);
5481
5482 rn = bits (insn, 16, 19);
5483 rm = bits (insn, 0, 3);
5484 rs = bits (insn, 8, 11);
5485 rd = bits (insn, 12, 15);
cca44b1b
JB
5486
5487 if (is_mov)
5488 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5489 else
5490 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5491
7ff120b4 5492 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5493
5494 return 0;
5495}
5496
5497/* Clean up load instructions. */
5498
5499static void
6e39997a 5500cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5501 struct displaced_step_closure *dsc)
5502{
5503 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5504
36073a92 5505 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5506 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5507 rt_val2 = displaced_read_reg (regs, dsc, 1);
5508 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5509
5510 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5511 if (dsc->u.ldst.xfersize > 4)
5512 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5513 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5514 if (!dsc->u.ldst.immed)
5515 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5516
5517 /* Handle register writeback. */
5518 if (dsc->u.ldst.writeback)
5519 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5520 /* Put result in right place. */
5521 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5522 if (dsc->u.ldst.xfersize == 8)
5523 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5524}
5525
5526/* Clean up store instructions. */
5527
5528static void
6e39997a 5529cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5530 struct displaced_step_closure *dsc)
5531{
36073a92 5532 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5533
5534 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5535 if (dsc->u.ldst.xfersize > 4)
5536 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5537 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5538 if (!dsc->u.ldst.immed)
5539 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5540 if (!dsc->u.ldst.restore_r4)
5541 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5542
5543 /* Writeback. */
5544 if (dsc->u.ldst.writeback)
5545 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5546}
5547
5548/* Copy "extra" load/store instructions. These are halfword/doubleword
 5549 transfers, which have a different encoding from byte/word transfers. */
5550
5551static int
550dc4e2 5552arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5553 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5554{
5555 unsigned int op1 = bits (insn, 20, 24);
5556 unsigned int op2 = bits (insn, 5, 6);
5557 unsigned int rt = bits (insn, 12, 15);
5558 unsigned int rn = bits (insn, 16, 19);
5559 unsigned int rm = bits (insn, 0, 3);
5560 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5561 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5562 int immed = (op1 & 0x4) != 0;
5563 int opcode;
5564 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5565
5566 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5567 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5568
5569 if (debug_displaced)
5570 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5571 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5572 (unsigned long) insn);
5573
5574 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5575
5576 if (opcode < 0)
5577 internal_error (__FILE__, __LINE__,
5578 _("copy_extra_ld_st: instruction decode error"));
5579
36073a92
YQ
5580 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5581 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5582 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5583 if (!immed)
36073a92 5584 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5585
36073a92 5586 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5587 if (bytesize[opcode] == 8)
36073a92
YQ
5588 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5589 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5590 if (!immed)
36073a92 5591 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5592
5593 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5594 if (bytesize[opcode] == 8)
5595 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5596 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5597 if (!immed)
5598 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5599
5600 dsc->rd = rt;
5601 dsc->u.ldst.xfersize = bytesize[opcode];
5602 dsc->u.ldst.rn = rn;
5603 dsc->u.ldst.immed = immed;
5604 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5605 dsc->u.ldst.restore_r4 = 0;
5606
5607 if (immed)
5608 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5609 ->
5610 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5611 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5612 else
5613 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5614 ->
5615 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5616 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5617
5618 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5619
5620 return 0;
5621}
5622
0f6f04ba 5623/* Copy byte/half word/word loads and stores. */
cca44b1b 5624
7ff120b4 5625static void
0f6f04ba
YQ
5626install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5627 struct displaced_step_closure *dsc, int load,
5628 int immed, int writeback, int size, int usermode,
5629 int rt, int rm, int rn)
cca44b1b 5630{
cca44b1b 5631 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5632
36073a92
YQ
5633 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5634 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5635 if (!immed)
36073a92 5636 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5637 if (!load)
36073a92 5638 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5639
36073a92
YQ
5640 rt_val = displaced_read_reg (regs, dsc, rt);
5641 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5642 if (!immed)
36073a92 5643 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5644
5645 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5646 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5647 if (!immed)
5648 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5649 dsc->rd = rt;
0f6f04ba 5650 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5651 dsc->u.ldst.rn = rn;
5652 dsc->u.ldst.immed = immed;
7ff120b4 5653 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5654
5655 /* To write PC we can do:
5656
494e194e
YQ
5657 Before this sequence of instructions:
 5658 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
 5659 r2 is the Rn value obtained from displaced_read_reg.
5660
5661 Insn1: push {pc} Write address of STR instruction + offset on stack
5662 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5663 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5664 = addr(Insn1) + offset - addr(Insn3) - 8
5665 = offset - 16
5666 Insn4: add r4, r4, #8 r4 = offset - 8
5667 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5668 = from + offset
5669 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5670
5671 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5672 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5673 of this can be found in Section "Saving from r15" in
5674 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5675
7ff120b4
YQ
5676 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5677}
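/* Illustrative sketch (hypothetical helper): the arithmetic behind the
   six-insn sequence described in the comment above.  The sequence computes
   from + stored_pc_offset regardless of whether the core stores PC+8 or
   PC+12 for the "push {pc}", which is exactly the value the original STR
   would have stored.  */
#include <stdint.h>

static uint32_t
example_store_pc_value (uint32_t scratch_addr, uint32_t from,
                        uint32_t stored_pc_offset)
{
  uint32_t r0 = from + 8;				/* PC value read by displaced_read_reg.  */
  uint32_t r4 = scratch_addr + stored_pc_offset;	/* push {pc}; pop {r4}.  */

  r4 -= (scratch_addr + 8) + 8;	/* sub r4, r4, pc: Insn3 is at scratch_addr + 8.  */
  r4 += 8;			/* add r4, r4, #8.  */

  return r0 + r4;		/* add r0, r0, r4 => from + stored_pc_offset.  */
}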
5678
34518530
YQ
5679
5680static int
5681thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5682 uint16_t insn2, struct regcache *regs,
5683 struct displaced_step_closure *dsc, int size)
5684{
5685 unsigned int u_bit = bit (insn1, 7);
5686 unsigned int rt = bits (insn2, 12, 15);
5687 int imm12 = bits (insn2, 0, 11);
5688 ULONGEST pc_val;
5689
5690 if (debug_displaced)
5691 fprintf_unfiltered (gdb_stdlog,
5692 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5693 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5694 imm12);
5695
5696 if (!u_bit)
5697 imm12 = -1 * imm12;
5698
 5699 /* Rewrite instruction LDR Rt imm12 into:
 5700
 5701 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
 5702
 5703 LDR R0, [R2, R3]
 5704
 5705 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5706
5707
5708 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5709 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5710 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5711
5712 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5713
5714 pc_val = pc_val & 0xfffffffc;
5715
5716 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5717 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5718
5719 dsc->rd = rt;
5720
5721 dsc->u.ldst.xfersize = size;
5722 dsc->u.ldst.immed = 0;
5723 dsc->u.ldst.writeback = 0;
5724 dsc->u.ldst.restore_r4 = 0;
5725
 5726 /* LDR R0, [R2, R3] */
5727 dsc->modinsn[0] = 0xf852;
5728 dsc->modinsn[1] = 0x3;
5729 dsc->numinsns = 2;
5730
5731 dsc->cleanup = &cleanup_load;
5732
5733 return 0;
5734}
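/* Illustrative sketch (hypothetical helper): the effective address of the
   Thumb-2 literal load handled by thumb2_copy_load_literal above.  The PC is
   aligned down to a word boundary and the immediate is negated when the
   U bit is clear.  */
#include <stdint.h>

static uint32_t
example_literal_address (uint32_t pc_val, int u_bit, int32_t imm12)
{
  if (!u_bit)
    imm12 = -imm12;

  return (pc_val & 0xfffffffcu) + imm12;	/* Align (PC, 4) +/- imm12.  */
}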
5735
5736static int
5737thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5738 uint16_t insn2, struct regcache *regs,
5739 struct displaced_step_closure *dsc,
5740 int writeback, int immed)
5741{
5742 unsigned int rt = bits (insn2, 12, 15);
5743 unsigned int rn = bits (insn1, 0, 3);
5744 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5745 /* In LDR (register), there is also a register Rm, which is not allowed to
5746 be PC, so we don't have to check it. */
5747
5748 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5749 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5750 dsc);
5751
5752 if (debug_displaced)
5753 fprintf_unfiltered (gdb_stdlog,
5754 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5755 rt, rn, insn1, insn2);
5756
5757 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5758 0, rt, rm, rn);
5759
5760 dsc->u.ldst.restore_r4 = 0;
5761
5762 if (immed)
5763 /* ldr[b]<cond> rt, [rn, #imm], etc.
5764 ->
5765 ldr[b]<cond> r0, [r2, #imm]. */
5766 {
5767 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5768 dsc->modinsn[1] = insn2 & 0x0fff;
5769 }
5770 else
5771 /* ldr[b]<cond> rt, [rn, rm], etc.
5772 ->
5773 ldr[b]<cond> r0, [r2, r3]. */
5774 {
5775 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5776 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5777 }
5778
5779 dsc->numinsns = 2;
5780
5781 return 0;
5782}
5783
5784
7ff120b4
YQ
5785static int
5786arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5787 struct regcache *regs,
5788 struct displaced_step_closure *dsc,
0f6f04ba 5789 int load, int size, int usermode)
7ff120b4
YQ
5790{
5791 int immed = !bit (insn, 25);
5792 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5793 unsigned int rt = bits (insn, 12, 15);
5794 unsigned int rn = bits (insn, 16, 19);
5795 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5796
5797 if (!insn_references_pc (insn, 0x000ff00ful))
5798 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5799
5800 if (debug_displaced)
5801 fprintf_unfiltered (gdb_stdlog,
5802 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5803 load ? (size == 1 ? "ldrb" : "ldr")
5804 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5805 rt, rn,
5806 (unsigned long) insn);
5807
0f6f04ba
YQ
5808 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5809 usermode, rt, rm, rn);
7ff120b4 5810
bf9f652a 5811 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5812 {
5813 dsc->u.ldst.restore_r4 = 0;
5814
5815 if (immed)
5816 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5817 ->
5818 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5819 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5820 else
5821 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5822 ->
5823 {ldr,str}[b]<cond> r0, [r2, r3]. */
5824 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5825 }
5826 else
5827 {
5828 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5829 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5830 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5831 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5832 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5833 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5834 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5835
5836 /* As above. */
5837 if (immed)
5838 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5839 else
5840 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5841
cca44b1b
JB
5842 dsc->numinsns = 6;
5843 }
5844
5845 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5846
5847 return 0;
5848}
5849
5850/* Cleanup LDM instructions with fully-populated register list. This is an
5851 unfortunate corner case: it's impossible to implement correctly by modifying
5852 the instruction. The issue is as follows: we have an instruction,
5853
5854 ldm rN, {r0-r15}
5855
5856 which we must rewrite to avoid loading PC. A possible solution would be to
5857 do the load in two halves, something like (with suitable cleanup
5858 afterwards):
5859
5860 mov r8, rN
5861 ldm[id][ab] r8!, {r0-r7}
5862 str r7, <temp>
5863 ldm[id][ab] r8, {r7-r14}
5864 <bkpt>
5865
5866 but at present there's no suitable place for <temp>, since the scratch space
5867 is overwritten before the cleanup routine is called. For now, we simply
5868 emulate the instruction. */
5869
5870static void
5871cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5872 struct displaced_step_closure *dsc)
5873{
cca44b1b
JB
5874 int inc = dsc->u.block.increment;
5875 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5876 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5877 uint32_t regmask = dsc->u.block.regmask;
5878 int regno = inc ? 0 : 15;
5879 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5880 int exception_return = dsc->u.block.load && dsc->u.block.user
5881 && (regmask & 0x8000) != 0;
36073a92 5882 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5883 int do_transfer = condition_true (dsc->u.block.cond, status);
5884 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5885
5886 if (!do_transfer)
5887 return;
5888
5889 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5890 sensible we can do here. Complain loudly. */
5891 if (exception_return)
5892 error (_("Cannot single-step exception return"));
5893
5894 /* We don't handle any stores here for now. */
5895 gdb_assert (dsc->u.block.load != 0);
5896
5897 if (debug_displaced)
5898 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5899 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5900 dsc->u.block.increment ? "inc" : "dec",
5901 dsc->u.block.before ? "before" : "after");
5902
5903 while (regmask)
5904 {
5905 uint32_t memword;
5906
5907 if (inc)
bf9f652a 5908 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5909 regno++;
5910 else
5911 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5912 regno--;
5913
5914 xfer_addr += bump_before;
5915
5916 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5917 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5918
5919 xfer_addr += bump_after;
5920
5921 regmask &= ~(1 << regno);
5922 }
5923
5924 if (dsc->u.block.writeback)
5925 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5926 CANNOT_WRITE_PC);
5927}
5928
5929/* Clean up an STM which included the PC in the register list. */
5930
5931static void
5932cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5933 struct displaced_step_closure *dsc)
5934{
36073a92 5935 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5936 int store_executed = condition_true (dsc->u.block.cond, status);
5937 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5938 CORE_ADDR stm_insn_addr;
5939 uint32_t pc_val;
5940 long offset;
5941 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5942
5943 /* If condition code fails, there's nothing else to do. */
5944 if (!store_executed)
5945 return;
5946
5947 if (dsc->u.block.increment)
5948 {
5949 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5950
5951 if (dsc->u.block.before)
5952 pc_stored_at += 4;
5953 }
5954 else
5955 {
5956 pc_stored_at = dsc->u.block.xfer_addr;
5957
5958 if (dsc->u.block.before)
5959 pc_stored_at -= 4;
5960 }
5961
5962 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5963 stm_insn_addr = dsc->scratch_base;
5964 offset = pc_val - stm_insn_addr;
5965
5966 if (debug_displaced)
5967 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5968 "STM instruction\n", offset);
5969
5970 /* Rewrite the stored PC to the proper value for the non-displaced original
5971 instruction. */
5972 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5973 dsc->insn_addr + offset);
5974}
5975
5976/* Clean up an LDM which includes the PC in the register list. We clumped all
5977 the registers in the transferred list into a contiguous range r0...rX (to
5978 avoid loading PC directly and losing control of the debugged program), so we
5979 must undo that here. */
5980
5981static void
6e39997a 5982cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5983 struct regcache *regs,
5984 struct displaced_step_closure *dsc)
5985{
36073a92 5986 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5987 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5988 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5989 unsigned int regs_loaded = bitcount (mask);
5990 unsigned int num_to_shuffle = regs_loaded, clobbered;
5991
5992 /* The method employed here will fail if the register list is fully populated
5993 (we need to avoid loading PC directly). */
5994 gdb_assert (num_to_shuffle < 16);
5995
5996 if (!load_executed)
5997 return;
5998
5999 clobbered = (1 << num_to_shuffle) - 1;
6000
6001 while (num_to_shuffle > 0)
6002 {
6003 if ((mask & (1 << write_reg)) != 0)
6004 {
6005 unsigned int read_reg = num_to_shuffle - 1;
6006
6007 if (read_reg != write_reg)
6008 {
36073a92 6009 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
6010 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6011 if (debug_displaced)
6012 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6013 "loaded register r%d to r%d\n"), read_reg,
6014 write_reg);
6015 }
6016 else if (debug_displaced)
6017 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6018 "r%d already in the right place\n"),
6019 write_reg);
6020
6021 clobbered &= ~(1 << write_reg);
6022
6023 num_to_shuffle--;
6024 }
6025
6026 write_reg--;
6027 }
6028
6029 /* Restore any registers we scribbled over. */
6030 for (write_reg = 0; clobbered != 0; write_reg++)
6031 {
6032 if ((clobbered & (1 << write_reg)) != 0)
6033 {
6034 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6035 CANNOT_WRITE_PC);
6036 if (debug_displaced)
6037 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6038 "clobbered register r%d\n"), write_reg);
6039 clobbered &= ~(1 << write_reg);
6040 }
6041 }
6042
6043 /* Perform register writeback manually. */
6044 if (dsc->u.block.writeback)
6045 {
6046 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6047
6048 if (dsc->u.block.increment)
6049 new_rn_val += regs_loaded * 4;
6050 else
6051 new_rn_val -= regs_loaded * 4;
6052
6053 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6054 CANNOT_WRITE_PC);
6055 }
6056}
6057
6058/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6059 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6060
6061static int
7ff120b4
YQ
6062arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6063 struct regcache *regs,
6064 struct displaced_step_closure *dsc)
cca44b1b
JB
6065{
6066 int load = bit (insn, 20);
6067 int user = bit (insn, 22);
6068 int increment = bit (insn, 23);
6069 int before = bit (insn, 24);
6070 int writeback = bit (insn, 21);
6071 int rn = bits (insn, 16, 19);
cca44b1b 6072
0963b4bd
MS
6073 /* Block transfers which don't mention PC can be run directly
6074 out-of-line. */
bf9f652a 6075 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6076 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6077
bf9f652a 6078 if (rn == ARM_PC_REGNUM)
cca44b1b 6079 {
0963b4bd
MS
6080 warning (_("displaced: Unpredictable LDM or STM with "
6081 "base register r15"));
7ff120b4 6082 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6083 }
6084
6085 if (debug_displaced)
6086 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6087 "%.8lx\n", (unsigned long) insn);
6088
36073a92 6089 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6090 dsc->u.block.rn = rn;
6091
6092 dsc->u.block.load = load;
6093 dsc->u.block.user = user;
6094 dsc->u.block.increment = increment;
6095 dsc->u.block.before = before;
6096 dsc->u.block.writeback = writeback;
6097 dsc->u.block.cond = bits (insn, 28, 31);
6098
6099 dsc->u.block.regmask = insn & 0xffff;
6100
6101 if (load)
6102 {
6103 if ((insn & 0xffff) == 0xffff)
6104 {
6105 /* LDM with a fully-populated register list. This case is
6106 particularly tricky. Implement for now by fully emulating the
6107 instruction (which might not behave perfectly in all cases, but
6108 these instructions should be rare enough for that not to matter
6109 too much). */
6110 dsc->modinsn[0] = ARM_NOP;
6111
6112 dsc->cleanup = &cleanup_block_load_all;
6113 }
6114 else
6115 {
6116 /* LDM of a list of registers which includes PC. Implement by
6117 rewriting the list of registers to be transferred into a
6118 contiguous chunk r0...rX before doing the transfer, then shuffling
6119 registers into the correct places in the cleanup routine. */
6120 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6121 unsigned int num_in_list = bitcount (regmask), new_regmask;
6122 unsigned int i;
cca44b1b
JB
6123
6124 for (i = 0; i < num_in_list; i++)
36073a92 6125 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6126
6127 /* Writeback makes things complicated. We need to avoid clobbering
6128 the base register with one of the registers in our modified
6129 register list, but just using a different register can't work in
6130 all cases, e.g.:
6131
6132 ldm r14!, {r0-r13,pc}
6133
6134 which would need to be rewritten as:
6135
6136 ldm rN!, {r0-r14}
6137
6138 but that can't work, because there's no free register for N.
6139
6140 Solve this by turning off the writeback bit, and emulating
6141 writeback manually in the cleanup routine. */
6142
6143 if (writeback)
6144 insn &= ~(1 << 21);
6145
6146 new_regmask = (1 << num_in_list) - 1;
6147
6148 if (debug_displaced)
6149 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6150 "{..., pc}: original reg list %.4x, modified "
6151 "list %.4x\n"), rn, writeback ? "!" : "",
6152 (int) insn & 0xffff, new_regmask);
6153
6154 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6155
6156 dsc->cleanup = &cleanup_block_load_pc;
6157 }
6158 }
6159 else
6160 {
6161 /* STM of a list of registers which includes PC. Run the instruction
6162 as-is, but out of line: this will store the wrong value for the PC,
6163 so we must manually fix up the memory in the cleanup routine.
6164 Doing things this way has the advantage that we can auto-detect
6165 the offset of the PC write (which is architecture-dependent) in
6166 the cleanup routine. */
6167 dsc->modinsn[0] = insn;
6168
6169 dsc->cleanup = &cleanup_block_store_pc;
6170 }
6171
6172 return 0;
6173}
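/* Illustrative sketch (hypothetical helper, mirroring arm_copy_block_xfer and
   cleanup_block_load_pc above): an LDM whose register list includes PC is
   rewritten to load into the contiguous range r0..r(N-1); the cleanup then
   shuffles each loaded value into its real destination register.  */
#include <stdint.h>

static uint16_t
example_rewrite_ldm_regmask (uint16_t regmask)
{
  int num_in_list = 0;
  uint16_t m;

  for (m = regmask; m != 0; m &= m - 1)	/* Count set bits, as bitcount () does.  */
    num_in_list++;

  return (uint16_t) ((1 << num_in_list) - 1);	/* e.g. {r4, r7, pc} -> {r0, r1, r2}.  */
}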
6174
34518530
YQ
6175static int
6176thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6177 struct regcache *regs,
6178 struct displaced_step_closure *dsc)
cca44b1b 6179{
34518530
YQ
6180 int rn = bits (insn1, 0, 3);
6181 int load = bit (insn1, 4);
6182 int writeback = bit (insn1, 5);
cca44b1b 6183
34518530
YQ
6184 /* Block transfers which don't mention PC can be run directly
6185 out-of-line. */
6186 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6187 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6188
34518530
YQ
6189 if (rn == ARM_PC_REGNUM)
6190 {
6191 warning (_("displaced: Unpredictable LDM or STM with "
6192 "base register r15"));
6193 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6194 "unpredictable ldm/stm", dsc);
6195 }
cca44b1b
JB
6196
6197 if (debug_displaced)
34518530
YQ
6198 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6199 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6200
34518530
YQ
 6201 /* Clear bit 13, since it should always be zero. */
6202 dsc->u.block.regmask = (insn2 & 0xdfff);
6203 dsc->u.block.rn = rn;
cca44b1b 6204
34518530
YQ
6205 dsc->u.block.load = load;
6206 dsc->u.block.user = 0;
6207 dsc->u.block.increment = bit (insn1, 7);
6208 dsc->u.block.before = bit (insn1, 8);
6209 dsc->u.block.writeback = writeback;
6210 dsc->u.block.cond = INST_AL;
6211 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6212
34518530
YQ
6213 if (load)
6214 {
6215 if (dsc->u.block.regmask == 0xffff)
6216 {
 6217 /* This case cannot happen: bit 13 is cleared above, so the register mask can never be 0xffff. */
6218 gdb_assert (0);
6219 }
6220 else
6221 {
6222 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6223 unsigned int num_in_list = bitcount (regmask), new_regmask;
6224 unsigned int i;
34518530
YQ
6225
6226 for (i = 0; i < num_in_list; i++)
6227 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6228
6229 if (writeback)
6230 insn1 &= ~(1 << 5);
6231
6232 new_regmask = (1 << num_in_list) - 1;
6233
6234 if (debug_displaced)
6235 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6236 "{..., pc}: original reg list %.4x, modified "
6237 "list %.4x\n"), rn, writeback ? "!" : "",
6238 (int) dsc->u.block.regmask, new_regmask);
6239
6240 dsc->modinsn[0] = insn1;
6241 dsc->modinsn[1] = (new_regmask & 0xffff);
6242 dsc->numinsns = 2;
6243
6244 dsc->cleanup = &cleanup_block_load_pc;
6245 }
6246 }
6247 else
6248 {
6249 dsc->modinsn[0] = insn1;
6250 dsc->modinsn[1] = insn2;
6251 dsc->numinsns = 2;
6252 dsc->cleanup = &cleanup_block_store_pc;
6253 }
6254 return 0;
6255}
6256
d9311bfa
AT
6257/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6258 This is used to avoid a dependency on BFD's bfd_endian enum. */
6259
6260ULONGEST
6261arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6262 int byte_order)
6263{
5f2dfcfd
AT
6264 return read_memory_unsigned_integer (memaddr, len,
6265 (enum bfd_endian) byte_order);
d9311bfa
AT
6266}
6267
6268/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6269
6270CORE_ADDR
6271arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6272 CORE_ADDR val)
6273{
6274 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6275}
6276
6277/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6278
e7cf25a8 6279static CORE_ADDR
553cb527 6280arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6281{
d9311bfa
AT
6282 return 0;
6283}
6284
6285/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6286
6287int
6288arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6289{
6290 return arm_is_thumb (self->regcache);
6291}
6292
6293/* single_step() is called just before we want to resume the inferior,
6294 if we want to single-step it but there is no hardware or kernel
 6295 single-step support. We find the targets of the upcoming instructions
 6296 and set breakpoints on them. */
6297
93f9a11f 6298VEC (CORE_ADDR) *
f5ea389a 6299arm_software_single_step (struct regcache *regcache)
d9311bfa 6300{
d9311bfa 6301 struct gdbarch *gdbarch = get_regcache_arch (regcache);
d9311bfa
AT
6302 struct arm_get_next_pcs next_pcs_ctx;
6303 CORE_ADDR pc;
6304 int i;
6305 VEC (CORE_ADDR) *next_pcs = NULL;
6306 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6307
6308 arm_get_next_pcs_ctor (&next_pcs_ctx,
6309 &arm_get_next_pcs_ops,
6310 gdbarch_byte_order (gdbarch),
6311 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6312 0,
d9311bfa
AT
6313 regcache);
6314
4d18591b 6315 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa
AT
6316
6317 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
771da62d
YQ
6318 {
6319 pc = gdbarch_addr_bits_remove (gdbarch, pc);
0bc5d801 6320 VEC_replace (CORE_ADDR, next_pcs, i, pc);
771da62d 6321 }
d9311bfa 6322
93f9a11f 6323 discard_cleanups (old_chain);
d9311bfa 6324
93f9a11f 6325 return next_pcs;
d9311bfa
AT
6326}
6327
34518530
YQ
6328/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6329 for Linux, where some SVC instructions must be treated specially. */
6330
6331static void
6332cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6333 struct displaced_step_closure *dsc)
6334{
6335 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6336
6337 if (debug_displaced)
6338 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6339 "%.8lx\n", (unsigned long) resume_addr);
6340
6341 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6342}
6343
6344
 6345/* Common copy routine for the SVC instruction. */
6346
6347static int
6348install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6349 struct displaced_step_closure *dsc)
6350{
6351 /* Preparation: none.
6352 Insn: unmodified svc.
6353 Cleanup: pc <- insn_addr + insn_size. */
6354
6355 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6356 instruction. */
6357 dsc->wrote_to_pc = 1;
6358
6359 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6360 if (dsc->u.svc.copy_svc_os)
6361 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6362 else
6363 {
6364 dsc->cleanup = &cleanup_svc;
6365 return 0;
6366 }
34518530
YQ
6367}
6368
6369static int
6370arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6371 struct regcache *regs, struct displaced_step_closure *dsc)
6372{
6373
6374 if (debug_displaced)
6375 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6376 (unsigned long) insn);
6377
6378 dsc->modinsn[0] = insn;
6379
6380 return install_svc (gdbarch, regs, dsc);
6381}
6382
6383static int
6384thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6385 struct regcache *regs, struct displaced_step_closure *dsc)
6386{
6387
6388 if (debug_displaced)
6389 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6390 insn);
bd18283a 6391
34518530
YQ
6392 dsc->modinsn[0] = insn;
6393
6394 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6395}
6396
6397/* Copy undefined instructions. */
6398
6399static int
7ff120b4
YQ
6400arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6401 struct displaced_step_closure *dsc)
cca44b1b
JB
6402{
6403 if (debug_displaced)
0963b4bd
MS
6404 fprintf_unfiltered (gdb_stdlog,
6405 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6406 (unsigned long) insn);
6407
6408 dsc->modinsn[0] = insn;
6409
6410 return 0;
6411}
6412
34518530
YQ
6413static int
6414thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6415 struct displaced_step_closure *dsc)
6416{
6417
6418 if (debug_displaced)
6419 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6420 "%.4x %.4x\n", (unsigned short) insn1,
6421 (unsigned short) insn2);
6422
6423 dsc->modinsn[0] = insn1;
6424 dsc->modinsn[1] = insn2;
6425 dsc->numinsns = 2;
6426
6427 return 0;
6428}
6429
cca44b1b
JB
6430/* Copy unpredictable instructions. */
6431
6432static int
7ff120b4
YQ
6433arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6434 struct displaced_step_closure *dsc)
cca44b1b
JB
6435{
6436 if (debug_displaced)
6437 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6438 "%.8lx\n", (unsigned long) insn);
6439
6440 dsc->modinsn[0] = insn;
6441
6442 return 0;
6443}
6444
6445/* The decode_* functions are instruction decoding helpers. They mostly follow
6446 the presentation in the ARM ARM. */
6447
6448static int
7ff120b4
YQ
6449arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6450 struct regcache *regs,
6451 struct displaced_step_closure *dsc)
cca44b1b
JB
6452{
6453 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6454 unsigned int rn = bits (insn, 16, 19);
6455
6456 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6457 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6458 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6459 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6460 else if ((op1 & 0x60) == 0x20)
7ff120b4 6461 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6462 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6463 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6464 dsc);
cca44b1b 6465 else if ((op1 & 0x77) == 0x41)
7ff120b4 6466 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6467 else if ((op1 & 0x77) == 0x45)
7ff120b4 6468 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6469 else if ((op1 & 0x77) == 0x51)
6470 {
6471 if (rn != 0xf)
7ff120b4 6472 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6473 else
7ff120b4 6474 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6475 }
6476 else if ((op1 & 0x77) == 0x55)
7ff120b4 6477 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6478 else if (op1 == 0x57)
6479 switch (op2)
6480 {
7ff120b4
YQ
6481 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6482 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6483 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6484 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6485 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6486 }
6487 else if ((op1 & 0x63) == 0x43)
7ff120b4 6488 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6489 else if ((op2 & 0x1) == 0x0)
6490 switch (op1 & ~0x80)
6491 {
6492 case 0x61:
7ff120b4 6493 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6494 case 0x65:
7ff120b4 6495 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6496 case 0x71: case 0x75:
6497 /* pld/pldw reg. */
7ff120b4 6498 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6499 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6500 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6501 default:
7ff120b4 6502 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6503 }
6504 else
7ff120b4 6505 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6506}
6507
6508static int
7ff120b4
YQ
6509arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6510 struct regcache *regs,
6511 struct displaced_step_closure *dsc)
cca44b1b
JB
6512{
6513 if (bit (insn, 27) == 0)
7ff120b4 6514 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6515 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6516 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6517 {
6518 case 0x0: case 0x2:
7ff120b4 6519 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6520
6521 case 0x1: case 0x3:
7ff120b4 6522 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6523
6524 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6525 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6526
6527 case 0x8:
6528 switch ((insn & 0xe00000) >> 21)
6529 {
6530 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6531 /* stc/stc2. */
7ff120b4 6532 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6533
6534 case 0x2:
7ff120b4 6535 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6536
6537 default:
7ff120b4 6538 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6539 }
6540
6541 case 0x9:
6542 {
6543 int rn_f = (bits (insn, 16, 19) == 0xf);
6544 switch ((insn & 0xe00000) >> 21)
6545 {
6546 case 0x1: case 0x3:
6547 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6548 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6549 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6550
6551 case 0x2:
7ff120b4 6552 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6553
6554 case 0x4: case 0x5: case 0x6: case 0x7:
6555 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6556 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6557 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6558
6559 default:
7ff120b4 6560 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6561 }
6562 }
6563
6564 case 0xa:
7ff120b4 6565 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6566
6567 case 0xb:
6568 if (bits (insn, 16, 19) == 0xf)
6569 /* ldc/ldc2 lit. */
7ff120b4 6570 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6571 else
7ff120b4 6572 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6573
6574 case 0xc:
6575 if (bit (insn, 4))
7ff120b4 6576 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6577 else
7ff120b4 6578 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6579
6580 case 0xd:
6581 if (bit (insn, 4))
7ff120b4 6582 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6583 else
7ff120b4 6584 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6585
6586 default:
7ff120b4 6587 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6588 }
6589}
6590
6591/* Decode miscellaneous instructions in dp/misc encoding space. */
6592
6593static int
7ff120b4
YQ
6594arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6595 struct regcache *regs,
6596 struct displaced_step_closure *dsc)
cca44b1b
JB
6597{
6598 unsigned int op2 = bits (insn, 4, 6);
6599 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6600
6601 switch (op2)
6602 {
6603 case 0x0:
7ff120b4 6604 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6605
6606 case 0x1:
6607 if (op == 0x1) /* bx. */
7ff120b4 6608 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6609 else if (op == 0x3)
7ff120b4 6610 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6611 else
7ff120b4 6612 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6613
6614 case 0x2:
6615 if (op == 0x1)
6616 /* Not really supported. */
7ff120b4 6617 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6618 else
7ff120b4 6619 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6620
6621 case 0x3:
6622 if (op == 0x1)
7ff120b4 6623 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6624 regs, dsc); /* blx register. */
cca44b1b 6625 else
7ff120b4 6626 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6627
6628 case 0x5:
7ff120b4 6629 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6630
6631 case 0x7:
6632 if (op == 0x1)
7ff120b4 6633 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6634 else if (op == 0x3)
6635 /* Not really supported. */
7ff120b4 6636 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6637
6638 default:
7ff120b4 6639 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6640 }
6641}
6642
6643static int
7ff120b4
YQ
6644arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6645 struct regcache *regs,
6646 struct displaced_step_closure *dsc)
cca44b1b
JB
6647{
6648 if (bit (insn, 25))
6649 switch (bits (insn, 20, 24))
6650 {
6651 case 0x10:
7ff120b4 6652 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6653
6654 case 0x14:
7ff120b4 6655 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6656
6657 case 0x12: case 0x16:
7ff120b4 6658 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6659
6660 default:
7ff120b4 6661 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6662 }
6663 else
6664 {
6665 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6666
6667 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6668 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6669 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6670 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6671 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6672 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6673 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6674 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6675 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6676 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6677 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6678 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6679 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6680 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6681 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6682 dsc);
cca44b1b
JB
6683 }
6684
6685 /* Should be unreachable. */
6686 return 1;
6687}
6688
6689static int
7ff120b4
YQ
6690arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6691 struct regcache *regs,
6692 struct displaced_step_closure *dsc)
cca44b1b
JB
6693{
6694 int a = bit (insn, 25), b = bit (insn, 4);
6695 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6696
6697 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6698 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6699 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6700 else if ((!a && (op1 & 0x17) == 0x02)
6701 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6702 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6703 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6704 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6705 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6706 else if ((!a && (op1 & 0x17) == 0x03)
6707 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6708 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6709 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6710 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6711 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6712 else if ((!a && (op1 & 0x17) == 0x06)
6713 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6714 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6715 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6716 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6717 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6718 else if ((!a && (op1 & 0x17) == 0x07)
6719 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6720 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6721
6722 /* Should be unreachable. */
6723 return 1;
6724}
6725
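/* Decode a media instruction (parallel add/subtract, pack/unpack, saturate,
   reverse, usad8/usada8 and bit-field operations) and copy it for displaced
   stepping.  */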
6726static int
7ff120b4
YQ
6727arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6728 struct displaced_step_closure *dsc)
cca44b1b
JB
6729{
6730 switch (bits (insn, 20, 24))
6731 {
6732 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6733 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6734
6735 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6736 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6737
6738 case 0x08: case 0x09: case 0x0a: case 0x0b:
6739 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6740 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6741 "decode/pack/unpack/saturate/reverse", dsc);
6742
6743 case 0x18:
6744 if (bits (insn, 5, 7) == 0) /* op2. */
6745 {
6746 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6747 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6748 else
7ff120b4 6749 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6750 }
6751 else
7ff120b4 6752 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6753
6754 case 0x1a: case 0x1b:
6755 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6756 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6757 else
7ff120b4 6758 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6759
6760 case 0x1c: case 0x1d:
6761 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6762 {
6763 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6764 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6765 else
7ff120b4 6766 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6767 }
6768 else
7ff120b4 6769 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6770
6771 case 0x1e: case 0x1f:
6772 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6773 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6774 else
7ff120b4 6775 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6776 }
6777
6778 /* Should be unreachable. */
6779 return 1;
6780}
6781
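/* Decode a branch, branch-with-link or block transfer (LDM/STM) instruction
   and copy it for displaced stepping.  */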
6782static int
615234c1 6783arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4
YQ
6784 struct regcache *regs,
6785 struct displaced_step_closure *dsc)
cca44b1b
JB
6786{
6787 if (bit (insn, 25))
7ff120b4 6788 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6789 else
7ff120b4 6790 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6791}
6792
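/* Decode a VFP/Neon extension register load/store instruction and copy it
   for displaced stepping.  */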
6793static int
7ff120b4
YQ
6794arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6795 struct regcache *regs,
6796 struct displaced_step_closure *dsc)
cca44b1b
JB
6797{
6798 unsigned int opcode = bits (insn, 20, 24);
6799
6800 switch (opcode)
6801 {
6802 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6803 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6804
6805 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6806 case 0x12: case 0x16:
7ff120b4 6807 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6808
6809 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6810 case 0x13: case 0x17:
7ff120b4 6811 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6812
6813 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6814 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6815 /* Note: no writeback for these instructions. Bit 25 will always be
6816 zero though (via caller), so the following works OK. */
7ff120b4 6817 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6818 }
6819
6820 /* Should be unreachable. */
6821 return 1;
6822}
6823
34518530
YQ
6824/* Decode shifted register instructions. */
6825
6826static int
6827thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6828 uint16_t insn2, struct regcache *regs,
6829 struct displaced_step_closure *dsc)
6830{
6831 /* PC is only allowed to be used in the MOV instruction. */
6832
6833 unsigned int op = bits (insn1, 5, 8);
6834 unsigned int rn = bits (insn1, 0, 3);
6835
6836 if (op == 0x2 && rn == 0xf) /* MOV */
6837 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6838 else
6839 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6840 "dp (shift reg)", dsc);
6841}
6842
6843
6844/* Decode extension register load/store. Exactly the same as
6845 arm_decode_ext_reg_ld_st. */
6846
6847static int
6848thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6849 uint16_t insn2, struct regcache *regs,
6850 struct displaced_step_closure *dsc)
6851{
6852 unsigned int opcode = bits (insn1, 4, 8);
6853
6854 switch (opcode)
6855 {
6856 case 0x04: case 0x05:
6857 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6858 "vfp/neon vmov", dsc);
6859
6860 case 0x08: case 0x0c: /* 01x00 */
6861 case 0x0a: case 0x0e: /* 01x10 */
6862 case 0x12: case 0x16: /* 10x10 */
6863 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6864 "vfp/neon vstm/vpush", dsc);
6865
6866 case 0x09: case 0x0d: /* 01x01 */
6867 case 0x0b: case 0x0f: /* 01x11 */
6868 case 0x13: case 0x17: /* 10x11 */
6869 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6870 "vfp/neon vldm/vpop", dsc);
6871
6872 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6873 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6874 "vstr", dsc);
6875 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6876 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6877 }
6878
6879 /* Should be unreachable. */
6880 return 1;
6881}
6882
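/* Decode the coprocessor and supervisor call instruction space (including
   VFP/Neon register transfers and loads/stores) and copy the instruction
   for displaced stepping.  */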
cca44b1b 6883static int
12545665 6884arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6885 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6886{
6887 unsigned int op1 = bits (insn, 20, 25);
6888 int op = bit (insn, 4);
6889 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6890
6891 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6892 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6893 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6894 && (coproc & 0xe) != 0xa)
6895 /* stc/stc2. */
7ff120b4 6896 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6897 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6898 && (coproc & 0xe) != 0xa)
6899 /* ldc/ldc2 imm/lit. */
7ff120b4 6900 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6901 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6902 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6903 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6904 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6905 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6906 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6907 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6908 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6909 else if ((op1 & 0x30) == 0x20 && !op)
6910 {
6911 if ((coproc & 0xe) == 0xa)
7ff120b4 6912 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6913 else
7ff120b4 6914 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6915 }
6916 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6917 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6918 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6919 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6920 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6921 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6922 else if ((op1 & 0x30) == 0x30)
7ff120b4 6923 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6924 else
7ff120b4 6925 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6926}
6927
34518530
YQ
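/* Decode a 32-bit Thumb coprocessor, SIMD or VFP instruction and copy it
   for displaced stepping.  */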
6928static int
6929thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6930 uint16_t insn2, struct regcache *regs,
6931 struct displaced_step_closure *dsc)
6932{
6933 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6934 unsigned int bit_5_8 = bits (insn1, 5, 8);
6935 unsigned int bit_9 = bit (insn1, 9);
6936 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6937
6938 if (bit_9 == 0)
6939 {
6940 if (bit_5_8 == 2)
6941 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6942 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6943 dsc);
6944 else if (bit_5_8 == 0) /* UNDEFINED. */
6945 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6946 else
6947 {
6948 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
6949 if ((coproc & 0xe) == 0xa)
6950 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6951 dsc);
6952 else /* coproc is not 101x. */
6953 {
6954 if (bit_4 == 0) /* STC/STC2. */
6955 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6956 "stc/stc2", dsc);
6957 else /* LDC/LDC2 {literal, immediate}. */
6958 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6959 regs, dsc);
6960 }
6961 }
6962 }
6963 else
6964 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6965
6966 return 0;
6967}
6968
6969static void
6970install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6971 struct displaced_step_closure *dsc, int rd)
6972{
6973 /* ADR Rd, #imm
6974
6975 Rewrite as:
6976
6977 Preparation: Rd <- PC
6978 Insn: ADD Rd, #imm
6979 Cleanup: Null.
6980 */
6981
6982 /* Rd <- PC */
6983 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6984 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6985}
6986
6987static int
6988thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6989 struct displaced_step_closure *dsc,
6990 int rd, unsigned int imm)
6991{
6992
6993 /* Encoding T2: ADDS Rd, #imm */
6994 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6995
6996 install_pc_relative (gdbarch, regs, dsc, rd);
6997
6998 return 0;
6999}
7000
7001static int
7002thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7003 struct regcache *regs,
7004 struct displaced_step_closure *dsc)
7005{
7006 unsigned int rd = bits (insn, 8, 10);
7007 unsigned int imm8 = bits (insn, 0, 7);
7008
7009 if (debug_displaced)
7010 fprintf_unfiltered (gdb_stdlog,
7011 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7012 rd, imm8, insn);
7013
7014 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7015}
7016
7017static int
7018thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7019 uint16_t insn2, struct regcache *regs,
7020 struct displaced_step_closure *dsc)
7021{
7022 unsigned int rd = bits (insn2, 8, 11);
7023 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
7024 extract the raw immediate fields rather than computing the value. When
7025 generating the ADD or SUB instruction, we can then OR those fields
7026 straight into the new encoding. */
7027 unsigned int imm_3_8 = insn2 & 0x70ff;
7028 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
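 /* In these encodings the immediate is split as i:imm3:imm8 -- i is bit 10
    of the first halfword, imm3 is bits 14-12 and imm8 is bits 7-0 of the
    second halfword -- so the masks above keep the fields in place, ready to
    be ORed into the generated ADD or SUB below.  */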
7029
7030 if (debug_displaced)
7031 fprintf_unfiltered (gdb_stdlog,
7032 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7033 rd, imm_i, imm_3_8, insn1, insn2);
7034
7035 if (bit (insn1, 7)) /* Encoding T2 */
7036 {
7037 /* Encoding T3: SUB Rd, Rd, #imm */
7038 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7039 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7040 }
7041 else /* Encoding T3 */
7042 {
7043 /* Encoding T3: ADD Rd, Rd, #imm */
7044 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7045 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7046 }
7047 dsc->numinsns = 2;
7048
7049 install_pc_relative (gdbarch, regs, dsc, rd);
7050
7051 return 0;
7052}
7053
7054static int
615234c1 7055thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7056 struct regcache *regs,
7057 struct displaced_step_closure *dsc)
7058{
7059 unsigned int rt = bits (insn1, 8, 10);
7060 unsigned int pc;
7061 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
7062
7063 /* LDR Rd, #imm8
7064
7065 Rewrite as:
7066
7067 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7068
7069 Insn: LDR R0, [R2, R3];
7070 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
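 /* For example (illustrative): for "ldr r5, [pc, #16]" the scratch code
    loads Align(PC,4) into R2 and 16 into R3, runs "ldr r0, [r2, r3]", and
    the cleanup copies R0 into R5 before restoring R0, R2 and R3.  */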
7071
7072 if (debug_displaced)
7073 fprintf_unfiltered (gdb_stdlog,
7074 "displaced: copying thumb ldr r%d [pc #%d]\n"
7075 , rt, imm8);
7076
7077 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7078 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7079 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7080 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7081 /* The assembler calculates the required value of the offset from the
7082 Align(PC,4) value of this instruction to the label. */
7083 pc = pc & 0xfffffffc;
7084
7085 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7086 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7087
7088 dsc->rd = rt;
7089 dsc->u.ldst.xfersize = 4;
7090 dsc->u.ldst.rn = 0;
7091 dsc->u.ldst.immed = 0;
7092 dsc->u.ldst.writeback = 0;
7093 dsc->u.ldst.restore_r4 = 0;
7094
7095 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7096
7097 dsc->cleanup = &cleanup_load;
7098
7099 return 0;
7100}
7101
7102/* Copy Thumb cbnz/cbz instruction. */
7103
7104static int
7105thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7106 struct regcache *regs,
7107 struct displaced_step_closure *dsc)
7108{
7109 int non_zero = bit (insn1, 11);
7110 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
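 /* CB{N}Z encodes its offset as i:imm5:'0' (bit 9 and bits 7-3), giving an
    even forward offset of 0-126 bytes from the Thumb PC value, which reads
    as the address of this instruction plus 4.  */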
7111 CORE_ADDR from = dsc->insn_addr;
7112 int rn = bits (insn1, 0, 2);
7113 int rn_val = displaced_read_reg (regs, dsc, rn);
7114
7115 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7116 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7117 true, set cond to INST_AL so cleanup_branch knows the branch is taken;
7118 otherwise leave it alone and cleanup_branch will do nothing. */
7119 if (dsc->u.branch.cond)
7120 {
7121 dsc->u.branch.cond = INST_AL;
7122 dsc->u.branch.dest = from + 4 + imm5;
7123 }
7124 else
7125 dsc->u.branch.dest = from + 2;
7126
7127 dsc->u.branch.link = 0;
7128 dsc->u.branch.exchange = 0;
7129
7130 if (debug_displaced)
7131 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7132 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7133 rn, rn_val, insn1, dsc->u.branch.dest);
7134
7135 dsc->modinsn[0] = THUMB_NOP;
7136
7137 dsc->cleanup = &cleanup_branch;
7138 return 0;
7139}
7140
7141/* Copy Table Branch Byte/Halfword. */
7142static int
7143thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7144 uint16_t insn2, struct regcache *regs,
7145 struct displaced_step_closure *dsc)
7146{
7147 ULONGEST rn_val, rm_val;
7148 int is_tbh = bit (insn2, 4);
7149 CORE_ADDR halfwords = 0;
7150 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7151
7152 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7153 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7154
7155 if (is_tbh)
7156 {
7157 gdb_byte buf[2];
7158
7159 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7160 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7161 }
7162 else
7163 {
7164 gdb_byte buf[1];
7165
7166 target_read_memory (rn_val + rm_val, buf, 1);
7167 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7168 }
7169
7170 if (debug_displaced)
7171 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7172 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7173 (unsigned int) rn_val, (unsigned int) rm_val,
7174 (unsigned int) halfwords);
7175
7176 dsc->u.branch.cond = INST_AL;
7177 dsc->u.branch.link = 0;
7178 dsc->u.branch.exchange = 0;
7179 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7180
7181 dsc->cleanup = &cleanup_branch;
7182
7183 return 0;
7184}
7185
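/* Cleanup for the full-list POP {r0-r7, pc} rewrite below: the popped PC
   value was left in r7 and r7's own popped value was parked in r8, so move
   them into place and then restore r8 from the saved temporary.  */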
7186static void
7187cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7188 struct displaced_step_closure *dsc)
7189{
7190 /* PC <- r7 */
7191 int val = displaced_read_reg (regs, dsc, 7);
7192 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7193
7194 /* r7 <- r8 */
7195 val = displaced_read_reg (regs, dsc, 8);
7196 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7197
7198 /* r8 <- tmp[0] */
7199 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7200
7201}
7202
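/* Copy a 16-bit Thumb POP instruction whose register list includes the PC.  */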
7203static int
615234c1 7204thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7205 struct regcache *regs,
7206 struct displaced_step_closure *dsc)
7207{
7208 dsc->u.block.regmask = insn1 & 0x00ff;
7209
7210 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7211 to :
7212
7213 (1) register list is full, that is, r0-r7 are used.
7214 Prepare: tmp[0] <- r8
7215
7216 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7217 MOV r8, r7; Move value of r7 to r8;
7218 POP {r7}; Store PC value into r7.
7219
7220 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7221
7222 (2) register list is not full, supposing there are N registers in
7223 register list (except PC, 0 <= N <= 7).
7224 Prepare: for each i, 0 - N, tmp[i] <- ri.
7225
7226 POP {r0, r1, ...., rN};
7227
7228 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7229 from tmp[] properly.
7230 */
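 /* As an illustration of case (2): POP {r0, r2, pc} has register mask
    0x0005, so num_in_list is 2, the modified instruction becomes
    POP {r0, r1, r2}, and the cleanup scatters the three loaded words back
    to r0, r2 and the PC before restoring r0-r2 from tmp[].  */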
7231 if (debug_displaced)
7232 fprintf_unfiltered (gdb_stdlog,
7233 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7234 dsc->u.block.regmask, insn1);
7235
7236 if (dsc->u.block.regmask == 0xff)
7237 {
7238 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7239
7240 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7241 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7242 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7243
7244 dsc->numinsns = 3;
7245 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7246 }
7247 else
7248 {
7249 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7250 unsigned int i;
7251 unsigned int new_regmask;
34518530
YQ
7252
7253 for (i = 0; i < num_in_list + 1; i++)
7254 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7255
7256 new_regmask = (1 << (num_in_list + 1)) - 1;
7257
7258 if (debug_displaced)
7259 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7260 "{..., pc}: original reg list %.4x,"
7261 " modified list %.4x\n"),
7262 (int) dsc->u.block.regmask, new_regmask);
7263
7264 dsc->u.block.regmask |= 0x8000;
7265 dsc->u.block.writeback = 0;
7266 dsc->u.block.cond = INST_AL;
7267
7268 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7269
7270 dsc->cleanup = &cleanup_block_load_pc;
7271 }
7272
7273 return 0;
7274}
7275
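/* Decode a 16-bit Thumb instruction and copy it for displaced stepping,
   dispatching on its major opcode field (bits 15-12).  */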
7276static void
7277thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7278 struct regcache *regs,
7279 struct displaced_step_closure *dsc)
7280{
7281 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7282 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7283 int err = 0;
7284
7285 /* 16-bit thumb instructions. */
7286 switch (op_bit_12_15)
7287 {
7288 /* Shift (immediate), add, subtract, move and compare. */
7289 case 0: case 1: case 2: case 3:
7290 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7291 "shift/add/sub/mov/cmp",
7292 dsc);
7293 break;
7294 case 4:
7295 switch (op_bit_10_11)
7296 {
7297 case 0: /* Data-processing */
7298 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7299 "data-processing",
7300 dsc);
7301 break;
7302 case 1: /* Special data instructions and branch and exchange. */
7303 {
7304 unsigned short op = bits (insn1, 7, 9);
7305 if (op == 6 || op == 7) /* BX or BLX */
7306 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7307 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7308 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7309 else
7310 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7311 dsc);
7312 }
7313 break;
7314 default: /* LDR (literal) */
7315 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7316 }
7317 break;
7318 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7319 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7320 break;
7321 case 10:
7322 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7323 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7324 else /* Generate SP-relative address */
7325 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7326 break;
7327 case 11: /* Misc 16-bit instructions */
7328 {
7329 switch (bits (insn1, 8, 11))
7330 {
7331 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7332 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7333 break;
7334 case 12: case 13: /* POP */
7335 if (bit (insn1, 8)) /* PC is in register list. */
7336 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7337 else
7338 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7339 break;
7340 case 15: /* If-Then, and hints */
7341 if (bits (insn1, 0, 3))
7342 /* If-Then makes up to four following instructions conditional.
7343 The IT instruction itself is not conditional, so handle it as an
7344 ordinary unmodified instruction. */
7345 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7346 dsc);
7347 else
7348 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7349 break;
7350 default:
7351 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7352 }
7353 }
7354 break;
7355 case 12:
7356 if (op_bit_10_11 < 2) /* Store multiple registers */
7357 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7358 else /* Load multiple registers */
7359 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7360 break;
7361 case 13: /* Conditional branch and supervisor call */
7362 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7363 err = thumb_copy_b (gdbarch, insn1, dsc);
7364 else
7365 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7366 break;
7367 case 14: /* Unconditional branch */
7368 err = thumb_copy_b (gdbarch, insn1, dsc);
7369 break;
7370 default:
7371 err = 1;
7372 }
7373
7374 if (err)
7375 internal_error (__FILE__, __LINE__,
7376 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7377}
7378
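/* Decode a 32-bit Thumb load (byte, halfword or word) or memory hint
   instruction and copy it for displaced stepping.  */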
7379static int
7380decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7381 uint16_t insn1, uint16_t insn2,
7382 struct regcache *regs,
7383 struct displaced_step_closure *dsc)
7384{
7385 int rt = bits (insn2, 12, 15);
7386 int rn = bits (insn1, 0, 3);
7387 int op1 = bits (insn1, 7, 8);
34518530
YQ
7388
7389 switch (bits (insn1, 5, 6))
7390 {
7391 case 0: /* Load byte and memory hints */
7392 if (rt == 0xf) /* PLD/PLI */
7393 {
7394 if (rn == 0xf)
7395 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7396 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7397 else
7398 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7399 "pli/pld", dsc);
7400 }
7401 else
7402 {
7403 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7404 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7405 1);
7406 else
7407 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7408 "ldrb{reg, immediate}/ldrbt",
7409 dsc);
7410 }
7411
7412 break;
7413 case 1: /* Load halfword and memory hints. */
7414 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7415 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7416 "pld/unalloc memhint", dsc);
7417 else
7418 {
7419 if (rn == 0xf)
7420 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7421 2);
7422 else
7423 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7424 "ldrh/ldrht", dsc);
7425 }
7426 break;
7427 case 2: /* Load word */
7428 {
7429 int insn2_bit_8_11 = bits (insn2, 8, 11);
7430
7431 if (rn == 0xf)
7432 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7433 else if (op1 == 0x1) /* Encoding T3 */
7434 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7435 0, 1);
7436 else /* op1 == 0x0 */
7437 {
7438 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7439 /* LDR (immediate) */
7440 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7441 dsc, bit (insn2, 8), 1);
7442 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7443 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7444 "ldrt", dsc);
7445 else
7446 /* LDR (register) */
7447 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7448 dsc, 0, 0);
7449 }
7450 break;
7451 }
7452 default:
7453 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7454 break;
7455 }
7456 return 0;
7457}
7458
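/* Decode a 32-bit Thumb instruction and copy it for displaced stepping.  */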
7459static void
7460thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7461 uint16_t insn2, struct regcache *regs,
7462 struct displaced_step_closure *dsc)
7463{
7464 int err = 0;
7465 unsigned short op = bit (insn2, 15);
7466 unsigned int op1 = bits (insn1, 11, 12);
7467
7468 switch (op1)
7469 {
7470 case 1:
7471 {
7472 switch (bits (insn1, 9, 10))
7473 {
7474 case 0:
7475 if (bit (insn1, 6))
7476 {
7477 /* Load/store {dual, exclusive}, table branch. */
7478 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7479 && bits (insn2, 5, 7) == 0)
7480 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7481 dsc);
7482 else
7483 /* PC is not allowed to be used in load/store {dual, exclusive}
7484 instructions. */
7485 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7486 "load/store dual/ex", dsc);
7487 }
7488 else /* load/store multiple */
7489 {
7490 switch (bits (insn1, 7, 8))
7491 {
7492 case 0: case 3: /* SRS, RFE */
7493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7494 "srs/rfe", dsc);
7495 break;
7496 case 1: case 2: /* LDM/STM/PUSH/POP */
7497 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7498 break;
7499 }
7500 }
7501 break;
7502
7503 case 1:
7504 /* Data-processing (shift register). */
7505 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7506 dsc);
7507 break;
7508 default: /* Coprocessor instructions. */
7509 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7510 break;
7511 }
7512 break;
7513 }
7514 case 2: /* op1 = 2 */
7515 if (op) /* Branch and misc control. */
7516 {
7517 if (bit (insn2, 14) /* BLX/BL */
7518 || bit (insn2, 12) /* Unconditional branch */
7519 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7520 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7521 else
7522 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7523 "misc ctrl", dsc);
7524 }
7525 else
7526 {
7527 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7528 {
7529 int op = bits (insn1, 4, 8);
7530 int rn = bits (insn1, 0, 3);
7531 if ((op == 0 || op == 0xa) && rn == 0xf)
7532 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7533 regs, dsc);
7534 else
7535 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7536 "dp/pb", dsc);
7537 }
7538 else /* Data processing (modified immediate) */
7539 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7540 "dp/mi", dsc);
7541 }
7542 break;
7543 case 3: /* op1 = 3 */
7544 switch (bits (insn1, 9, 10))
7545 {
7546 case 0:
7547 if (bit (insn1, 4))
7548 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7549 regs, dsc);
7550 else /* NEON Load/Store and Store single data item */
7551 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7552 "neon elt/struct load/store",
7553 dsc);
7554 break;
7555 case 1: /* op1 = 3, bits (9, 10) == 1 */
7556 switch (bits (insn1, 7, 8))
7557 {
7558 case 0: case 1: /* Data processing (register) */
7559 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7560 "dp(reg)", dsc);
7561 break;
7562 case 2: /* Multiply and absolute difference */
7563 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7564 "mul/mua/diff", dsc);
7565 break;
7566 case 3: /* Long multiply and divide */
7567 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7568 "lmul/lmua", dsc);
7569 break;
7570 }
7571 break;
7572 default: /* Coprocessor instructions */
7573 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7574 break;
7575 }
7576 break;
7577 default:
7578 err = 1;
7579 }
7580
7581 if (err)
7582 internal_error (__FILE__, __LINE__,
7583 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7584
7585}
7586
b434a28f
YQ
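/* Read the Thumb instruction at FROM and copy either its 16-bit or its
   32-bit form for displaced stepping.  */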
7587static void
7588thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7589 struct regcache *regs,
b434a28f
YQ
7590 struct displaced_step_closure *dsc)
7591{
34518530
YQ
7592 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7593 uint16_t insn1
7594 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7595
7596 if (debug_displaced)
7597 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7598 "at %.8lx\n", insn1, (unsigned long) from);
7599
7600 dsc->is_thumb = 1;
7601 dsc->insn_size = thumb_insn_size (insn1);
7602 if (thumb_insn_size (insn1) == 4)
7603 {
7604 uint16_t insn2
7605 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7606 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7607 }
7608 else
7609 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7610}
7611
cca44b1b 7612void
b434a28f
YQ
7613arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7614 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7615 struct displaced_step_closure *dsc)
7616{
7617 int err = 0;
b434a28f
YQ
7618 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7619 uint32_t insn;
cca44b1b
JB
7620
7621 /* Most displaced instructions use a 1-instruction scratch space, so set this
7622 here and override below if/when necessary. */
7623 dsc->numinsns = 1;
7624 dsc->insn_addr = from;
7625 dsc->scratch_base = to;
7626 dsc->cleanup = NULL;
7627 dsc->wrote_to_pc = 0;
7628
b434a28f 7629 if (!displaced_in_arm_mode (regs))
12545665 7630 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7631
4db71c0b
YQ
7632 dsc->is_thumb = 0;
7633 dsc->insn_size = 4;
b434a28f
YQ
7634 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7635 if (debug_displaced)
7636 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7637 "at %.8lx\n", (unsigned long) insn,
7638 (unsigned long) from);
7639
cca44b1b 7640 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7641 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7642 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7643 {
7644 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7645 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7646 break;
7647
7648 case 0x4: case 0x5: case 0x6:
7ff120b4 7649 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7650 break;
7651
7652 case 0x7:
7ff120b4 7653 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7654 break;
7655
7656 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7657 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7658 break;
7659
7660 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7661 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7662 break;
7663 }
7664
7665 if (err)
7666 internal_error (__FILE__, __LINE__,
7667 _("arm_process_displaced_insn: Instruction decode error"));
7668}
7669
7670/* Actually set up the scratch space for a displaced instruction. */
7671
7672void
7673arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7674 CORE_ADDR to, struct displaced_step_closure *dsc)
7675{
7676 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7677 unsigned int i, len, offset;
cca44b1b 7678 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7679 int size = dsc->is_thumb ? 2 : 4;
948f8e3d 7680 const gdb_byte *bkp_insn;
cca44b1b 7681
4db71c0b 7682 offset = 0;
cca44b1b
JB
7683 /* Poke modified instruction(s). */
7684 for (i = 0; i < dsc->numinsns; i++)
7685 {
7686 if (debug_displaced)
4db71c0b
YQ
7687 {
7688 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7689 if (size == 4)
7690 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7691 dsc->modinsn[i]);
7692 else if (size == 2)
7693 fprintf_unfiltered (gdb_stdlog, "%.4x",
7694 (unsigned short)dsc->modinsn[i]);
7695
7696 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7697 (unsigned long) to + offset);
7698
7699 }
7700 write_memory_unsigned_integer (to + offset, size,
7701 byte_order_for_code,
cca44b1b 7702 dsc->modinsn[i]);
4db71c0b
YQ
7703 offset += size;
7704 }
7705
7706 /* Choose the correct breakpoint instruction. */
7707 if (dsc->is_thumb)
7708 {
7709 bkp_insn = tdep->thumb_breakpoint;
7710 len = tdep->thumb_breakpoint_size;
7711 }
7712 else
7713 {
7714 bkp_insn = tdep->arm_breakpoint;
7715 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7716 }
7717
7718 /* Put breakpoint afterwards. */
4db71c0b 7719 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7720
7721 if (debug_displaced)
7722 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7723 paddress (gdbarch, from), paddress (gdbarch, to));
7724}
7725
cca44b1b
JB
7726/* Entry point for cleaning things up after a displaced instruction has been
7727 single-stepped. */
7728
7729void
7730arm_displaced_step_fixup (struct gdbarch *gdbarch,
7731 struct displaced_step_closure *dsc,
7732 CORE_ADDR from, CORE_ADDR to,
7733 struct regcache *regs)
7734{
7735 if (dsc->cleanup)
7736 dsc->cleanup (gdbarch, regs, dsc);
7737
7738 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7739 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7740 dsc->insn_addr + dsc->insn_size);
7741
cca44b1b
JB
7742}
7743
7744#include "bfd-in2.h"
7745#include "libcoff.h"
7746
7747static int
7748gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7749{
e47ad6c0
YQ
7750 gdb_disassembler *di
7751 = static_cast<gdb_disassembler *>(info->application_data);
7752 struct gdbarch *gdbarch = di->arch ();
9779414d
DJ
7753
7754 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7755 {
7756 static asymbol *asym;
7757 static combined_entry_type ce;
7758 static struct coff_symbol_struct csym;
7759 static struct bfd fake_bfd;
7760 static bfd_target fake_target;
7761
7762 if (csym.native == NULL)
7763 {
7764 /* Create a fake symbol vector containing a Thumb symbol.
7765 This is solely so that the code in print_insn_little_arm()
7766 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7767 the presence of a Thumb symbol and switch to decoding
7768 Thumb instructions. */
7769
7770 fake_target.flavour = bfd_target_coff_flavour;
7771 fake_bfd.xvec = &fake_target;
7772 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7773 csym.native = &ce;
7774 csym.symbol.the_bfd = &fake_bfd;
7775 csym.symbol.name = "fake";
7776 asym = (asymbol *) & csym;
7777 }
7778
7779 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7780 info->symbols = &asym;
7781 }
7782 else
7783 info->symbols = NULL;
7784
7785 if (info->endian == BFD_ENDIAN_BIG)
7786 return print_insn_big_arm (memaddr, info);
7787 else
7788 return print_insn_little_arm (memaddr, info);
7789}
7790
7791/* The following define instruction sequences that will cause ARM
7792 cpu's to take an undefined instruction trap. These are used to
7793 signal a breakpoint to GDB.
7794
7795 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7796 modes. A different instruction is required for each mode. The ARM
7797 CPUs can also be big or little endian. Thus four different
7798 instructions are needed to support all cases.
7799
7800 Note: ARMv4 defines several new instructions that will take the
7801 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7802 not in fact add the new instructions. The new undefined
7803 instructions in ARMv4 are all instructions that had no defined
7804 behaviour in earlier chips. There is no guarantee that they will
7805 raise an exception, but they may be treated as NOPs. In practice, it
7806 may only be safe to rely on instructions matching:
7807
7808 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7809 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7810 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7811
0963b4bd 7812 Even this may only be true if the condition predicate is true. The
cca44b1b
JB
7813 following use a condition predicate of ALWAYS so it is always TRUE.
7814
7815 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7816 and NetBSD all use a software interrupt rather than an undefined
7817 instruction to force a trap. This can be handled by the
7818 ABI-specific code during establishment of the gdbarch vector. */
7819
7820#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7821#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7822#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7823#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7824
948f8e3d
PA
7825static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7826static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7827static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7828static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b 7829
cd6c3b4f
YQ
7830/* Implement the breakpoint_kind_from_pc gdbarch method. */
7831
d19280ad
YQ
7832static int
7833arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
cca44b1b
JB
7834{
7835 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7836 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7837
9779414d 7838 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7839 {
7840 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7841
7842 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7843 check whether we are replacing a 32-bit instruction. */
7844 if (tdep->thumb2_breakpoint != NULL)
7845 {
7846 gdb_byte buf[2];
d19280ad 7847
177321bd
DJ
7848 if (target_read_memory (*pcptr, buf, 2) == 0)
7849 {
7850 unsigned short inst1;
d19280ad 7851
177321bd 7852 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7853 if (thumb_insn_size (inst1) == 4)
d19280ad 7854 return ARM_BP_KIND_THUMB2;
177321bd
DJ
7855 }
7856 }
7857
d19280ad 7858 return ARM_BP_KIND_THUMB;
cca44b1b
JB
7859 }
7860 else
d19280ad
YQ
7861 return ARM_BP_KIND_ARM;
7862
7863}
7864
cd6c3b4f
YQ
7865/* Implement the sw_breakpoint_from_kind gdbarch method. */
7866
d19280ad
YQ
7867static const gdb_byte *
7868arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7869{
7870 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7871
7872 switch (kind)
cca44b1b 7873 {
d19280ad
YQ
7874 case ARM_BP_KIND_ARM:
7875 *size = tdep->arm_breakpoint_size;
cca44b1b 7876 return tdep->arm_breakpoint;
d19280ad
YQ
7877 case ARM_BP_KIND_THUMB:
7878 *size = tdep->thumb_breakpoint_size;
7879 return tdep->thumb_breakpoint;
7880 case ARM_BP_KIND_THUMB2:
7881 *size = tdep->thumb2_breakpoint_size;
7882 return tdep->thumb2_breakpoint;
7883 default:
7884 gdb_assert_not_reached ("unexpected arm breakpoint kind");
cca44b1b
JB
7885 }
7886}
7887
833b7ab5
YQ
7888/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7889
7890static int
7891arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7892 struct regcache *regcache,
7893 CORE_ADDR *pcptr)
7894{
7895 gdb_byte buf[4];
7896
7897 /* Check that the memory pointed to by PC is readable. */
7898 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7899 {
7900 struct arm_get_next_pcs next_pcs_ctx;
7901 CORE_ADDR pc;
7902 int i;
7903 VEC (CORE_ADDR) *next_pcs = NULL;
7904 struct cleanup *old_chain
7905 = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
7906
7907 arm_get_next_pcs_ctor (&next_pcs_ctx,
7908 &arm_get_next_pcs_ops,
7909 gdbarch_byte_order (gdbarch),
7910 gdbarch_byte_order_for_code (gdbarch),
7911 0,
7912 regcache);
7913
7914 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7915
7916 /* If *PCPTR matches one of the possible next PCs computed by the
7917 software single-step logic, determine whether the destination is
7918 Thumb from that computed address. */
7919 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
7920 {
7921 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7922 {
7923 do_cleanups (old_chain);
7924
7925 if (IS_THUMB_ADDR (pc))
7926 {
7927 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7928 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7929 }
7930 else
7931 return ARM_BP_KIND_ARM;
7932 }
7933 }
7934
7935 do_cleanups (old_chain);
7936 }
7937
7938 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7939}
7940
cca44b1b
JB
7941/* Extract from an array REGBUF containing the (raw) register state a
7942 function return value of type TYPE, and copy that, in virtual
7943 format, into VALBUF. */
7944
7945static void
7946arm_extract_return_value (struct type *type, struct regcache *regs,
7947 gdb_byte *valbuf)
7948{
7949 struct gdbarch *gdbarch = get_regcache_arch (regs);
7950 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7951
7952 if (TYPE_CODE_FLT == TYPE_CODE (type))
7953 {
7954 switch (gdbarch_tdep (gdbarch)->fp_model)
7955 {
7956 case ARM_FLOAT_FPA:
7957 {
7958 /* The value is in register F0 in internal format. We need to
7959 extract the raw value and then convert it to the desired
7960 internal type. */
7961 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7962
7963 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7964 convert_from_extended (floatformat_from_type (type), tmpbuf,
7965 valbuf, gdbarch_byte_order (gdbarch));
7966 }
7967 break;
7968
7969 case ARM_FLOAT_SOFT_FPA:
7970 case ARM_FLOAT_SOFT_VFP:
7971 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7972 not using the VFP ABI code. */
7973 case ARM_FLOAT_VFP:
7974 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7975 if (TYPE_LENGTH (type) > 4)
7976 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7977 valbuf + INT_REGISTER_SIZE);
7978 break;
7979
7980 default:
0963b4bd
MS
7981 internal_error (__FILE__, __LINE__,
7982 _("arm_extract_return_value: "
7983 "Floating point model not supported"));
cca44b1b
JB
7984 break;
7985 }
7986 }
7987 else if (TYPE_CODE (type) == TYPE_CODE_INT
7988 || TYPE_CODE (type) == TYPE_CODE_CHAR
7989 || TYPE_CODE (type) == TYPE_CODE_BOOL
7990 || TYPE_CODE (type) == TYPE_CODE_PTR
aa006118 7991 || TYPE_IS_REFERENCE (type)
cca44b1b
JB
7992 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7993 {
b021a221
MS
7994 /* If the type is a plain integer, then the access is
7995 straight-forward. Otherwise we have to play around a bit
7996 more. */
cca44b1b
JB
7997 int len = TYPE_LENGTH (type);
7998 int regno = ARM_A1_REGNUM;
7999 ULONGEST tmp;
8000
8001 while (len > 0)
8002 {
8003 /* By using store_unsigned_integer we avoid having to do
8004 anything special for small big-endian values. */
8005 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8006 store_unsigned_integer (valbuf,
8007 (len > INT_REGISTER_SIZE
8008 ? INT_REGISTER_SIZE : len),
8009 byte_order, tmp);
8010 len -= INT_REGISTER_SIZE;
8011 valbuf += INT_REGISTER_SIZE;
8012 }
8013 }
8014 else
8015 {
8016 /* For a structure or union the behaviour is as if the value had
8017 been stored to word-aligned memory and then loaded into
8018 registers with 32-bit load instruction(s). */
8019 int len = TYPE_LENGTH (type);
8020 int regno = ARM_A1_REGNUM;
8021 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8022
8023 while (len > 0)
8024 {
8025 regcache_cooked_read (regs, regno++, tmpbuf);
8026 memcpy (valbuf, tmpbuf,
8027 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8028 len -= INT_REGISTER_SIZE;
8029 valbuf += INT_REGISTER_SIZE;
8030 }
8031 }
8032}
8033
8034
8035/* Will a function return an aggregate type in memory or in a
8036 register? Return 0 if an aggregate type can be returned in a
8037 register, 1 if it must be returned in memory. */
8038
8039static int
8040arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8041{
cca44b1b
JB
8042 enum type_code code;
8043
f168693b 8044 type = check_typedef (type);
cca44b1b 8045
b13c8ab2
YQ
8046 /* Simple, non-aggregate types (ie not including vectors and
8047 complex) are always returned in a register (or registers). */
8048 code = TYPE_CODE (type);
8049 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8050 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8051 return 0;
cca44b1b 8052
c4312b19
YQ
8053 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8054 {
8055 /* Vector values should be returned using ARM registers if they
8056 are not over 16 bytes. */
8057 return (TYPE_LENGTH (type) > 16);
8058 }
8059
b13c8ab2 8060 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 8061 {
b13c8ab2
YQ
8062 /* The AAPCS says all aggregates not larger than a word are returned
8063 in a register. */
8064 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8065 return 0;
8066
cca44b1b
JB
8067 return 1;
8068 }
b13c8ab2
YQ
8069 else
8070 {
8071 int nRc;
cca44b1b 8072
b13c8ab2
YQ
8073 /* All aggregate types that won't fit in a register must be returned
8074 in memory. */
8075 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8076 return 1;
cca44b1b 8077
b13c8ab2
YQ
8078 /* In the ARM ABI, "integer" like aggregate types are returned in
8079 registers. For an aggregate type to be integer like, its size
8080 must be less than or equal to INT_REGISTER_SIZE and the
8081 offset of each addressable subfield must be zero. Note that bit
8082 fields are not addressable, and all addressable subfields of
8083 unions always start at offset zero.
cca44b1b 8084
b13c8ab2
YQ
8085 This function is based on the behaviour of GCC 2.95.1.
8086 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8087
b13c8ab2
YQ
8088 Note: All versions of GCC before GCC 2.95.2 do not set up the
8089 parameters correctly for a function returning the following
8090 structure: struct { float f;}; This should be returned in memory,
8091 not a register. Richard Earnshaw sent me a patch, but I do not
8092 know of any way to detect if a function like the above has been
8093 compiled with the correct calling convention. */
8094
8095 /* Assume all other aggregate types can be returned in a register.
8096 Run a check for structures, unions and arrays. */
8097 nRc = 0;
67255d04 8098
b13c8ab2
YQ
8099 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8100 {
8101 int i;
8102 /* Need to check if this struct/union is "integer" like. For
8103 this to be true, its size must be less than or equal to
8104 INT_REGISTER_SIZE and the offset of each addressable
8105 subfield must be zero. Note that bit fields are not
8106 addressable, and unions always start at offset zero. If any
8107 of the subfields is a floating point type, the struct/union
8108 cannot be an integer type. */
8109
8110 /* For each field in the object, check:
8111 1) Is it FP? --> yes, nRc = 1;
8112 2) Is it addressable (bitpos != 0) and
8113 not packed (bitsize == 0)?
8114 --> yes, nRc = 1
8115 */
8116
8117 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8118 {
b13c8ab2
YQ
8119 enum type_code field_type_code;
8120
8121 field_type_code
8122 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8123 i)));
8124
8125 /* Is it a floating point type field? */
8126 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8127 {
8128 nRc = 1;
8129 break;
8130 }
b13c8ab2
YQ
8131
8132 /* If bitpos != 0, then we have to care about it. */
8133 if (TYPE_FIELD_BITPOS (type, i) != 0)
8134 {
8135 /* Bitfields are not addressable. If the field bitsize is
8136 zero, then the field is not packed. Hence it cannot be
8137 a bitfield or any other packed type. */
8138 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8139 {
8140 nRc = 1;
8141 break;
8142 }
8143 }
67255d04
RE
8144 }
8145 }
67255d04 8146
b13c8ab2
YQ
8147 return nRc;
8148 }
67255d04
RE
8149}
8150
34e8f22d
RE
8151/* Write into appropriate registers a function return value of type
8152 TYPE, given in virtual format. */
8153
8154static void
b508a996 8155arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8156 const gdb_byte *valbuf)
34e8f22d 8157{
be8626e0 8158 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8159 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8160
34e8f22d
RE
8161 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8162 {
64403bd1 8163 gdb_byte buf[FP_REGISTER_SIZE];
34e8f22d 8164
be8626e0 8165 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8166 {
8167 case ARM_FLOAT_FPA:
8168
be8626e0
MD
8169 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8170 gdbarch_byte_order (gdbarch));
b508a996 8171 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8172 break;
8173
fd50bc42 8174 case ARM_FLOAT_SOFT_FPA:
08216dd7 8175 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8176 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8177 not using the VFP ABI code. */
8178 case ARM_FLOAT_VFP:
b508a996
RE
8179 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8180 if (TYPE_LENGTH (type) > 4)
8181 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8182 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8183 break;
8184
8185 default:
9b20d036
MS
8186 internal_error (__FILE__, __LINE__,
8187 _("arm_store_return_value: Floating "
8188 "point model not supported"));
08216dd7
RE
8189 break;
8190 }
34e8f22d 8191 }
b508a996
RE
8192 else if (TYPE_CODE (type) == TYPE_CODE_INT
8193 || TYPE_CODE (type) == TYPE_CODE_CHAR
8194 || TYPE_CODE (type) == TYPE_CODE_BOOL
8195 || TYPE_CODE (type) == TYPE_CODE_PTR
aa006118 8196 || TYPE_IS_REFERENCE (type)
b508a996
RE
8197 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8198 {
8199 if (TYPE_LENGTH (type) <= 4)
8200 {
8201 /* Values of one word or less are zero/sign-extended and
8202 returned in r0. */
7a5ea0d4 8203 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8204 LONGEST val = unpack_long (type, valbuf);
8205
e17a4113 8206 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8207 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8208 }
8209 else
8210 {
8211 /* Integral values greater than one word are stored in consecutive
8212 registers starting with r0. This will always be a multiple of
8213 the register size. */
8214 int len = TYPE_LENGTH (type);
8215 int regno = ARM_A1_REGNUM;
8216
8217 while (len > 0)
8218 {
8219 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8220 len -= INT_REGISTER_SIZE;
8221 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8222 }
8223 }
8224 }
34e8f22d 8225 else
b508a996
RE
8226 {
8227 /* For a structure or union the behaviour is as if the value had
8228 been stored to word-aligned memory and then loaded into
8229 registers with 32-bit load instruction(s). */
8230 int len = TYPE_LENGTH (type);
8231 int regno = ARM_A1_REGNUM;
7a5ea0d4 8232 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8233
8234 while (len > 0)
8235 {
8236 memcpy (tmpbuf, valbuf,
7a5ea0d4 8237 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8238 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8239 len -= INT_REGISTER_SIZE;
8240 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8241 }
8242 }
34e8f22d
RE
8243}
8244
2af48f68
PB
8245
8246/* Handle function return values. */
8247
8248static enum return_value_convention
6a3a010b 8249arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8250 struct type *valtype, struct regcache *regcache,
8251 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8252{
7c00367c 8253 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8254 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8255 enum arm_vfp_cprc_base_type vfp_base_type;
8256 int vfp_base_count;
8257
8258 if (arm_vfp_abi_for_function (gdbarch, func_type)
8259 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8260 {
8261 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8262 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8263 int i;
8264 for (i = 0; i < vfp_base_count; i++)
8265 {
58d6951d
DJ
8266 if (reg_char == 'q')
8267 {
8268 if (writebuf)
8269 arm_neon_quad_write (gdbarch, regcache, i,
8270 writebuf + i * unit_length);
8271
8272 if (readbuf)
8273 arm_neon_quad_read (gdbarch, regcache, i,
8274 readbuf + i * unit_length);
8275 }
8276 else
8277 {
8278 char name_buf[4];
8279 int regnum;
8280
8c042590 8281 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8282 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8283 strlen (name_buf));
8284 if (writebuf)
8285 regcache_cooked_write (regcache, regnum,
8286 writebuf + i * unit_length);
8287 if (readbuf)
8288 regcache_cooked_read (regcache, regnum,
8289 readbuf + i * unit_length);
8290 }
90445bd3
DJ
8291 }
8292 return RETURN_VALUE_REGISTER_CONVENTION;
8293 }
7c00367c 8294
2af48f68
PB
8295 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8296 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8297 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8298 {
7c00367c
MK
8299 if (tdep->struct_return == pcc_struct_return
8300 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8301 return RETURN_VALUE_STRUCT_CONVENTION;
8302 }
b13c8ab2
YQ
8303 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8304 {
8305 if (arm_return_in_memory (gdbarch, valtype))
8306 return RETURN_VALUE_STRUCT_CONVENTION;
8307 }
7052e42c 8308
2af48f68
PB
8309 if (writebuf)
8310 arm_store_return_value (valtype, regcache, writebuf);
8311
8312 if (readbuf)
8313 arm_extract_return_value (valtype, regcache, readbuf);
8314
8315 return RETURN_VALUE_REGISTER_CONVENTION;
8316}
8317
8318
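/* Extract the longjmp resume PC from the jmp_buf whose address is in r0,
   using the jb_pc and jb_elt_size values supplied by the OS ABI.  */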
9df628e0 8319static int
60ade65d 8320arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8321{
e17a4113
UW
8322 struct gdbarch *gdbarch = get_frame_arch (frame);
8323 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8324 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8325 CORE_ADDR jb_addr;
e362b510 8326 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8327
60ade65d 8328 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8329
8330 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8331 INT_REGISTER_SIZE))
9df628e0
RE
8332 return 0;
8333
e17a4113 8334 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8335 return 1;
8336}
8337
faa95490
DJ
8338/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8339 return the target PC. Otherwise return 0. */
c906108c
SS
8340
8341CORE_ADDR
52f729a7 8342arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8343{
2c02bd72 8344 const char *name;
faa95490 8345 int namelen;
c906108c
SS
8346 CORE_ADDR start_addr;
8347
8348 /* Find the starting address and name of the function containing the PC. */
8349 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8350 {
8351 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8352 check here. */
8353 start_addr = arm_skip_bx_reg (frame, pc);
8354 if (start_addr != 0)
8355 return start_addr;
8356
8357 return 0;
8358 }
c906108c 8359
faa95490
DJ
8360 /* If PC is in a Thumb call or return stub, return the address of the
8361 target PC, which is in a register. The thunk functions are called
8362 _call_via_xx, where xx is the register name. The possible names
3d8d5e79
DJ
8363 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8364 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8365 if (startswith (name, "_call_via_")
8366 || startswith (name, "__ARM_call_via_"))
c906108c 8367 {
ed9a39eb
JM
8368 /* Use the name suffix to determine which register contains the
8369 target PC. */
a121b7c1 8370 static const char *table[15] =
c5aa993b
JM
8371 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8372 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8373 };
c906108c 8374 int regno;
faa95490 8375 int offset = strlen (name) - 2;
c906108c
SS
8376
8377 for (regno = 0; regno <= 14; regno++)
faa95490 8378 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8379 return get_frame_register_unsigned (frame, regno);
c906108c 8380 }
ed9a39eb 8381
faa95490
DJ
8382 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8383 non-interworking calls to foo. We could decode the stubs
8384 to find the target but it's easier to use the symbol table. */
8385 namelen = strlen (name);
8386 if (name[0] == '_' && name[1] == '_'
8387 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8388 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8389 || (namelen > 2 + strlen ("_from_arm")
61012eef 8390 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8391 {
8392 char *target_name;
8393 int target_len = namelen - 2;
3b7344d5 8394 struct bound_minimal_symbol minsym;
faa95490
DJ
8395 struct objfile *objfile;
8396 struct obj_section *sec;
8397
8398 if (name[namelen - 1] == 'b')
8399 target_len -= strlen ("_from_thumb");
8400 else
8401 target_len -= strlen ("_from_arm");
8402
224c3ddb 8403 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8404 memcpy (target_name, name + 2, target_len);
8405 target_name[target_len] = '\0';
8406
8407 sec = find_pc_section (pc);
8408 objfile = (sec == NULL) ? NULL : sec->objfile;
8409 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8410 if (minsym.minsym != NULL)
77e371c0 8411 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8412 else
8413 return 0;
8414 }
8415
c5aa993b 8416 return 0; /* not a stub */
c906108c
SS
8417}
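
/* As an illustration of the stub handling above (the symbol names here
   are hypothetical examples, not taken from a real binary):

     pc inside "_call_via_r3"        -> return the value of register r3;
     pc inside "__memcpy_from_thumb" -> namelen = 19, target_len
        = 19 - 2 - strlen ("_from_thumb") = 6, target_name = "memcpy",
        and the result is the minimal symbol address of "memcpy".  */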
8418
afd7eef0
RE
8419static void
8420set_arm_command (char *args, int from_tty)
8421{
edefbb7c
AC
8422 printf_unfiltered (_("\
8423\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8424 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8425}
8426
8427static void
8428show_arm_command (char *args, int from_tty)
8429{
26304000 8430 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8431}
8432
28e97307
DJ
8433static void
8434arm_update_current_architecture (void)
fd50bc42 8435{
28e97307 8436 struct gdbarch_info info;
fd50bc42 8437
28e97307 8438 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8439 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8440 return;
fd50bc42 8441
28e97307
DJ
8442 /* Update the architecture. */
8443 gdbarch_info_init (&info);
fd50bc42 8444
28e97307 8445 if (!gdbarch_update_p (info))
9b20d036 8446 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8447}
8448
8449static void
8450set_fp_model_sfunc (char *args, int from_tty,
8451 struct cmd_list_element *c)
8452{
570dc176 8453 int fp_model;
fd50bc42
RE
8454
8455 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8456 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8457 {
aead7601 8458 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8459 break;
8460 }
8461
8462 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8463 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8464 current_fp_model);
8465
28e97307 8466 arm_update_current_architecture ();
fd50bc42
RE
8467}
8468
8469static void
08546159
AC
8470show_fp_model (struct ui_file *file, int from_tty,
8471 struct cmd_list_element *c, const char *value)
fd50bc42 8472{
f5656ead 8473 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8474
28e97307 8475 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8476 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8477 fprintf_filtered (file, _("\
8478The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8479 fp_model_strings[tdep->fp_model]);
8480 else
8481 fprintf_filtered (file, _("\
8482The current ARM floating point model is \"%s\".\n"),
8483 fp_model_strings[arm_fp_model]);
8484}
8485
8486static void
8487arm_set_abi (char *args, int from_tty,
8488 struct cmd_list_element *c)
8489{
570dc176 8490 int arm_abi;
28e97307
DJ
8491
8492 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8493 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8494 {
aead7601 8495 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8496 break;
8497 }
8498
8499 if (arm_abi == ARM_ABI_LAST)
8500 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8501 arm_abi_string);
8502
8503 arm_update_current_architecture ();
8504}
8505
8506static void
8507arm_show_abi (struct ui_file *file, int from_tty,
8508 struct cmd_list_element *c, const char *value)
8509{
f5656ead 8510 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8511
8512 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8513 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8514 fprintf_filtered (file, _("\
8515The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8516 arm_abi_strings[tdep->arm_abi]);
8517 else
8518 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8519 arm_abi_string);
fd50bc42
RE
8520}
8521
0428b8f5
DJ
8522static void
8523arm_show_fallback_mode (struct ui_file *file, int from_tty,
8524 struct cmd_list_element *c, const char *value)
8525{
0963b4bd
MS
8526 fprintf_filtered (file,
8527 _("The current execution mode assumed "
8528 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8529 arm_fallback_mode_string);
8530}
8531
8532static void
8533arm_show_force_mode (struct ui_file *file, int from_tty,
8534 struct cmd_list_element *c, const char *value)
8535{
0963b4bd
MS
8536 fprintf_filtered (file,
8537 _("The current execution mode assumed "
8538 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8539 arm_force_mode_string);
8540}
8541
afd7eef0
RE
8542/* If the user changes the register disassembly style used for info
8543 register and other commands, we have to also switch the style used
8544 in opcodes for disassembly output. This function is run in the "set
8545 arm disassembly" command, and does that. */
bc90b915
FN
8546
8547static void
afd7eef0 8548set_disassembly_style_sfunc (char *args, int from_tty,
65b48a81 8549 struct cmd_list_element *c)
bc90b915 8550{
65b48a81
PB
8551 /* Convert the short style name into the long style name (eg, reg-names-*)
8552 before calling the generic set_disassembler_options() function. */
8553 std::string long_name = std::string ("reg-names-") + disassembly_style;
8554 set_disassembler_options (&long_name[0]);
8555}
8556
8557static void
8558show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8559 struct cmd_list_element *c, const char *value)
8560{
8561 struct gdbarch *gdbarch = get_current_arch ();
8562 char *options = get_disassembler_options (gdbarch);
8563 const char *style = "";
8564 int len = 0;
f995bbe8 8565 const char *opt;
65b48a81
PB
8566
8567 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8568 if (CONST_STRNEQ (opt, "reg-names-"))
8569 {
8570 style = &opt[strlen ("reg-names-")];
8571 len = strcspn (style, ",");
8572 }
8573
8574 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
bc90b915
FN
8575}
8576\f
966fbf70 8577/* Return the ARM register name corresponding to register I. */
a208b0cb 8578static const char *
d93859e2 8579arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8580{
58d6951d
DJ
8581 const int num_regs = gdbarch_num_regs (gdbarch);
8582
8583 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8584 && i >= num_regs && i < num_regs + 32)
8585 {
8586 static const char *const vfp_pseudo_names[] = {
8587 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8588 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8589 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8590 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8591 };
8592
8593 return vfp_pseudo_names[i - num_regs];
8594 }
8595
8596 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8597 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8598 {
8599 static const char *const neon_pseudo_names[] = {
8600 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8601 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8602 };
8603
8604 return neon_pseudo_names[i - num_regs - 32];
8605 }
8606
ff6f572f
DJ
8607 if (i >= ARRAY_SIZE (arm_register_names))
8608 /* These registers are only supported on targets which supply
8609 an XML description. */
8610 return "";
8611
966fbf70
RE
8612 return arm_register_names[i];
8613}
8614
082fc60d
RE
8615/* Test whether the coff symbol specific value corresponds to a Thumb
8616 function. */
8617
8618static int
8619coff_sym_is_thumb (int val)
8620{
f8bf5763
PM
8621 return (val == C_THUMBEXT
8622 || val == C_THUMBSTAT
8623 || val == C_THUMBEXTFUNC
8624 || val == C_THUMBSTATFUNC
8625 || val == C_THUMBLABEL);
082fc60d
RE
8626}
8627
8628/* arm_coff_make_msymbol_special()
8629 arm_elf_make_msymbol_special()
8630
8631 These functions test whether the COFF or ELF symbol corresponds to
8632 an address in thumb code, and set a "special" bit in a minimal
8633 symbol to indicate that it does. */
8634
34e8f22d 8635static void
082fc60d
RE
8636arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8637{
39d911fc
TP
8638 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8639
8640 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
467d42c4 8641 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8642 MSYMBOL_SET_SPECIAL (msym);
8643}
8644
34e8f22d 8645static void
082fc60d
RE
8646arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8647{
8648 if (coff_sym_is_thumb (val))
8649 MSYMBOL_SET_SPECIAL (msym);
8650}
8651
60c5725c 8652static void
c1bd65d0 8653arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8654{
9a3c8263 8655 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8656 unsigned int i;
8657
8658 for (i = 0; i < objfile->obfd->section_count; i++)
8659 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8660}
8661
8662static void
8663arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8664 asymbol *sym)
8665{
8666 const char *name = bfd_asymbol_name (sym);
8667 struct arm_per_objfile *data;
8668 VEC(arm_mapping_symbol_s) **map_p;
8669 struct arm_mapping_symbol new_map_sym;
8670
8671 gdb_assert (name[0] == '$');
8672 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8673 return;
8674
9a3c8263
SM
8675 data = (struct arm_per_objfile *) objfile_data (objfile,
8676 arm_objfile_data_key);
60c5725c
DJ
8677 if (data == NULL)
8678 {
8679 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8680 struct arm_per_objfile);
8681 set_objfile_data (objfile, arm_objfile_data_key, data);
8682 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8683 objfile->obfd->section_count,
8684 VEC(arm_mapping_symbol_s) *);
8685 }
8686 map_p = &data->section_maps[bfd_get_section (sym)->index];
8687
8688 new_map_sym.value = sym->value;
8689 new_map_sym.type = name[1];
8690
8691 /* Assume that most mapping symbols appear in order of increasing
8692 value. If they were randomly distributed, it would be faster to
8693 always push here and then sort at first use. */
8694 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8695 {
8696 struct arm_mapping_symbol *prev_map_sym;
8697
8698 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8699 if (prev_map_sym->value >= sym->value)
8700 {
8701 unsigned int idx;
8702 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8703 arm_compare_mapping_symbols);
8704 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8705 return;
8706 }
8707 }
8708
8709 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8710}
8711
756fe439 8712static void
61a1198a 8713arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8714{
9779414d 8715 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8716 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8717
8718 /* If necessary, set the T bit. */
8719 if (arm_apcs_32)
8720 {
9779414d 8721 ULONGEST val, t_bit;
61a1198a 8722 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8723 t_bit = arm_psr_thumb_bit (gdbarch);
8724 if (arm_pc_is_thumb (gdbarch, pc))
8725 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8726 val | t_bit);
756fe439 8727 else
61a1198a 8728 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8729 val & ~t_bit);
756fe439
DJ
8730 }
8731}
123dc839 8732
58d6951d
DJ
8733/* Read the contents of a NEON quad register, by reading from two
8734 double registers. This is used to implement the quad pseudo
8735 registers, and for argument passing in case the quad registers are
8736 missing; vectors are passed in quad registers when using the VFP
8737 ABI, even if a NEON unit is not present. REGNUM is the index of
8738 the quad register, in [0, 15]. */
8739
05d1431c 8740static enum register_status
58d6951d
DJ
8741arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8742 int regnum, gdb_byte *buf)
8743{
8744 char name_buf[4];
8745 gdb_byte reg_buf[8];
8746 int offset, double_regnum;
05d1431c 8747 enum register_status status;
58d6951d 8748
8c042590 8749 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8750 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8751 strlen (name_buf));
8752
8753 /* d0 is always the least significant half of q0. */
8754 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8755 offset = 8;
8756 else
8757 offset = 0;
8758
05d1431c
PA
8759 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8760 if (status != REG_VALID)
8761 return status;
58d6951d
DJ
8762 memcpy (buf + offset, reg_buf, 8);
8763
8764 offset = 8 - offset;
05d1431c
PA
8765 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8766 if (status != REG_VALID)
8767 return status;
58d6951d 8768 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8769
8770 return REG_VALID;
58d6951d
DJ
8771}
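
/* A sketch of the resulting layout, assuming a little-endian target
   (example register numbers only): reading pseudo register q1 reads raw
   registers d2 and d3, and the 16-byte buffer is filled as

     buf[0..7]  = d2   (least significant half of q1)
     buf[8..15] = d3   (most significant half)

   On a big-endian target the two halves are swapped, as the OFFSET
   computation above shows.  */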
8772
05d1431c 8773static enum register_status
58d6951d
DJ
8774arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8775 int regnum, gdb_byte *buf)
8776{
8777 const int num_regs = gdbarch_num_regs (gdbarch);
8778 char name_buf[4];
8779 gdb_byte reg_buf[8];
8780 int offset, double_regnum;
8781
8782 gdb_assert (regnum >= num_regs);
8783 regnum -= num_regs;
8784
8785 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8786 /* Quad-precision register. */
05d1431c 8787 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8788 else
8789 {
05d1431c
PA
8790 enum register_status status;
8791
58d6951d
DJ
8792 /* Single-precision register. */
8793 gdb_assert (regnum < 32);
8794
8795 /* s0 is always the least significant half of d0. */
8796 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8797 offset = (regnum & 1) ? 0 : 4;
8798 else
8799 offset = (regnum & 1) ? 4 : 0;
8800
8c042590 8801 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8802 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8803 strlen (name_buf));
8804
05d1431c
PA
8805 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8806 if (status == REG_VALID)
8807 memcpy (buf, reg_buf + offset, 4);
8808 return status;
58d6951d
DJ
8809 }
8810}
8811
8812/* Store the contents of BUF to a NEON quad register, by writing to
8813 two double registers. This is used to implement the quad pseudo
8814 registers, and for argument passing in case the quad registers are
8815 missing; vectors are passed in quad registers when using the VFP
8816 ABI, even if a NEON unit is not present. REGNUM is the index
8817 of the quad register, in [0, 15]. */
8818
8819static void
8820arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8821 int regnum, const gdb_byte *buf)
8822{
8823 char name_buf[4];
58d6951d
DJ
8824 int offset, double_regnum;
8825
8c042590 8826 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8827 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8828 strlen (name_buf));
8829
8830 /* d0 is always the least significant half of q0. */
8831 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8832 offset = 8;
8833 else
8834 offset = 0;
8835
8836 regcache_raw_write (regcache, double_regnum, buf + offset);
8837 offset = 8 - offset;
8838 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8839}
8840
8841static void
8842arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8843 int regnum, const gdb_byte *buf)
8844{
8845 const int num_regs = gdbarch_num_regs (gdbarch);
8846 char name_buf[4];
8847 gdb_byte reg_buf[8];
8848 int offset, double_regnum;
8849
8850 gdb_assert (regnum >= num_regs);
8851 regnum -= num_regs;
8852
8853 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8854 /* Quad-precision register. */
8855 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8856 else
8857 {
8858 /* Single-precision register. */
8859 gdb_assert (regnum < 32);
8860
8861 /* s0 is always the least significant half of d0. */
8862 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8863 offset = (regnum & 1) ? 0 : 4;
8864 else
8865 offset = (regnum & 1) ? 4 : 0;
8866
8c042590 8867 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8868 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8869 strlen (name_buf));
8870
8871 regcache_raw_read (regcache, double_regnum, reg_buf);
8872 memcpy (reg_buf + offset, buf, 4);
8873 regcache_raw_write (regcache, double_regnum, reg_buf);
8874 }
8875}
8876
123dc839
DJ
8877static struct value *
8878value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8879{
9a3c8263 8880 const int *reg_p = (const int *) baton;
123dc839
DJ
8881 return value_of_register (*reg_p, frame);
8882}
97e03143 8883\f
70f80edf
JT
8884static enum gdb_osabi
8885arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8886{
2af48f68 8887 unsigned int elfosabi;
70f80edf 8888 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8889
70f80edf 8890 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8891
28e97307
DJ
8892 if (elfosabi == ELFOSABI_ARM)
8893 /* GNU tools use this value. Check note sections in this case,
8894 as well. */
8895 bfd_map_over_sections (abfd,
8896 generic_elf_osabi_sniff_abi_tag_sections,
8897 &osabi);
97e03143 8898
28e97307 8899 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8900 return osabi;
97e03143
RE
8901}
8902
54483882
YQ
8903static int
8904arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8905 struct reggroup *group)
8906{
2c291032
YQ
8907 /* FPS register's type is INT, but belongs to float_reggroup. Besides
8908 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8909 all_reggroup, of course. */
54483882 8910 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8911 return (group == float_reggroup
8912 || group == save_reggroup
8913 || group == restore_reggroup
8914 || group == all_reggroup);
54483882
YQ
8915 else
8916 return default_register_reggroup_p (gdbarch, regnum, group);
8917}
8918
25f8c692
JL
8919\f
8920/* For backward-compatibility we allow two 'g' packet lengths with
8921 the remote protocol depending on whether FPA registers are
8922 supplied. M-profile targets do not have FPA registers, but some
8923 stubs already exist in the wild which use a 'g' packet which
8924 supplies them albeit with dummy values. The packet format which
8925 includes FPA registers should be considered deprecated for
8926 M-profile targets. */
8927
8928static void
8929arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8930{
8931 if (gdbarch_tdep (gdbarch)->is_m)
8932 {
8933 /* If we know from the executable this is an M-profile target,
8934 cater for remote targets whose register set layout is the
8935 same as the FPA layout. */
8936 register_remote_g_packet_guess (gdbarch,
03145bf4 8937 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8938 (16 * INT_REGISTER_SIZE)
8939 + (8 * FP_REGISTER_SIZE)
8940 + (2 * INT_REGISTER_SIZE),
8941 tdesc_arm_with_m_fpa_layout);
8942
8943 /* The regular M-profile layout. */
8944 register_remote_g_packet_guess (gdbarch,
8945 /* r0-r12,sp,lr,pc; xpsr */
8946 (16 * INT_REGISTER_SIZE)
8947 + INT_REGISTER_SIZE,
8948 tdesc_arm_with_m);
3184d3f9
JL
8949
8950 /* M-profile plus M4F VFP. */
8951 register_remote_g_packet_guess (gdbarch,
8952 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8953 (16 * INT_REGISTER_SIZE)
8954 + (16 * VFP_REGISTER_SIZE)
8955 + (2 * INT_REGISTER_SIZE),
8956 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8957 }
8958
8959 /* Otherwise we don't have a useful guess. */
8960}
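
/* For reference, with the usual register sizes (INT_REGISTER_SIZE = 4,
   FP_REGISTER_SIZE = 12, VFP_REGISTER_SIZE = 8 -- see arm-tdep.h; the
   arithmetic below is only illustrative), the guesses registered above
   correspond to these 'g' packet payload sizes:

     FPA layout:        16 * 4 + 8 * 12 + 2 * 4 = 168 bytes
     plain M-profile:   16 * 4 + 4              =  68 bytes
     M-profile + VFP:   16 * 4 + 16 * 8 + 2 * 4 = 200 bytes  */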
8961
7eb89530
YQ
8962/* Implement the code_of_frame_writable gdbarch method. */
8963
8964static int
8965arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8966{
8967 if (gdbarch_tdep (gdbarch)->is_m
8968 && get_frame_type (frame) == SIGTRAMP_FRAME)
8969 {
8970 /* M-profile exception frames return to some magic PCs, which
8971 aren't writable at all. */
8972 return 0;
8973 }
8974 else
8975 return 1;
8976}
8977
70f80edf 8978\f
da3c6d4a
MS
8979/* Initialize the current architecture based on INFO. If possible,
8980 re-use an architecture from ARCHES, which is a list of
8981 architectures already created during this debugging session.
97e03143 8982
da3c6d4a
MS
8983 Called e.g. at program startup, when reading a core file, and when
8984 reading a binary file. */
97e03143 8985
39bbf761
RE
8986static struct gdbarch *
8987arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8988{
97e03143 8989 struct gdbarch_tdep *tdep;
39bbf761 8990 struct gdbarch *gdbarch;
28e97307
DJ
8991 struct gdbarch_list *best_arch;
8992 enum arm_abi_kind arm_abi = arm_abi_global;
8993 enum arm_float_model fp_model = arm_fp_model;
123dc839 8994 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8995 int i, is_m = 0;
330c6ca9 8996 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8997 int have_wmmx_registers = 0;
58d6951d 8998 int have_neon = 0;
ff6f572f 8999 int have_fpa_registers = 1;
9779414d
DJ
9000 const struct target_desc *tdesc = info.target_desc;
9001
9002 /* If we have an object to base this architecture on, try to determine
9003 its ABI. */
9004
9005 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9006 {
9007 int ei_osabi, e_flags;
9008
9009 switch (bfd_get_flavour (info.abfd))
9010 {
9779414d
DJ
9011 case bfd_target_coff_flavour:
9012 /* Assume it's an old APCS-style ABI. */
9013 /* XXX WinCE? */
9014 arm_abi = ARM_ABI_APCS;
9015 break;
9016
9017 case bfd_target_elf_flavour:
9018 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9019 e_flags = elf_elfheader (info.abfd)->e_flags;
9020
9021 if (ei_osabi == ELFOSABI_ARM)
9022 {
9023 /* GNU tools used to use this value, but do not for EABI
9024 objects. There's nowhere to tag an EABI version
9025 anyway, so assume APCS. */
9026 arm_abi = ARM_ABI_APCS;
9027 }
d403db27 9028 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
9029 {
9030 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9031 int attr_arch, attr_profile;
9032
9033 switch (eabi_ver)
9034 {
9035 case EF_ARM_EABI_UNKNOWN:
9036 /* Assume GNU tools. */
9037 arm_abi = ARM_ABI_APCS;
9038 break;
9039
9040 case EF_ARM_EABI_VER4:
9041 case EF_ARM_EABI_VER5:
9042 arm_abi = ARM_ABI_AAPCS;
9043 /* EABI binaries default to VFP float ordering.
9044 They may also contain build attributes that can
9045 be used to identify if the VFP argument-passing
9046 ABI is in use. */
9047 if (fp_model == ARM_FLOAT_AUTO)
9048 {
9049#ifdef HAVE_ELF
9050 switch (bfd_elf_get_obj_attr_int (info.abfd,
9051 OBJ_ATTR_PROC,
9052 Tag_ABI_VFP_args))
9053 {
b35b0298 9054 case AEABI_VFP_args_base:
9779414d
DJ
9055 /* "The user intended FP parameter/result
9056 passing to conform to AAPCS, base
9057 variant". */
9058 fp_model = ARM_FLOAT_SOFT_VFP;
9059 break;
b35b0298 9060 case AEABI_VFP_args_vfp:
9779414d
DJ
9061 /* "The user intended FP parameter/result
9062 passing to conform to AAPCS, VFP
9063 variant". */
9064 fp_model = ARM_FLOAT_VFP;
9065 break;
b35b0298 9066 case AEABI_VFP_args_toolchain:
9779414d
DJ
9067 /* "The user intended FP parameter/result
9068 passing to conform to tool chain-specific
9069 conventions" - we don't know any such
9070 conventions, so leave it as "auto". */
9071 break;
b35b0298 9072 case AEABI_VFP_args_compatible:
5c294fee
TG
9073 /* "Code is compatible with both the base
9074 and VFP variants; the user did not permit
9075 non-variadic functions to pass FP
9076 parameters/results" - leave it as
9077 "auto". */
9078 break;
9779414d
DJ
9079 default:
9080 /* Attribute value not mentioned in the
5c294fee 9081 November 2012 ABI, so leave it as
9779414d
DJ
9082 "auto". */
9083 break;
9084 }
9085#else
9086 fp_model = ARM_FLOAT_SOFT_VFP;
9087#endif
9088 }
9089 break;
9090
9091 default:
9092 /* Leave it as "auto". */
9093 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9094 break;
9095 }
9096
9097#ifdef HAVE_ELF
9098 /* Detect M-profile programs. This only works if the
9099 executable file includes build attributes; GCC does
9100 copy them to the executable, but e.g. RealView does
9101 not. */
9102 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9103 Tag_CPU_arch);
0963b4bd
MS
9104 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9105 OBJ_ATTR_PROC,
9779414d
DJ
9106 Tag_CPU_arch_profile);
9107 /* GCC specifies the profile for v6-M; RealView only
9108 specifies the profile for architectures starting with
9109 V7 (as opposed to architectures with a tag
9110 numerically greater than TAG_CPU_ARCH_V7). */
9111 if (!tdesc_has_registers (tdesc)
9112 && (attr_arch == TAG_CPU_ARCH_V6_M
9113 || attr_arch == TAG_CPU_ARCH_V6S_M
9114 || attr_profile == 'M'))
25f8c692 9115 is_m = 1;
9779414d
DJ
9116#endif
9117 }
9118
9119 if (fp_model == ARM_FLOAT_AUTO)
9120 {
9121 int e_flags = elf_elfheader (info.abfd)->e_flags;
9122
9123 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9124 {
9125 case 0:
9126 /* Leave it as "auto". Strictly speaking this case
9127 means FPA, but almost nobody uses that now, and
9128 many toolchains fail to set the appropriate bits
9129 for the floating-point model they use. */
9130 break;
9131 case EF_ARM_SOFT_FLOAT:
9132 fp_model = ARM_FLOAT_SOFT_FPA;
9133 break;
9134 case EF_ARM_VFP_FLOAT:
9135 fp_model = ARM_FLOAT_VFP;
9136 break;
9137 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9138 fp_model = ARM_FLOAT_SOFT_VFP;
9139 break;
9140 }
9141 }
9142
9143 if (e_flags & EF_ARM_BE8)
9144 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9145
9146 break;
9147
9148 default:
9149 /* Leave it as "auto". */
9150 break;
9151 }
9152 }
123dc839
DJ
9153
9154 /* Check any target description for validity. */
9779414d 9155 if (tdesc_has_registers (tdesc))
123dc839
DJ
9156 {
9157 /* For most registers we require GDB's default names; but also allow
9158 the numeric names for sp / lr / pc, as a convenience. */
9159 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9160 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9161 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9162
9163 const struct tdesc_feature *feature;
58d6951d 9164 int valid_p;
123dc839 9165
9779414d 9166 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9167 "org.gnu.gdb.arm.core");
9168 if (feature == NULL)
9779414d
DJ
9169 {
9170 feature = tdesc_find_feature (tdesc,
9171 "org.gnu.gdb.arm.m-profile");
9172 if (feature == NULL)
9173 return NULL;
9174 else
9175 is_m = 1;
9176 }
123dc839
DJ
9177
9178 tdesc_data = tdesc_data_alloc ();
9179
9180 valid_p = 1;
9181 for (i = 0; i < ARM_SP_REGNUM; i++)
9182 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9183 arm_register_names[i]);
9184 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9185 ARM_SP_REGNUM,
9186 arm_sp_names);
9187 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9188 ARM_LR_REGNUM,
9189 arm_lr_names);
9190 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9191 ARM_PC_REGNUM,
9192 arm_pc_names);
9779414d
DJ
9193 if (is_m)
9194 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9195 ARM_PS_REGNUM, "xpsr");
9196 else
9197 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9198 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9199
9200 if (!valid_p)
9201 {
9202 tdesc_data_cleanup (tdesc_data);
9203 return NULL;
9204 }
9205
9779414d 9206 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9207 "org.gnu.gdb.arm.fpa");
9208 if (feature != NULL)
9209 {
9210 valid_p = 1;
9211 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9212 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9213 arm_register_names[i]);
9214 if (!valid_p)
9215 {
9216 tdesc_data_cleanup (tdesc_data);
9217 return NULL;
9218 }
9219 }
ff6f572f
DJ
9220 else
9221 have_fpa_registers = 0;
9222
9779414d 9223 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9224 "org.gnu.gdb.xscale.iwmmxt");
9225 if (feature != NULL)
9226 {
9227 static const char *const iwmmxt_names[] = {
9228 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9229 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9230 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9231 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9232 };
9233
9234 valid_p = 1;
9235 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9236 valid_p
9237 &= tdesc_numbered_register (feature, tdesc_data, i,
9238 iwmmxt_names[i - ARM_WR0_REGNUM]);
9239
9240 /* Check for the control registers, but do not fail if they
9241 are missing. */
9242 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9243 tdesc_numbered_register (feature, tdesc_data, i,
9244 iwmmxt_names[i - ARM_WR0_REGNUM]);
9245
9246 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9247 valid_p
9248 &= tdesc_numbered_register (feature, tdesc_data, i,
9249 iwmmxt_names[i - ARM_WR0_REGNUM]);
9250
9251 if (!valid_p)
9252 {
9253 tdesc_data_cleanup (tdesc_data);
9254 return NULL;
9255 }
a56cc1ce
YQ
9256
9257 have_wmmx_registers = 1;
ff6f572f 9258 }
58d6951d
DJ
9259
9260 /* If we have a VFP unit, check whether the single precision registers
9261 are present. If not, then we will synthesize them as pseudo
9262 registers. */
9779414d 9263 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9264 "org.gnu.gdb.arm.vfp");
9265 if (feature != NULL)
9266 {
9267 static const char *const vfp_double_names[] = {
9268 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9269 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9270 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9271 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9272 };
9273
9274 /* Require the double precision registers. There must be either
9275 16 or 32. */
9276 valid_p = 1;
9277 for (i = 0; i < 32; i++)
9278 {
9279 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9280 ARM_D0_REGNUM + i,
9281 vfp_double_names[i]);
9282 if (!valid_p)
9283 break;
9284 }
2b9e5ea6
UW
9285 if (!valid_p && i == 16)
9286 valid_p = 1;
58d6951d 9287
2b9e5ea6
UW
9288 /* Also require FPSCR. */
9289 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9290 ARM_FPSCR_REGNUM, "fpscr");
9291 if (!valid_p)
58d6951d
DJ
9292 {
9293 tdesc_data_cleanup (tdesc_data);
9294 return NULL;
9295 }
9296
9297 if (tdesc_unnumbered_register (feature, "s0") == 0)
9298 have_vfp_pseudos = 1;
9299
330c6ca9 9300 vfp_register_count = i;
58d6951d
DJ
9301
9302 /* If we have VFP, also check for NEON. The architecture allows
9303 NEON without VFP (integer vector operations only), but GDB
9304 does not support that. */
9779414d 9305 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9306 "org.gnu.gdb.arm.neon");
9307 if (feature != NULL)
9308 {
9309 /* NEON requires 32 double-precision registers. */
9310 if (i != 32)
9311 {
9312 tdesc_data_cleanup (tdesc_data);
9313 return NULL;
9314 }
9315
9316 /* If there are quad registers defined by the stub, use
9317 their type; otherwise (normally) provide them with
9318 the default type. */
9319 if (tdesc_unnumbered_register (feature, "q0") == 0)
9320 have_neon_pseudos = 1;
9321
9322 have_neon = 1;
9323 }
9324 }
123dc839 9325 }
39bbf761 9326
28e97307
DJ
9327 /* If there is already a candidate, use it. */
9328 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9329 best_arch != NULL;
9330 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9331 {
b8926edc
DJ
9332 if (arm_abi != ARM_ABI_AUTO
9333 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9334 continue;
9335
b8926edc
DJ
9336 if (fp_model != ARM_FLOAT_AUTO
9337 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9338 continue;
9339
58d6951d
DJ
9340 /* There are various other properties in tdep that we do not
9341 need to check here: those derived from a target description,
9342 since gdbarches with a different target description are
9343 automatically disqualified. */
9344
9779414d
DJ
9345 /* Do check is_m, though, since it might come from the binary. */
9346 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9347 continue;
9348
28e97307
DJ
9349 /* Found a match. */
9350 break;
9351 }
97e03143 9352
28e97307 9353 if (best_arch != NULL)
123dc839
DJ
9354 {
9355 if (tdesc_data != NULL)
9356 tdesc_data_cleanup (tdesc_data);
9357 return best_arch->gdbarch;
9358 }
28e97307 9359
8d749320 9360 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9361 gdbarch = gdbarch_alloc (&info, tdep);
9362
28e97307
DJ
9363 /* Record additional information about the architecture we are defining.
9364 These are gdbarch discriminators, like the OSABI. */
9365 tdep->arm_abi = arm_abi;
9366 tdep->fp_model = fp_model;
9779414d 9367 tdep->is_m = is_m;
ff6f572f 9368 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9369 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9370 gdb_assert (vfp_register_count == 0
9371 || vfp_register_count == 16
9372 || vfp_register_count == 32);
9373 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9374 tdep->have_vfp_pseudos = have_vfp_pseudos;
9375 tdep->have_neon_pseudos = have_neon_pseudos;
9376 tdep->have_neon = have_neon;
08216dd7 9377
25f8c692
JL
9378 arm_register_g_packet_guesses (gdbarch);
9379
08216dd7 9380 /* Breakpoints. */
9d4fde75 9381 switch (info.byte_order_for_code)
67255d04
RE
9382 {
9383 case BFD_ENDIAN_BIG:
66e810cd
RE
9384 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9385 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9386 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9387 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9388
67255d04
RE
9389 break;
9390
9391 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9392 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9393 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9394 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9395 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9396
67255d04
RE
9397 break;
9398
9399 default:
9400 internal_error (__FILE__, __LINE__,
edefbb7c 9401 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9402 }
9403
d7b486e7
RE
9404 /* On ARM targets char defaults to unsigned. */
9405 set_gdbarch_char_signed (gdbarch, 0);
9406
53375380
PA
9407 /* wchar_t is unsigned under the AAPCS. */
9408 if (tdep->arm_abi == ARM_ABI_AAPCS)
9409 set_gdbarch_wchar_signed (gdbarch, 0);
9410 else
9411 set_gdbarch_wchar_signed (gdbarch, 1);
9412 set_gdbarch_wchar_bit (gdbarch, 32);
9413
cca44b1b
JB
9414 /* Note: for displaced stepping, this includes the breakpoint, and one word
9415 of additional scratch space. This setting isn't used for anything beside
9416 displaced stepping at present. */
9417 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9418
9df628e0 9419 /* This should be low enough for everything. */
97e03143 9420 tdep->lowest_pc = 0x20;
94c30b78 9421 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9422
7c00367c
MK
9423 /* The default, for both APCS and AAPCS, is to return small
9424 structures in registers. */
9425 tdep->struct_return = reg_struct_return;
9426
2dd604e7 9427 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9428 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9429
7eb89530
YQ
9430 if (is_m)
9431 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9432
756fe439
DJ
9433 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9434
148754e5 9435 /* Frame handling. */
a262aec2 9436 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9437 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9438 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9439
eb5492fa 9440 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9441
34e8f22d 9442 /* Address manipulation. */
34e8f22d
RE
9443 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9444
34e8f22d
RE
9445 /* Advance PC across function entry code. */
9446 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9447
c9cf6e20
MG
9448 /* Detect whether PC is at a point where the stack has been destroyed. */
9449 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9450
190dce09
UW
9451 /* Skip trampolines. */
9452 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9453
34e8f22d
RE
9454 /* The stack grows downward. */
9455 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9456
9457 /* Breakpoint manipulation. */
04180708
YQ
9458 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9459 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
833b7ab5
YQ
9460 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9461 arm_breakpoint_kind_from_current_state);
34e8f22d
RE
9462
9463 /* Information about registers, etc. */
34e8f22d
RE
9464 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9465 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9466 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9467 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9468 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9469
ff6f572f
DJ
9470 /* This "info float" is FPA-specific. Use the generic version if we
9471 do not have FPA. */
9472 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9473 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9474
26216b98 9475 /* Internal <-> external register number maps. */
ff6f572f 9476 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9477 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9478
34e8f22d
RE
9479 set_gdbarch_register_name (gdbarch, arm_register_name);
9480
9481 /* Returning results. */
2af48f68 9482 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9483
03d48a7d
RE
9484 /* Disassembly. */
9485 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9486
34e8f22d
RE
9487 /* Minsymbol frobbing. */
9488 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9489 set_gdbarch_coff_make_msymbol_special (gdbarch,
9490 arm_coff_make_msymbol_special);
60c5725c 9491 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9492
f9d67f43
DJ
9493 /* Thumb-2 IT block support. */
9494 set_gdbarch_adjust_breakpoint_address (gdbarch,
9495 arm_adjust_breakpoint_address);
9496
0d5de010
DJ
9497 /* Virtual tables. */
9498 set_gdbarch_vbit_in_delta (gdbarch, 1);
9499
97e03143 9500 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9501 gdbarch_init_osabi (info, gdbarch);
97e03143 9502
b39cc962
DJ
9503 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9504
eb5492fa 9505 /* Add some default predicates. */
2ae28aa9
YQ
9506 if (is_m)
9507 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9508 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9509 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9510 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9511 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9512 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9513
97e03143
RE
9514 /* Now we have tuned the configuration, set a few final things,
9515 based on what the OS ABI has told us. */
9516
b8926edc
DJ
9517 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9518 binaries are always marked. */
9519 if (tdep->arm_abi == ARM_ABI_AUTO)
9520 tdep->arm_abi = ARM_ABI_APCS;
9521
e3039479
UW
9522 /* Watchpoints are not steppable. */
9523 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9524
b8926edc
DJ
9525 /* We used to default to FPA for generic ARM, but almost nobody
9526 uses that now, and we now provide a way for the user to force
9527 the model. So default to the most useful variant. */
9528 if (tdep->fp_model == ARM_FLOAT_AUTO)
9529 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9530
9df628e0
RE
9531 if (tdep->jb_pc >= 0)
9532 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9533
08216dd7 9534 /* Floating point sizes and format. */
8da61cc4 9535 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9536 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9537 {
8da61cc4
DJ
9538 set_gdbarch_double_format
9539 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9540 set_gdbarch_long_double_format
9541 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9542 }
9543 else
9544 {
9545 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9546 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9547 }
9548
58d6951d
DJ
9549 if (have_vfp_pseudos)
9550 {
9551 /* NOTE: These are the only pseudo registers used by
9552 the ARM target at the moment. If more are added, a
9553 little more care in numbering will be needed. */
9554
9555 int num_pseudos = 32;
9556 if (have_neon_pseudos)
9557 num_pseudos += 16;
9558 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9559 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9560 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9561 }
9562
123dc839 9563 if (tdesc_data)
58d6951d
DJ
9564 {
9565 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9566
9779414d 9567 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9568
9569 /* Override tdesc_register_type to adjust the types of VFP
9570 registers for NEON. */
9571 set_gdbarch_register_type (gdbarch, arm_register_type);
9572 }
123dc839
DJ
9573
9574 /* Add standard register aliases. We add aliases even for those
9575 names which are used by the current architecture - it's simpler,
9576 and does no harm, since nothing ever lists user registers. */
9577 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9578 user_reg_add (gdbarch, arm_register_aliases[i].name,
9579 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9580
65b48a81
PB
9581 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9582 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9583
39bbf761
RE
9584 return gdbarch;
9585}
9586
97e03143 9587static void
2af46ca0 9588arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9589{
2af46ca0 9590 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9591
9592 if (tdep == NULL)
9593 return;
9594
edefbb7c 9595 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9596 (unsigned long) tdep->lowest_pc);
9597}
9598
0d4c07af 9599#if GDB_SELF_TEST
b121eeb9
YQ
9600namespace selftests
9601{
9602static void arm_record_test (void);
9603}
0d4c07af 9604#endif
b121eeb9 9605
a78f21af
AC
9606extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9607
c906108c 9608void
ed9a39eb 9609_initialize_arm_tdep (void)
c906108c 9610{
bc90b915 9611 long length;
53904c9e
AC
9612 const char *setname;
9613 const char *setdesc;
65b48a81 9614 int i, j;
edefbb7c
AC
9615 char regdesc[1024], *rdptr = regdesc;
9616 size_t rest = sizeof (regdesc);
085dd6e6 9617
42cf1509 9618 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9619
60c5725c 9620 arm_objfile_data_key
c1bd65d0 9621 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9622
0e9e9abd
UW
9623 /* Add ourselves to objfile event chain. */
9624 observer_attach_new_objfile (arm_exidx_new_objfile);
9625 arm_exidx_data_key
9626 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9627
70f80edf
JT
9628 /* Register an ELF OS ABI sniffer for ARM binaries. */
9629 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9630 bfd_target_elf_flavour,
9631 arm_elf_osabi_sniffer);
9632
9779414d
DJ
9633 /* Initialize the standard target descriptions. */
9634 initialize_tdesc_arm_with_m ();
25f8c692 9635 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9636 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9637 initialize_tdesc_arm_with_iwmmxt ();
9638 initialize_tdesc_arm_with_vfpv2 ();
9639 initialize_tdesc_arm_with_vfpv3 ();
9640 initialize_tdesc_arm_with_neon ();
9779414d 9641
afd7eef0
RE
9642 /* Add root prefix command for all "set arm"/"show arm" commands. */
9643 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9644 _("Various ARM-specific commands."),
afd7eef0
RE
9645 &setarmcmdlist, "set arm ", 0, &setlist);
9646
9647 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9648 _("Various ARM-specific commands."),
afd7eef0 9649 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9650
c5aa993b 9651
65b48a81
PB
9652 arm_disassembler_options = xstrdup ("reg-names-std");
9653 const disasm_options_t *disasm_options = disassembler_options_arm ();
9654 int num_disassembly_styles = 0;
9655 for (i = 0; disasm_options->name[i] != NULL; i++)
9656 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9657 num_disassembly_styles++;
9658
9659 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
8d749320 9660 valid_disassembly_styles = XNEWVEC (const char *,
65b48a81
PB
9661 num_disassembly_styles + 1);
9662 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9663 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9664 {
9665 size_t offset = strlen ("reg-names-");
9666 const char *style = disasm_options->name[i];
9667 valid_disassembly_styles[j++] = &style[offset];
9668 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9669 disasm_options->description[i]);
9670 rdptr += length;
9671 rest -= length;
9672 }
94c30b78 9673 /* Mark the end of valid options. */
65b48a81 9674 valid_disassembly_styles[num_disassembly_styles] = NULL;
c906108c 9675
edefbb7c 9676 /* Create the help text. */
d7e74731
PA
9677 std::string helptext = string_printf ("%s%s%s",
9678 _("The valid values are:\n"),
9679 regdesc,
9680 _("The default is \"std\"."));
ed9a39eb 9681
edefbb7c
AC
9682 add_setshow_enum_cmd("disassembler", no_class,
9683 valid_disassembly_styles, &disassembly_style,
9684 _("Set the disassembly style."),
9685 _("Show the disassembly style."),
09b0e4b0 9686 helptext.c_str (),
2c5b56ce 9687 set_disassembly_style_sfunc,
65b48a81 9688 show_disassembly_style_sfunc,
7376b4c2 9689 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9690
9691 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9692 _("Set usage of ARM 32-bit mode."),
9693 _("Show usage of ARM 32-bit mode."),
9694 _("When off, a 26-bit PC will be used."),
2c5b56ce 9695 NULL,
0963b4bd
MS
9696 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9697 mode is %s. */
26304000 9698 &setarmcmdlist, &showarmcmdlist);
c906108c 9699
fd50bc42 9700 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9701 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9702 _("Set the floating point type."),
9703 _("Show the floating point type."),
9704 _("auto - Determine the FP typefrom the OS-ABI.\n\
9705softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9706fpa - FPA co-processor (GCC compiled).\n\
9707softvfp - Software FP with pure-endian doubles.\n\
9708vfp - VFP co-processor."),
edefbb7c 9709 set_fp_model_sfunc, show_fp_model,
7376b4c2 9710 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9711
28e97307
DJ
9712 /* Add a command to allow the user to force the ABI. */
9713 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9714 _("Set the ABI."),
9715 _("Show the ABI."),
9716 NULL, arm_set_abi, arm_show_abi,
9717 &setarmcmdlist, &showarmcmdlist);
9718
0428b8f5
DJ
9719 /* Add two commands to allow the user to force the assumed
9720 execution mode. */
9721 add_setshow_enum_cmd ("fallback-mode", class_support,
9722 arm_mode_strings, &arm_fallback_mode_string,
9723 _("Set the mode assumed when symbols are unavailable."),
9724 _("Show the mode assumed when symbols are unavailable."),
9725 NULL, NULL, arm_show_fallback_mode,
9726 &setarmcmdlist, &showarmcmdlist);
9727 add_setshow_enum_cmd ("force-mode", class_support,
9728 arm_mode_strings, &arm_force_mode_string,
9729 _("Set the mode assumed even when symbols are available."),
9730 _("Show the mode assumed even when symbols are available."),
9731 NULL, NULL, arm_show_force_mode,
9732 &setarmcmdlist, &showarmcmdlist);
9733
6529d2dd 9734 /* Debugging flag. */
edefbb7c
AC
9735 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9736 _("Set ARM debugging."),
9737 _("Show ARM debugging."),
9738 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9739 NULL,
7915a72c 9740 NULL, /* FIXME: i18n: "ARM debugging is %s. */
26304000 9741 &setdebuglist, &showdebuglist);
b121eeb9
YQ
9742
9743#if GDB_SELF_TEST
9744 register_self_test (selftests::arm_record_test);
9745#endif
9746
c906108c 9747}
72508ac0
PO
9748
9749/* ARM-reversible process record data structures. */
9750
9751#define ARM_INSN_SIZE_BYTES 4
9752#define THUMB_INSN_SIZE_BYTES 2
9753#define THUMB2_INSN_SIZE_BYTES 4
9754
9755
71e396f9
LM
9756/* Position of the bit within a 32-bit ARM instruction
9757 that defines whether the instruction is a load or store. */
72508ac0
PO
9758#define INSN_S_L_BIT_NUM 20
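
/* For example (a sketch, not a routine used below): in a 32-bit ARM
   single data transfer instruction, bit 20 is the L bit, so

     bit (insn, INSN_S_L_BIT_NUM)

   yields 1 for a load (e.g. LDR) and 0 for a store (e.g. STR).  */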
9759
9760#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9761 do \
9762 { \
9763 unsigned int reg_len = LENGTH; \
9764 if (reg_len) \
9765 { \
9766 REGS = XNEWVEC (uint32_t, reg_len); \
9767 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9768 } \
9769 } \
9770 while (0)
9771
9772#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9773 do \
9774 { \
9775 unsigned int mem_len = LENGTH; \
9776 if (mem_len) \
9777 { \
9778 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9779 memcpy(&MEMS->len, &RECORD_BUF[0], \
9780 sizeof(struct arm_mem_r) * LENGTH); \
9781 } \
9782 } \
9783 while (0)
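
/* Typical use in the record routines below (a sketch with made-up
   values, not a real decoder): the decoder fills a local buffer and the
   corresponding count, and then copies it into the record, e.g.

     uint32_t record_buf[8];
     record_buf[0] = ARM_PS_REGNUM;
     arm_insn_r->reg_rec_count = 1;
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                record_buf);  */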
9784
9785/* Check whether INSN is already recorded or yet to be decoded (boolean expression). */
9786#define INSN_RECORDED(ARM_RECORD) \
9787 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9788
9789/* ARM memory record structure. */
9790struct arm_mem_r
9791{
9792 uint32_t len; /* Record length. */
bfbbec00 9793 uint32_t addr; /* Memory address. */
72508ac0
PO
9794};
9795
9796/* ARM instruction record contains opcode of current insn
9797 and execution state (before entry to decode_insn()),
9798 contains list of to-be-modified registers and
9799 memory blocks (on return from decode_insn()). */
9800
9801typedef struct insn_decode_record_t
9802{
9803 struct gdbarch *gdbarch;
9804 struct regcache *regcache;
9805 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9806 uint32_t arm_insn; /* Should accommodate thumb. */
9807 uint32_t cond; /* Condition code. */
9808 uint32_t opcode; /* Insn opcode. */
9809 uint32_t decode; /* Insn decode bits. */
9810 uint32_t mem_rec_count; /* Number of memory records. */
9811 uint32_t reg_rec_count; /* Number of register records. */
9812 uint32_t *arm_regs; /* Registers to be saved for this record. */
9813 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9814} insn_decode_record;
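
/* As an illustration (a hypothetical instruction, not generated here):
   after decoding a plain store such as "str r1, [r0]", the record would
   typically contain no modified registers (reg_rec_count == 0) and one
   memory block (mem_rec_count == 1) whose arm_mems entry holds len = 4
   and addr = the value of r0 at the time of the store.  */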
9815
9816
9817/* Checks ARM SBZ and SBO mandatory fields. */
9818
9819static int
9820sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9821{
9822 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9823
9824 if (!len)
9825 return 1;
9826
9827 if (!sbo)
9828 ones = ~ones;
9829
9830 while (ones)
9831 {
9832 if (!(ones & sbo))
9833 {
9834 return 0;
9835 }
9836 ones = ones >> 1;
9837 }
9838 return 1;
9839}
9840
c6ec2b30
OJ
9841enum arm_record_result
9842{
9843 ARM_RECORD_SUCCESS = 0,
9844 ARM_RECORD_FAILURE = 1
9845};
9846
72508ac0
PO
9847typedef enum
9848{
9849 ARM_RECORD_STRH=1,
9850 ARM_RECORD_STRD
9851} arm_record_strx_t;
9852
9853typedef enum
9854{
9855 ARM_RECORD=1,
9856 THUMB_RECORD,
9857 THUMB2_RECORD
9858} record_type_t;
9859
9860
9861static int
9862arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9863 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9864{
9865
9866 struct regcache *reg_cache = arm_insn_r->regcache;
9867 ULONGEST u_regval[2]= {0};
9868
9869 uint32_t reg_src1 = 0, reg_src2 = 0;
9870 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9871
9872 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9873 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9874
9875 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9876 {
9877 /* 1) Handle misc store, immediate offset. */
9878 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9879 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9880 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9881 regcache_raw_read_unsigned (reg_cache, reg_src1,
9882 &u_regval[0]);
9883 if (ARM_PC_REGNUM == reg_src1)
9884 {
9885 /* R15 was used as Rn; the value read is the current PC + 8. */
9886 u_regval[0] = u_regval[0] + 8;
9887 }
9888 offset_8 = (immed_high << 4) | immed_low;
9889 /* Calculate target store address. */
9890 if (14 == arm_insn_r->opcode)
9891 {
9892 tgt_mem_addr = u_regval[0] + offset_8;
9893 }
9894 else
9895 {
9896 tgt_mem_addr = u_regval[0] - offset_8;
9897 }
9898 if (ARM_RECORD_STRH == str_type)
9899 {
9900 record_buf_mem[0] = 2;
9901 record_buf_mem[1] = tgt_mem_addr;
9902 arm_insn_r->mem_rec_count = 1;
9903 }
9904 else if (ARM_RECORD_STRD == str_type)
9905 {
9906 record_buf_mem[0] = 4;
9907 record_buf_mem[1] = tgt_mem_addr;
9908 record_buf_mem[2] = 4;
9909 record_buf_mem[3] = tgt_mem_addr + 4;
9910 arm_insn_r->mem_rec_count = 2;
9911 }
9912 }
9913 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9914 {
9915 /* 2) Store, register offset. */
9916 /* Get Rm. */
9917 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9918 /* Get Rn. */
9919 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9920 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9921 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9922 if (15 == reg_src2)
9923 {
9924 /* R15 was used as Rn; the value read is the current PC + 8. */
9925 u_regval[0] = u_regval[0] + 8;
9926 }
9927 /* Calculate target store address, Rn +/- Rm, register offset. */
9928 if (12 == arm_insn_r->opcode)
9929 {
9930 tgt_mem_addr = u_regval[0] + u_regval[1];
9931 }
9932 else
9933 {
9934 tgt_mem_addr = u_regval[1] - u_regval[0];
9935 }
9936 if (ARM_RECORD_STRH == str_type)
9937 {
9938 record_buf_mem[0] = 2;
9939 record_buf_mem[1] = tgt_mem_addr;
9940 arm_insn_r->mem_rec_count = 1;
9941 }
9942 else if (ARM_RECORD_STRD == str_type)
9943 {
9944 record_buf_mem[0] = 4;
9945 record_buf_mem[1] = tgt_mem_addr;
9946 record_buf_mem[2] = 4;
9947 record_buf_mem[3] = tgt_mem_addr + 4;
9948 arm_insn_r->mem_rec_count = 2;
9949 }
9950 }
9951 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9952 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9953 {
9954 /* 3) Store, immediate pre-indexed. */
9955 /* 5) Store, immediate post-indexed. */
9956 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9957 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9958 offset_8 = (immed_high << 4) | immed_low;
9959 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9960 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9961 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9962 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9963 {
9964 tgt_mem_addr = u_regval[0] + offset_8;
9965 }
9966 else
9967 {
9968 tgt_mem_addr = u_regval[0] - offset_8;
9969 }
9970 if (ARM_RECORD_STRH == str_type)
9971 {
9972 record_buf_mem[0] = 2;
9973 record_buf_mem[1] = tgt_mem_addr;
9974 arm_insn_r->mem_rec_count = 1;
9975 }
9976 else if (ARM_RECORD_STRD == str_type)
9977 {
9978 record_buf_mem[0] = 4;
9979 record_buf_mem[1] = tgt_mem_addr;
9980 record_buf_mem[2] = 4;
9981 record_buf_mem[3] = tgt_mem_addr + 4;
9982 arm_insn_r->mem_rec_count = 2;
9983 }
9984 /* Record Rn also as it changes. */
9985 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9986 arm_insn_r->reg_rec_count = 1;
9987 }
9988 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9989 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9990 {
9991 /* 4) Store, register pre-indexed. */
9992 /* 6) Store, register post-indexed. */
9993 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9994 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9995 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9996 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9997 /* Calculate target store address, Rn +/- Rm, register offset. */
9998 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9999 {
10000 tgt_mem_addr = u_regval[0] + u_regval[1];
10001 }
10002 else
10003 {
10004 tgt_mem_addr = u_regval[1] - u_regval[0];
10005 }
10006 if (ARM_RECORD_STRH == str_type)
10007 {
10008 record_buf_mem[0] = 2;
10009 record_buf_mem[1] = tgt_mem_addr;
10010 arm_insn_r->mem_rec_count = 1;
10011 }
10012 else if (ARM_RECORD_STRD == str_type)
10013 {
10014 record_buf_mem[0] = 4;
10015 record_buf_mem[1] = tgt_mem_addr;
10016 record_buf_mem[2] = 4;
10017 record_buf_mem[3] = tgt_mem_addr + 4;
10018 arm_insn_r->mem_rec_count = 2;
10019 }
10020 /* Record Rn also as it changes. */
10021 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10022 arm_insn_r->reg_rec_count = 1;
10023 }
10024 return 0;
10025}
10026
10027/* Handling ARM extension space insns. */
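/* The extension space covers the unconditional (cond == 0xF) encodings
   and several gaps in the conditional encodings: BLX(1), the multiplies,
   MRS/MSR, BX/BLX(2)/CLZ/BKPT, the saturating arithmetic, the DSP
   multiplies, and SWP/SWPB plus the extra load/store forms.  For each
   recognised insn the registers it may modify are collected in record_buf
   and the memory it may modify is collected in record_buf_mem as
   (length, address) pairs, which are then handed to REG_ALLOC/MEM_ALLOC.  */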
10028
10029static int
10030arm_record_extension_space (insn_decode_record *arm_insn_r)
10031{
10032 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10033 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10034 uint32_t record_buf[8], record_buf_mem[8];
10035 uint32_t reg_src1 = 0;
10036 struct regcache *reg_cache = arm_insn_r->regcache;
10037 ULONGEST u_regval = 0;
10038
10039 gdb_assert (!INSN_RECORDED(arm_insn_r));
10040 /* Handle unconditional insn extension space. */
10041
10042 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10043 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10044 if (arm_insn_r->cond)
10045 {
10046       /* PLD has no effect on architectural state, it just affects
10047 the caches. */
10048 if (5 == ((opcode1 & 0xE0) >> 5))
10049 {
10050 /* BLX(1) */
10051 record_buf[0] = ARM_PS_REGNUM;
10052 record_buf[1] = ARM_LR_REGNUM;
10053 arm_insn_r->reg_rec_count = 2;
10054 }
10055 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10056 }
10057
10058
10059 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10060 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10061 {
10062 ret = -1;
10063 /* Undefined instruction on ARM V5; need to handle if later
10064 versions define it. */
10065 }
10066
10067 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10068 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10069 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10070
10071 /* Handle arithmetic insn extension space. */
10072 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10073 && !INSN_RECORDED(arm_insn_r))
10074 {
10075 /* Handle MLA(S) and MUL(S). */
10076 if (0 <= insn_op1 && 3 >= insn_op1)
10077 {
10078 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10079 record_buf[1] = ARM_PS_REGNUM;
10080 arm_insn_r->reg_rec_count = 2;
10081 }
10082 else if (4 <= insn_op1 && 15 >= insn_op1)
10083 {
10084 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10085 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10086 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10087 record_buf[2] = ARM_PS_REGNUM;
10088 arm_insn_r->reg_rec_count = 3;
10089 }
10090 }
10091
10092 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10093 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10094 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10095
10096 /* Handle control insn extension space. */
10097
10098 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10099 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10100 {
10101 if (!bit (arm_insn_r->arm_insn,25))
10102 {
10103 if (!bits (arm_insn_r->arm_insn, 4, 7))
10104 {
10105 if ((0 == insn_op1) || (2 == insn_op1))
10106 {
10107 /* MRS. */
10108 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10109 arm_insn_r->reg_rec_count = 1;
10110 }
10111 else if (1 == insn_op1)
10112 {
10113                   /* CPSR is going to be changed.  */
10114 record_buf[0] = ARM_PS_REGNUM;
10115 arm_insn_r->reg_rec_count = 1;
10116 }
10117 else if (3 == insn_op1)
10118 {
10119 /* SPSR is going to be changed. */
10120 /* We need to get SPSR value, which is yet to be done. */
10121 return -1;
10122 }
10123 }
10124 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10125 {
10126 if (1 == insn_op1)
10127 {
10128 /* BX. */
10129 record_buf[0] = ARM_PS_REGNUM;
10130 arm_insn_r->reg_rec_count = 1;
10131 }
10132 else if (3 == insn_op1)
10133 {
10134 /* CLZ. */
10135 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10136 arm_insn_r->reg_rec_count = 1;
10137 }
10138 }
10139 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10140 {
10141 /* BLX. */
10142 record_buf[0] = ARM_PS_REGNUM;
10143 record_buf[1] = ARM_LR_REGNUM;
10144 arm_insn_r->reg_rec_count = 2;
10145 }
10146 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10147 {
10148 /* QADD, QSUB, QDADD, QDSUB */
10149 record_buf[0] = ARM_PS_REGNUM;
10150 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10151 arm_insn_r->reg_rec_count = 2;
10152 }
10153 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10154 {
10155 /* BKPT. */
10156 record_buf[0] = ARM_PS_REGNUM;
10157 record_buf[1] = ARM_LR_REGNUM;
10158 arm_insn_r->reg_rec_count = 2;
10159
10160               /* Save SPSR also; how?  */
10161 return -1;
10162 }
10163 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10164 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10165 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10166 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10167 )
10168 {
10169 if (0 == insn_op1 || 1 == insn_op1)
10170 {
10171 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10172                   /* We don't optimize for SMULW<y> where we
10173 need only Rd. */
10174 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10175 record_buf[1] = ARM_PS_REGNUM;
10176 arm_insn_r->reg_rec_count = 2;
10177 }
10178 else if (2 == insn_op1)
10179 {
10180 /* SMLAL<x><y>. */
10181 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10182 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10183 arm_insn_r->reg_rec_count = 2;
10184 }
10185 else if (3 == insn_op1)
10186 {
10187 /* SMUL<x><y>. */
10188 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10189 arm_insn_r->reg_rec_count = 1;
10190 }
10191 }
10192 }
10193 else
10194 {
10195 /* MSR : immediate form. */
10196 if (1 == insn_op1)
10197 {
10198               /* CPSR is going to be changed.  */
10199 record_buf[0] = ARM_PS_REGNUM;
10200 arm_insn_r->reg_rec_count = 1;
10201 }
10202 else if (3 == insn_op1)
10203 {
10204 /* SPSR is going to be changed. */
10205               /* We need to get the SPSR value, which is yet to be done.  */
10206 return -1;
10207 }
10208 }
10209 }
10210
10211 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10212 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10213 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10214
10215 /* Handle load/store insn extension space. */
10216
10217 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10218 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10219 && !INSN_RECORDED(arm_insn_r))
10220 {
10221 /* SWP/SWPB. */
10222 if (0 == insn_op1)
10223 {
10224           /* This insn changes a register as well as memory.  */
10225 /* SWP or SWPB insn. */
10226 /* Get memory address given by Rn. */
10227 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10228 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10229           /* SWP insn?  It swaps a word.  */
10230 if (8 == arm_insn_r->opcode)
10231 {
10232 record_buf_mem[0] = 4;
10233 }
10234 else
10235 {
10236 /* SWPB insn, swaps only byte. */
10237 record_buf_mem[0] = 1;
10238 }
10239 record_buf_mem[1] = u_regval;
10240 arm_insn_r->mem_rec_count = 1;
10241 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10242 arm_insn_r->reg_rec_count = 1;
10243 }
10244 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10245 {
10246 /* STRH. */
10247 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10248 ARM_RECORD_STRH);
10249 }
10250 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10251 {
10252 /* LDRD. */
10253 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10254 record_buf[1] = record_buf[0] + 1;
10255 arm_insn_r->reg_rec_count = 2;
10256 }
10257 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10258 {
10259 /* STRD. */
10260 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10261 ARM_RECORD_STRD);
10262 }
10263 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10264 {
10265 /* LDRH, LDRSB, LDRSH. */
10266 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10267 arm_insn_r->reg_rec_count = 1;
10268 }
10269
10270 }
10271
10272 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10273 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10274 && !INSN_RECORDED(arm_insn_r))
10275 {
10276 ret = -1;
10277 /* Handle coprocessor insn extension space. */
10278 }
10279
10280 /* To be done for ARMv5 and later; as of now we return -1. */
10281 if (-1 == ret)
ca92db2d 10282 return ret;
10283
10284 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10285 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10286
10287 return ret;
10288}
10289
10290/* Handling opcode 000 insns. */
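/* Opcode 000 groups the register-form data processing insns together with
   the multiplies, the misc load/store encodings (STRH/LDRD/SWP), MRS/MSR
   and BX/BLX/BKPT/CLZ.  The handler below records the destination
   register of each insn and, wherever the flags can change, the CPSR.  */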
10291
10292static int
10293arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10294{
10295 struct regcache *reg_cache = arm_insn_r->regcache;
10296 uint32_t record_buf[8], record_buf_mem[8];
10297 ULONGEST u_regval[2] = {0};
10298
bec2ab5a 10299 uint32_t reg_src1 = 0, reg_dest = 0;
10300 uint32_t opcode1 = 0;
10301
10302 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10303 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10304 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10305
10306 /* Data processing insn /multiply insn. */
10307 if (9 == arm_insn_r->decode
10308 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10309 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10310 {
10311 /* Handle multiply instructions. */
10312 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10313 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10314 {
10315 /* Handle MLA and MUL. */
10316 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10317 record_buf[1] = ARM_PS_REGNUM;
10318 arm_insn_r->reg_rec_count = 2;
10319 }
10320 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10321 {
10322 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10323 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10324 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10325 record_buf[2] = ARM_PS_REGNUM;
10326 arm_insn_r->reg_rec_count = 3;
10327 }
10328 }
10329 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10330 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10331 {
10332 /* Handle misc load insns, as 20th bit (L = 1). */
10333 /* LDR insn has a capability to do branching, if
10334          MOV LR, PC precedes an LDR insn having Rn as R15;
10335          in that case, the pair emulates a branch and link insn, and hence
10336          we need to save CPSR and PC as well.  I am not sure this is the
10337          right place; an opcode = 010 LDR insn makes this happen, if R15 was
10338 used. */
10339 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10340 if (15 != reg_dest)
10341 {
10342 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10343 arm_insn_r->reg_rec_count = 1;
10344 }
10345 else
10346 {
10347 record_buf[0] = reg_dest;
10348 record_buf[1] = ARM_PS_REGNUM;
10349 arm_insn_r->reg_rec_count = 2;
10350 }
10351 }
10352 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10353 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10354 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10355 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10356 {
10357 /* Handle MSR insn. */
10358 if (9 == arm_insn_r->opcode)
10359 {
10360           /* CPSR is going to be changed.  */
10361 record_buf[0] = ARM_PS_REGNUM;
10362 arm_insn_r->reg_rec_count = 1;
10363 }
10364 else
10365 {
10366 /* SPSR is going to be changed. */
10367 /* How to read SPSR value? */
10368 return -1;
10369 }
10370 }
10371 else if (9 == arm_insn_r->decode
10372 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10373 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10374 {
10375 /* Handling SWP, SWPB. */
10376       /* This insn changes a register as well as memory.  */
10377 /* SWP or SWPB insn. */
10378
10379 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10380 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10381       /* SWP insn?  It swaps a word.  */
10382 if (8 == arm_insn_r->opcode)
10383 {
10384 record_buf_mem[0] = 4;
10385 }
10386 else
10387 {
10388 /* SWPB insn, swaps only byte. */
10389 record_buf_mem[0] = 1;
10390 }
10391 record_buf_mem[1] = u_regval[0];
10392 arm_insn_r->mem_rec_count = 1;
10393 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10394 arm_insn_r->reg_rec_count = 1;
10395 }
10396 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10397 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10398 {
10399 /* Handle BLX, branch and link/exchange. */
10400 if (9 == arm_insn_r->opcode)
10401 {
10402           /* Branch mode is chosen by setting the T bit of CPSR from bit[0] of Rm,
10403 and R14 stores the return address. */
10404 record_buf[0] = ARM_PS_REGNUM;
10405 record_buf[1] = ARM_LR_REGNUM;
10406 arm_insn_r->reg_rec_count = 2;
10407 }
10408 }
10409 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10410 {
10411 /* Handle enhanced software breakpoint insn, BKPT. */
10412 /* CPSR is changed to be executed in ARM state, disabling normal
10413 interrupts, entering abort mode. */
10414 /* According to high vector configuration PC is set. */
10415       /* If the user hits a breakpoint and then executes in reverse,
10416          we need to go back with the previous CPSR and
10417 Program Counter. */
10418 record_buf[0] = ARM_PS_REGNUM;
10419 record_buf[1] = ARM_LR_REGNUM;
10420 arm_insn_r->reg_rec_count = 2;
10421
10422 /* Save SPSR also; how? */
10423 return -1;
10424 }
10425 else if (11 == arm_insn_r->decode
10426 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10427 {
10428 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10429
10430 /* Handle str(x) insn */
10431 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10432 ARM_RECORD_STRH);
10433 }
10434 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10435 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10436 {
10437 /* Handle BX, branch and link/exchange. */
10438       /* Branch mode is chosen by setting the T bit of CPSR from bit[0] of Rm.  */
10439 record_buf[0] = ARM_PS_REGNUM;
10440 arm_insn_r->reg_rec_count = 1;
10441 }
10442 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10443 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10444 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10445 {
10446 /* Count leading zeros: CLZ. */
10447 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10448 arm_insn_r->reg_rec_count = 1;
10449 }
10450 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10451 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10452 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10453 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10454 )
10455 {
10456 /* Handle MRS insn. */
10457 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10458 arm_insn_r->reg_rec_count = 1;
10459 }
10460 else if (arm_insn_r->opcode <= 15)
10461 {
10462 /* Normal data processing insns. */
10463       /* In all of the 11 shifter operand modes, the insn modifies the
10464          destination register, which is specified by bits 12-15.  */
10465 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10466 record_buf[1] = ARM_PS_REGNUM;
10467 arm_insn_r->reg_rec_count = 2;
10468 }
10469 else
10470 {
10471 return -1;
10472 }
10473
10474 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10475 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10476 return 0;
10477}
10478
10479/* Handling opcode 001 insns. */
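/* Opcode 001 covers the immediate-form data processing insns and the
   immediate form of MSR; the recorder only needs to note the destination
   register and the CPSR.  */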
10480
10481static int
10482arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10483{
10484 uint32_t record_buf[8], record_buf_mem[8];
10485
10486 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10487 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10488
10489 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10490 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10491 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10492 )
10493 {
10494 /* Handle MSR insn. */
10495 if (9 == arm_insn_r->opcode)
10496 {
10497           /* CPSR is going to be changed.  */
10498 record_buf[0] = ARM_PS_REGNUM;
10499 arm_insn_r->reg_rec_count = 1;
10500 }
10501 else
10502 {
10503 /* SPSR is going to be changed. */
10504 }
10505 }
10506 else if (arm_insn_r->opcode <= 15)
10507 {
10508 /* Normal data processing insns. */
10509       /* In all of the 11 shifter operand modes, the insn modifies the
10510          destination register, which is specified by bits 12-15.  */
10511 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10512 record_buf[1] = ARM_PS_REGNUM;
10513 arm_insn_r->reg_rec_count = 2;
10514 }
10515 else
10516 {
10517 return -1;
10518 }
10519
10520 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10521 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10522 return 0;
10523}
10524
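/* Record handler for the ARM media instructions (ARMv6 and later);
   reached from the opcode 011 decoder when bit 4 of the insn is set.  */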
10525static int
10526arm_record_media (insn_decode_record *arm_insn_r)
10527{
10528 uint32_t record_buf[8];
10529
10530 switch (bits (arm_insn_r->arm_insn, 22, 24))
10531 {
10532 case 0:
10533 /* Parallel addition and subtraction, signed */
10534 case 1:
10535 /* Parallel addition and subtraction, unsigned */
10536 case 2:
10537 case 3:
10538 /* Packing, unpacking, saturation and reversal */
10539 {
10540 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10541
10542 record_buf[arm_insn_r->reg_rec_count++] = rd;
10543 }
10544 break;
10545
10546 case 4:
10547 case 5:
10548 /* Signed multiplies */
10549 {
10550 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10551 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10552
10553 record_buf[arm_insn_r->reg_rec_count++] = rd;
10554 if (op1 == 0x0)
10555 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10556 else if (op1 == 0x4)
10557 record_buf[arm_insn_r->reg_rec_count++]
10558 = bits (arm_insn_r->arm_insn, 12, 15);
10559 }
10560 break;
10561
10562 case 6:
10563 {
10564 if (bit (arm_insn_r->arm_insn, 21)
10565 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10566 {
10567 /* SBFX */
10568 record_buf[arm_insn_r->reg_rec_count++]
10569 = bits (arm_insn_r->arm_insn, 12, 15);
10570 }
10571 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10572 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10573 {
10574 /* USAD8 and USADA8 */
10575 record_buf[arm_insn_r->reg_rec_count++]
10576 = bits (arm_insn_r->arm_insn, 16, 19);
10577 }
10578 }
10579 break;
10580
10581 case 7:
10582 {
10583 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10584 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10585 {
10586 /* Permanently UNDEFINED */
10587 return -1;
10588 }
10589 else
10590 {
10591 /* BFC, BFI and UBFX */
10592 record_buf[arm_insn_r->reg_rec_count++]
10593 = bits (arm_insn_r->arm_insn, 12, 15);
10594 }
10595 }
10596 break;
10597
10598 default:
10599 return -1;
10600 }
10601
10602 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10603
10604 return 0;
10605}
10606
71e396f9 10607/* Handle ARM mode instructions with opcode 010. */
10608
10609static int
10610arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10611{
10612 struct regcache *reg_cache = arm_insn_r->regcache;
10613
71e396f9
LM
10614   uint32_t reg_base, reg_dest;
10615 uint32_t offset_12, tgt_mem_addr;
72508ac0 10616 uint32_t record_buf[8], record_buf_mem[8];
71e396f9
LM
10617 unsigned char wback;
10618 ULONGEST u_regval;
72508ac0 10619
71e396f9
LM
10620 /* Calculate wback. */
10621 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10622 || (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0 10623
71e396f9
LM
10624 arm_insn_r->reg_rec_count = 0;
10625 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0
PO
10626
10627 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10628 {
71e396f9
LM
10629 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10630 and LDRT. */
10631
72508ac0 10632 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
71e396f9
LM
10633 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10634
10635 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10636          precedes an LDR instruction having R15 as reg_base, it
10637 emulates a branch and link instruction, and hence we need to save
10638 CPSR and PC as well. */
10639 if (ARM_PC_REGNUM == reg_dest)
10640 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10641
10642 /* If wback is true, also save the base register, which is going to be
10643 written to. */
10644 if (wback)
10645 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10646 }
10647 else
10648 {
71e396f9
LM
10649 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10650
72508ac0 10651 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
71e396f9
LM
10652 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10653
10654 /* Handle bit U. */
72508ac0 10655 if (bit (arm_insn_r->arm_insn, 23))
71e396f9
LM
10656 {
10657 /* U == 1: Add the offset. */
10658 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10659 }
72508ac0 10660 else
71e396f9
LM
10661 {
10662 /* U == 0: subtract the offset. */
10663 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10664 }
10665
10666 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10667 bytes. */
10668 if (bit (arm_insn_r->arm_insn, 22))
10669 {
10670 /* STRB and STRBT: 1 byte. */
10671 record_buf_mem[0] = 1;
10672 }
10673 else
10674 {
10675 /* STR and STRT: 4 bytes. */
10676 record_buf_mem[0] = 4;
10677 }
10678
10679 /* Handle bit P. */
10680 if (bit (arm_insn_r->arm_insn, 24))
10681 record_buf_mem[1] = tgt_mem_addr;
10682 else
10683 record_buf_mem[1] = (uint32_t) u_regval;
72508ac0 10684
72508ac0
PO
10685 arm_insn_r->mem_rec_count = 1;
10686
71e396f9
LM
10687 /* If wback is true, also save the base register, which is going to be
10688 written to. */
10689 if (wback)
10690 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
10691 }
10692
10693 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10694 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10695 return 0;
10696}
10697
10698/* Handling opcode 011 insns. */
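/* Opcode 011 covers the register-offset and scaled-register-offset loads
   and stores (bit 4 clear) and the media insns (bit 4 set).  For stores
   the target address is recomputed here, including the LSL, LSR, ASR and
   ROR/RRX scalings selected by bits 5-6 of the insn.  */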
10699
10700static int
10701arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10702{
10703 struct regcache *reg_cache = arm_insn_r->regcache;
10704
10705 uint32_t shift_imm = 0;
10706 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10707 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10708 uint32_t record_buf[8], record_buf_mem[8];
10709
10710 LONGEST s_word;
10711 ULONGEST u_regval[2];
10712
c55978a6
YQ
10713 if (bit (arm_insn_r->arm_insn, 4))
10714 return arm_record_media (arm_insn_r);
10715
72508ac0
PO
10716 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10717 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10718
10719 /* Handle enhanced store insns and LDRD DSP insn,
10720 order begins according to addressing modes for store insns
10721 STRH insn. */
10722
10723 /* LDR or STR? */
10724 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10725 {
10726 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10727 /* LDR insn has a capability to do branching, if
10728          MOV LR, PC precedes an LDR insn having Rn as R15;
10729          in that case, the pair emulates a branch and link insn, and hence
10730          we need to save CPSR and PC as well.  */
10731 if (15 != reg_dest)
10732 {
10733 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10734 arm_insn_r->reg_rec_count = 1;
10735 }
10736 else
10737 {
10738 record_buf[0] = reg_dest;
10739 record_buf[1] = ARM_PS_REGNUM;
10740 arm_insn_r->reg_rec_count = 2;
10741 }
10742 }
10743 else
10744 {
10745 if (! bits (arm_insn_r->arm_insn, 4, 11))
10746 {
10747 /* Store insn, register offset and register pre-indexed,
10748 register post-indexed. */
10749 /* Get Rm. */
10750 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10751 /* Get Rn. */
10752 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10753 regcache_raw_read_unsigned (reg_cache, reg_src1
10754 , &u_regval[0]);
10755 regcache_raw_read_unsigned (reg_cache, reg_src2
10756 , &u_regval[1]);
10757 if (15 == reg_src2)
10758 {
10759               /* If R15 was used as Rn, the value read is the current PC + 8.  */
10760               /* Pre-indexed mode doesn't reach here; illegal insn.  */
10761 u_regval[0] = u_regval[0] + 8;
10762 }
10763 /* Calculate target store address, Rn +/- Rm, register offset. */
10764 /* U == 1. */
10765 if (bit (arm_insn_r->arm_insn, 23))
10766 {
10767 tgt_mem_addr = u_regval[0] + u_regval[1];
10768 }
10769 else
10770 {
10771 tgt_mem_addr = u_regval[1] - u_regval[0];
10772 }
10773
10774 switch (arm_insn_r->opcode)
10775 {
10776 /* STR. */
10777 case 8:
10778 case 12:
10779 /* STR. */
10780 case 9:
10781 case 13:
10782 /* STRT. */
10783 case 1:
10784 case 5:
10785 /* STR. */
10786 case 0:
10787 case 4:
10788 record_buf_mem[0] = 4;
10789 break;
10790
10791 /* STRB. */
10792 case 10:
10793 case 14:
10794 /* STRB. */
10795 case 11:
10796 case 15:
10797 /* STRBT. */
10798 case 3:
10799 case 7:
10800 /* STRB. */
10801 case 2:
10802 case 6:
10803 record_buf_mem[0] = 1;
10804 break;
10805
10806 default:
10807 gdb_assert_not_reached ("no decoding pattern found");
10808 break;
10809 }
10810 record_buf_mem[1] = tgt_mem_addr;
10811 arm_insn_r->mem_rec_count = 1;
10812
10813 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10814 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10815 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10816 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10817 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10818 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10819 )
10820 {
10821 /* Rn is going to be changed in pre-indexed mode and
10822 post-indexed mode as well. */
10823 record_buf[0] = reg_src2;
10824 arm_insn_r->reg_rec_count = 1;
10825 }
10826 }
10827 else
10828 {
10829 /* Store insn, scaled register offset; scaled pre-indexed. */
10830 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10831 /* Get Rm. */
10832 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10833 /* Get Rn. */
10834 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10835 /* Get shift_imm. */
10836 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10837 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10838 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10839 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10840 /* Offset_12 used as shift. */
10841 switch (offset_12)
10842 {
10843 case 0:
10844 /* Offset_12 used as index. */
10845 offset_12 = u_regval[0] << shift_imm;
10846 break;
10847
10848 case 1:
10849               offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10850 break;
10851
10852 case 2:
10853 if (!shift_imm)
10854 {
10855 if (bit (u_regval[0], 31))
10856 {
10857 offset_12 = 0xFFFFFFFF;
10858 }
10859 else
10860 {
10861 offset_12 = 0;
10862 }
10863 }
10864 else
10865 {
10866 /* This is arithmetic shift. */
10867 offset_12 = s_word >> shift_imm;
10868 }
10869 break;
10870
10871 case 3:
10872 if (!shift_imm)
10873 {
10874 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10875 &u_regval[1]);
10876 /* Get C flag value and shift it by 31. */
10877 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10878 | (u_regval[0]) >> 1);
10879 }
10880 else
10881 {
10882 offset_12 = (u_regval[0] >> shift_imm) \
10883 | (u_regval[0] <<
10884                                (sizeof (uint32_t) * 8 - shift_imm));
10885 }
10886 break;
10887
10888 default:
10889 gdb_assert_not_reached ("no decoding pattern found");
10890 break;
10891 }
10892
10893 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10894 /* bit U set. */
10895 if (bit (arm_insn_r->arm_insn, 23))
10896 {
10897 tgt_mem_addr = u_regval[1] + offset_12;
10898 }
10899 else
10900 {
10901 tgt_mem_addr = u_regval[1] - offset_12;
10902 }
10903
10904 switch (arm_insn_r->opcode)
10905 {
10906 /* STR. */
10907 case 8:
10908 case 12:
10909 /* STR. */
10910 case 9:
10911 case 13:
10912 /* STRT. */
10913 case 1:
10914 case 5:
10915 /* STR. */
10916 case 0:
10917 case 4:
10918 record_buf_mem[0] = 4;
10919 break;
10920
10921 /* STRB. */
10922 case 10:
10923 case 14:
10924 /* STRB. */
10925 case 11:
10926 case 15:
10927 /* STRBT. */
10928 case 3:
10929 case 7:
10930 /* STRB. */
10931 case 2:
10932 case 6:
10933 record_buf_mem[0] = 1;
10934 break;
10935
10936 default:
10937 gdb_assert_not_reached ("no decoding pattern found");
10938 break;
10939 }
10940 record_buf_mem[1] = tgt_mem_addr;
10941 arm_insn_r->mem_rec_count = 1;
10942
10943 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10944 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10945 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10946 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10947 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10948 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10949 )
10950 {
10951 /* Rn is going to be changed in register scaled pre-indexed
10952                  mode, and scaled post-indexed mode.  */
10953 record_buf[0] = reg_src2;
10954 arm_insn_r->reg_rec_count = 1;
10955 }
10956 }
10957 }
10958
10959 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10960 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10961 return 0;
10962}
10963
71e396f9 10964/* Handle ARM mode instructions with opcode 100. */
10965
10966static int
10967arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10968{
10969 struct regcache *reg_cache = arm_insn_r->regcache;
71e396f9
LM
10970 uint32_t register_count = 0, register_bits;
10971 uint32_t reg_base, addr_mode;
72508ac0 10972 uint32_t record_buf[24], record_buf_mem[48];
71e396f9
LM
10973 uint32_t wback;
10974 ULONGEST u_regval;
72508ac0 10975
71e396f9
LM
10976 /* Fetch the list of registers. */
10977 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10978 arm_insn_r->reg_rec_count = 0;
10979
10980 /* Fetch the base register that contains the address we are loading data
10981 to. */
10982 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
72508ac0 10983
71e396f9
LM
10984 /* Calculate wback. */
10985 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
72508ac0
PO
10986
10987 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10988 {
71e396f9 10989 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
72508ac0 10990
71e396f9 10991 /* Find out which registers are going to be loaded from memory. */
72508ac0 10992 while (register_bits)
71e396f9
LM
10993 {
10994 if (register_bits & 0x00000001)
10995 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10996 register_bits = register_bits >> 1;
10997 register_count++;
10998 }
72508ac0 10999
71e396f9
LM
11000
11001 /* If wback is true, also save the base register, which is going to be
11002 written to. */
11003 if (wback)
11004 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11005
11006 /* Save the CPSR register. */
11007 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
72508ac0
PO
11008 }
11009 else
11010 {
71e396f9 11011 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
72508ac0 11012
71e396f9
LM
11013 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11014
11015 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11016
11017 /* Find out how many registers are going to be stored to memory. */
72508ac0 11018 while (register_bits)
71e396f9
LM
11019 {
11020 if (register_bits & 0x00000001)
11021 register_count++;
11022 register_bits = register_bits >> 1;
11023 }
72508ac0
PO
11024
11025 switch (addr_mode)
71e396f9
LM
11026 {
11027 /* STMDA (STMED): Decrement after. */
11028 case 0:
11029 record_buf_mem[1] = (uint32_t) u_regval
11030 - register_count * INT_REGISTER_SIZE + 4;
11031 break;
11032 /* STM (STMIA, STMEA): Increment after. */
11033 case 1:
11034 record_buf_mem[1] = (uint32_t) u_regval;
11035 break;
11036 /* STMDB (STMFD): Decrement before. */
11037 case 2:
11038 record_buf_mem[1] = (uint32_t) u_regval
11039 - register_count * INT_REGISTER_SIZE;
11040 break;
11041 /* STMIB (STMFA): Increment before. */
11042 case 3:
11043 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11044 break;
11045 default:
11046 gdb_assert_not_reached ("no decoding pattern found");
11047 break;
11048 }
72508ac0 11049
71e396f9
LM
11050 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11051 arm_insn_r->mem_rec_count = 1;
11052
11053 /* If wback is true, also save the base register, which is going to be
11054 written to. */
11055 if (wback)
11056 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
72508ac0
PO
11057 }
11058
11059 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11060 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11061 return 0;
11062}
11063
11064/* Handling opcode 101 insns. */
11065
11066static int
11067arm_record_b_bl (insn_decode_record *arm_insn_r)
11068{
11069 uint32_t record_buf[8];
11070
11071 /* Handle B, BL, BLX(1) insns. */
11072 /* B simply branches so we do nothing here. */
11073   /* Note: BLX(1) doesn't fall here but instead it falls into
11074 extension space. */
11075 if (bit (arm_insn_r->arm_insn, 24))
11076 {
11077 record_buf[0] = ARM_LR_REGNUM;
11078 arm_insn_r->reg_rec_count = 1;
11079 }
11080
11081 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11082
11083 return 0;
11084}
11085
72508ac0 11086static int
c6ec2b30 11087arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11088{
11089 printf_unfiltered (_("Process record does not support instruction "
01e57735
YQ
11090 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11091 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
72508ac0
PO
11092
11093 return -1;
11094}
11095
11096/* Record handler for vector data transfer instructions. */
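/* These are the transfers between the ARM core registers and the
   VFP/NEON register bank: VMOV (core <-> scalar and core <-> single),
   VMRS/VMSR and VDUP.  Bits 21-23, bit 20 (L) and bit 8 (C) select the
   variant, and the side that is written (a core register, FPSCR or CPSR,
   or one or two D registers) is what gets recorded.  */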
11097
11098static int
11099arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11100{
11101 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11102 uint32_t record_buf[4];
11103
11104 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11105 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11106 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11107 bit_l = bit (arm_insn_r->arm_insn, 20);
11108 bit_c = bit (arm_insn_r->arm_insn, 8);
11109
11110 /* Handle VMOV instruction. */
11111 if (bit_l && bit_c)
11112 {
11113 record_buf[0] = reg_t;
11114 arm_insn_r->reg_rec_count = 1;
11115 }
11116 else if (bit_l && !bit_c)
11117 {
11118 /* Handle VMOV instruction. */
11119 if (bits_a == 0x00)
11120 {
f1771dce 11121 record_buf[0] = reg_t;
5a578da5
OJ
11122 arm_insn_r->reg_rec_count = 1;
11123 }
11124 /* Handle VMRS instruction. */
11125 else if (bits_a == 0x07)
11126 {
11127 if (reg_t == 15)
11128 reg_t = ARM_PS_REGNUM;
11129
11130 record_buf[0] = reg_t;
11131 arm_insn_r->reg_rec_count = 1;
11132 }
11133 }
11134 else if (!bit_l && !bit_c)
11135 {
11136 /* Handle VMOV instruction. */
11137 if (bits_a == 0x00)
11138 {
f1771dce 11139 record_buf[0] = ARM_D0_REGNUM + reg_v;
5a578da5
OJ
11140
11141 arm_insn_r->reg_rec_count = 1;
11142 }
11143 /* Handle VMSR instruction. */
11144 else if (bits_a == 0x07)
11145 {
11146 record_buf[0] = ARM_FPSCR_REGNUM;
11147 arm_insn_r->reg_rec_count = 1;
11148 }
11149 }
11150 else if (!bit_l && bit_c)
11151 {
11152 /* Handle VMOV instruction. */
11153 if (!(bits_a & 0x04))
11154 {
11155 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11156 + ARM_D0_REGNUM;
11157 arm_insn_r->reg_rec_count = 1;
11158 }
11159 /* Handle VDUP instruction. */
11160 else
11161 {
11162 if (bit (arm_insn_r->arm_insn, 21))
11163 {
11164 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11165 record_buf[0] = reg_v + ARM_D0_REGNUM;
11166 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11167 arm_insn_r->reg_rec_count = 2;
11168 }
11169 else
11170 {
11171 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11172 record_buf[0] = reg_v + ARM_D0_REGNUM;
11173 arm_insn_r->reg_rec_count = 1;
11174 }
11175 }
11176 }
11177
11178 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11179 return 0;
11180}
11181
11182/* Record handler for extension register load/store instructions. */
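/* Covers the 64-bit VMOV between two core registers and an extension
   register pair, plus VSTM/VPUSH, VLDM/VPOP, VSTR and VLDR.  Stores are
   recorded as (length, address) pairs in record_buf_mem; loads record
   the D registers written, with two S registers folded onto their
   containing D register since GDB treats S registers as pseudo
   registers.  */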
11183
11184static int
11185arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11186{
11187 uint32_t opcode, single_reg;
11188 uint8_t op_vldm_vstm;
11189 uint32_t record_buf[8], record_buf_mem[128];
11190 ULONGEST u_regval = 0;
11191
11192 struct regcache *reg_cache = arm_insn_r->regcache;
11193
11194 opcode = bits (arm_insn_r->arm_insn, 20, 24);
9fde51ed 11195 single_reg = !bit (arm_insn_r->arm_insn, 8);
f20f80dd
OJ
11196 op_vldm_vstm = opcode & 0x1b;
11197
11198 /* Handle VMOV instructions. */
11199 if ((opcode & 0x1e) == 0x04)
11200 {
9fde51ed 11201 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
01e57735
YQ
11202 {
11203 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11204 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11205 arm_insn_r->reg_rec_count = 2;
11206 }
f20f80dd 11207 else
01e57735 11208 {
9fde51ed
YQ
11209 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11210 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
f20f80dd 11211
9fde51ed 11212 if (single_reg)
01e57735 11213 {
9fde51ed
YQ
11214 /* The first S register number m is REG_M:M (M is bit 5),
11215 the corresponding D register number is REG_M:M / 2, which
11216 is REG_M. */
11217 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11218 /* The second S register number is REG_M:M + 1, the
11219 corresponding D register number is (REG_M:M + 1) / 2.
11220 IOW, if bit M is 1, the first and second S registers
11221 are mapped to different D registers, otherwise, they are
11222 in the same D register. */
11223 if (bit_m)
11224 {
11225 record_buf[arm_insn_r->reg_rec_count++]
11226 = ARM_D0_REGNUM + reg_m + 1;
11227 }
01e57735
YQ
11228 }
11229 else
11230 {
9fde51ed 11231 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
01e57735
YQ
11232 arm_insn_r->reg_rec_count = 1;
11233 }
11234 }
f20f80dd
OJ
11235 }
11236 /* Handle VSTM and VPUSH instructions. */
11237 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
01e57735 11238 || op_vldm_vstm == 0x12)
f20f80dd
OJ
11239 {
11240 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11241 uint32_t memory_index = 0;
11242
11243 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11244 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11245 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11246 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11247 memory_count = imm_off8;
11248
11249 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11250 start_address = u_regval;
f20f80dd 11251 else
01e57735 11252 start_address = u_regval - imm_off32;
f20f80dd
OJ
11253
11254 if (bit (arm_insn_r->arm_insn, 21))
01e57735
YQ
11255 {
11256 record_buf[0] = reg_rn;
11257 arm_insn_r->reg_rec_count = 1;
11258 }
f20f80dd
OJ
11259
11260 while (memory_count > 0)
01e57735 11261 {
9fde51ed 11262 if (single_reg)
01e57735 11263 {
9fde51ed
YQ
11264 record_buf_mem[memory_index] = 4;
11265 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11266 start_address = start_address + 4;
11267 memory_index = memory_index + 2;
11268 }
11269 else
11270 {
9fde51ed
YQ
11271 record_buf_mem[memory_index] = 4;
11272 record_buf_mem[memory_index + 1] = start_address;
11273 record_buf_mem[memory_index + 2] = 4;
11274 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11275 start_address = start_address + 8;
11276 memory_index = memory_index + 4;
11277 }
11278 memory_count--;
11279 }
f20f80dd
OJ
11280 arm_insn_r->mem_rec_count = (memory_index >> 1);
11281 }
11282 /* Handle VLDM instructions. */
11283 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
01e57735 11284 || op_vldm_vstm == 0x13)
f20f80dd
OJ
11285 {
11286 uint32_t reg_count, reg_vd;
11287 uint32_t reg_index = 0;
9fde51ed 11288 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
f20f80dd
OJ
11289
11290 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11291 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11292
9fde51ed
YQ
11293 /* REG_VD is the first D register number. If the instruction
11294 loads memory to S registers (SINGLE_REG is TRUE), the register
11295 number is (REG_VD << 1 | bit D), so the corresponding D
11296 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11297 if (!single_reg)
11298 reg_vd = reg_vd | (bit_d << 4);
f20f80dd 11299
9fde51ed 11300 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
01e57735 11301 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
f20f80dd 11302
9fde51ed
YQ
11303 /* If the instruction loads memory to D register, REG_COUNT should
11304 be divided by 2, according to the ARM Architecture Reference
11305 Manual. If the instruction loads memory to S register, divide by
11306 2 as well because two S registers are mapped to D register. */
11307 reg_count = reg_count / 2;
11308 if (single_reg && bit_d)
01e57735 11309 {
9fde51ed
YQ
11310 /* Increase the register count if S register list starts from
11311 an odd number (bit d is one). */
11312 reg_count++;
11313 }
f20f80dd 11314
9fde51ed
YQ
11315 while (reg_count > 0)
11316 {
11317 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
01e57735
YQ
11318 reg_count--;
11319 }
f20f80dd
OJ
11320 arm_insn_r->reg_rec_count = reg_index;
11321 }
11322 /* VSTR Vector store register. */
11323 else if ((opcode & 0x13) == 0x10)
11324 {
bec2ab5a 11325 uint32_t start_address, reg_rn, imm_off32, imm_off8;
f20f80dd
OJ
11326 uint32_t memory_index = 0;
11327
11328 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11329 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11330 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
9fde51ed 11331 imm_off32 = imm_off8 << 2;
f20f80dd
OJ
11332
11333 if (bit (arm_insn_r->arm_insn, 23))
01e57735 11334 start_address = u_regval + imm_off32;
f20f80dd 11335 else
01e57735 11336 start_address = u_regval - imm_off32;
f20f80dd
OJ
11337
11338 if (single_reg)
01e57735 11339 {
9fde51ed
YQ
11340 record_buf_mem[memory_index] = 4;
11341 record_buf_mem[memory_index + 1] = start_address;
01e57735
YQ
11342 arm_insn_r->mem_rec_count = 1;
11343 }
f20f80dd 11344 else
01e57735 11345 {
9fde51ed
YQ
11346 record_buf_mem[memory_index] = 4;
11347 record_buf_mem[memory_index + 1] = start_address;
11348 record_buf_mem[memory_index + 2] = 4;
11349 record_buf_mem[memory_index + 3] = start_address + 4;
01e57735
YQ
11350 arm_insn_r->mem_rec_count = 2;
11351 }
f20f80dd
OJ
11352 }
11353 /* VLDR Vector load register. */
11354 else if ((opcode & 0x13) == 0x11)
11355 {
11356 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11357
11358 if (!single_reg)
01e57735
YQ
11359 {
11360 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11361 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11362 }
f20f80dd 11363 else
01e57735
YQ
11364 {
11365 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
9fde51ed
YQ
11366 /* Record register D rather than pseudo register S. */
11367 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
01e57735 11368 }
f20f80dd
OJ
11369 arm_insn_r->reg_rec_count = 1;
11370 }
11371
11372 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11373 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11374 return 0;
11375}
11376
11377/* Record handler for arm/thumb mode VFP data processing instructions. */
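/* Each insn is classified into one of four templates before recording:
   INSN_T0 writes a pair of D registers, INSN_T1 writes one
   double-precision D register, INSN_T2 writes a single-precision S
   register (recorded as its containing D register), and INSN_T3 only
   updates the FPSCR (VCMP/VCMPE).  */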
11378
11379static int
11380arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11381{
11382 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11383 uint32_t record_buf[4];
11384 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11385 enum insn_types curr_insn_type = INSN_INV;
11386
11387 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11388 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11389 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11390 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11391 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11392 bit_d = bit (arm_insn_r->arm_insn, 22);
11393   opc1 = opc1 & ~0x04;
11394
11395 /* Handle VMLA, VMLS. */
11396 if (opc1 == 0x00)
11397 {
11398 if (bit (arm_insn_r->arm_insn, 10))
11399 {
11400 if (bit (arm_insn_r->arm_insn, 6))
11401 curr_insn_type = INSN_T0;
11402 else
11403 curr_insn_type = INSN_T1;
11404 }
11405 else
11406 {
11407 if (dp_op_sz)
11408 curr_insn_type = INSN_T1;
11409 else
11410 curr_insn_type = INSN_T2;
11411 }
11412 }
11413 /* Handle VNMLA, VNMLS, VNMUL. */
11414 else if (opc1 == 0x01)
11415 {
11416 if (dp_op_sz)
11417 curr_insn_type = INSN_T1;
11418 else
11419 curr_insn_type = INSN_T2;
11420 }
11421 /* Handle VMUL. */
11422 else if (opc1 == 0x02 && !(opc3 & 0x01))
11423 {
11424 if (bit (arm_insn_r->arm_insn, 10))
11425 {
11426 if (bit (arm_insn_r->arm_insn, 6))
11427 curr_insn_type = INSN_T0;
11428 else
11429 curr_insn_type = INSN_T1;
11430 }
11431 else
11432 {
11433 if (dp_op_sz)
11434 curr_insn_type = INSN_T1;
11435 else
11436 curr_insn_type = INSN_T2;
11437 }
11438 }
11439 /* Handle VADD, VSUB. */
11440 else if (opc1 == 0x03)
11441 {
11442 if (!bit (arm_insn_r->arm_insn, 9))
11443 {
11444 if (bit (arm_insn_r->arm_insn, 6))
11445 curr_insn_type = INSN_T0;
11446 else
11447 curr_insn_type = INSN_T1;
11448 }
11449 else
11450 {
11451 if (dp_op_sz)
11452 curr_insn_type = INSN_T1;
11453 else
11454 curr_insn_type = INSN_T2;
11455 }
11456 }
11457 /* Handle VDIV. */
11458   else if (opc1 == 0x08)
11459 {
11460 if (dp_op_sz)
11461 curr_insn_type = INSN_T1;
11462 else
11463 curr_insn_type = INSN_T2;
11464 }
11465 /* Handle all other vfp data processing instructions. */
11466 else if (opc1 == 0x0b)
11467 {
11468 /* Handle VMOV. */
11469 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11470 {
11471 if (bit (arm_insn_r->arm_insn, 4))
11472 {
11473 if (bit (arm_insn_r->arm_insn, 6))
11474 curr_insn_type = INSN_T0;
11475 else
11476 curr_insn_type = INSN_T1;
11477 }
11478 else
11479 {
11480 if (dp_op_sz)
11481 curr_insn_type = INSN_T1;
11482 else
11483 curr_insn_type = INSN_T2;
11484 }
11485 }
11486 /* Handle VNEG and VABS. */
11487 else if ((opc2 == 0x01 && opc3 == 0x01)
11488 || (opc2 == 0x00 && opc3 == 0x03))
11489 {
11490 if (!bit (arm_insn_r->arm_insn, 11))
11491 {
11492 if (bit (arm_insn_r->arm_insn, 6))
11493 curr_insn_type = INSN_T0;
11494 else
11495 curr_insn_type = INSN_T1;
11496 }
11497 else
11498 {
11499 if (dp_op_sz)
11500 curr_insn_type = INSN_T1;
11501 else
11502 curr_insn_type = INSN_T2;
11503 }
11504 }
11505 /* Handle VSQRT. */
11506 else if (opc2 == 0x01 && opc3 == 0x03)
11507 {
11508 if (dp_op_sz)
11509 curr_insn_type = INSN_T1;
11510 else
11511 curr_insn_type = INSN_T2;
11512 }
11513 /* Handle VCVT. */
11514 else if (opc2 == 0x07 && opc3 == 0x03)
11515 {
11516 if (!dp_op_sz)
11517 curr_insn_type = INSN_T1;
11518 else
11519 curr_insn_type = INSN_T2;
11520 }
11521 else if (opc3 & 0x01)
11522 {
11523 /* Handle VCVT. */
11524 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11525 {
11526 if (!bit (arm_insn_r->arm_insn, 18))
11527 curr_insn_type = INSN_T2;
11528 else
11529 {
11530 if (dp_op_sz)
11531 curr_insn_type = INSN_T1;
11532 else
11533 curr_insn_type = INSN_T2;
11534 }
11535 }
11536 /* Handle VCVT. */
11537 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11538 {
11539 if (dp_op_sz)
11540 curr_insn_type = INSN_T1;
11541 else
11542 curr_insn_type = INSN_T2;
11543 }
11544 /* Handle VCVTB, VCVTT. */
11545 else if ((opc2 & 0x0e) == 0x02)
11546 curr_insn_type = INSN_T2;
11547 /* Handle VCMP, VCMPE. */
11548 else if ((opc2 & 0x0e) == 0x04)
11549 curr_insn_type = INSN_T3;
11550 }
11551 }
11552
11553 switch (curr_insn_type)
11554 {
11555 case INSN_T0:
11556 reg_vd = reg_vd | (bit_d << 4);
11557 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11558 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11559 arm_insn_r->reg_rec_count = 2;
11560 break;
11561
11562 case INSN_T1:
11563 reg_vd = reg_vd | (bit_d << 4);
11564 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11565 arm_insn_r->reg_rec_count = 1;
11566 break;
11567
11568 case INSN_T2:
11569 reg_vd = (reg_vd << 1) | bit_d;
11570 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11571 arm_insn_r->reg_rec_count = 1;
11572 break;
11573
11574 case INSN_T3:
11575 record_buf[0] = ARM_FPSCR_REGNUM;
11576 arm_insn_r->reg_rec_count = 1;
11577 break;
11578
11579 default:
11580 gdb_assert_not_reached ("no decoding pattern found");
11581 break;
11582 }
11583
11584 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11585 return 0;
11586}
11587
11588/* Handling opcode 110 insns. */
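/* Opcode 110 is the coprocessor load/store and 64-bit transfer space.
   When the coprocessor field is 0b101x the insn is a VFP/NEON extension
   register load/store or a 64-bit core <-> extension register transfer;
   for other coprocessors only the move to two ARM core registers is
   recorded and everything else is reported as unsupported.  */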
11589
11590static int
11591arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11592{
bec2ab5a 11593 uint32_t op1, op1_ebit, coproc;
60cc5e93
OJ
11594
11595 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11596 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11597 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11598
11599 if ((coproc & 0x0e) == 0x0a)
11600 {
11601 /* Handle extension register ld/st instructions. */
11602 if (!(op1 & 0x20))
f20f80dd 11603 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11604
11605 /* 64-bit transfers between arm core and extension registers. */
11606 if ((op1 & 0x3e) == 0x04)
f20f80dd 11607 return arm_record_exreg_ld_st_insn (arm_insn_r);
60cc5e93
OJ
11608 }
11609 else
11610 {
11611 /* Handle coprocessor ld/st instructions. */
11612 if (!(op1 & 0x3a))
11613 {
11614 /* Store. */
11615 if (!op1_ebit)
11616 return arm_record_unsupported_insn (arm_insn_r);
11617 else
11618 /* Load. */
11619 return arm_record_unsupported_insn (arm_insn_r);
11620 }
11621
11622 /* Move to coprocessor from two arm core registers. */
11623 if (op1 == 0x4)
11624 return arm_record_unsupported_insn (arm_insn_r);
11625
11626 /* Move to two arm core registers from coprocessor. */
11627 if (op1 == 0x5)
11628 {
11629 uint32_t reg_t[2];
11630
11631 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11632 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11633 arm_insn_r->reg_rec_count = 2;
11634
11635 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11636 return 0;
11637 }
11638 }
11639 return arm_record_unsupported_insn (arm_insn_r);
11640}
11641
11642/* Handling opcode 111 insns. */
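/* Opcode 111 holds SVC/SWI (bit 24 set), which is forwarded to the
   OS-specific syscall recorder, and the coprocessor space: the VFP
   data-processing and transfer insns are dispatched to the handlers
   above, MRC records the destination core register (or the CPSR when
   the target is R15), and the remaining coprocessor insns are
   unsupported.  */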
11643
11644static int
11645arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11646{
60cc5e93 11647 uint32_t op, op1_sbit, op1_ebit, coproc;
72508ac0
PO
11648 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11649 struct regcache *reg_cache = arm_insn_r->regcache;
72508ac0
PO
11650
11651 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
60cc5e93
OJ
11652 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11653 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11654 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11655 op = bit (arm_insn_r->arm_insn, 4);
11656
11657 /* Handle arm SWI/SVC system call instructions. */
60cc5e93 11658 if (op1_sbit)
97dfe206
OJ
11659 {
11660 if (tdep->arm_syscall_record != NULL)
11661 {
11662 ULONGEST svc_operand, svc_number;
11663
11664 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11665
11666 if (svc_operand) /* OABI. */
11667 svc_number = svc_operand - 0x900000;
11668 else /* EABI. */
11669 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11670
60cc5e93 11671 return tdep->arm_syscall_record (reg_cache, svc_number);
97dfe206
OJ
11672 }
11673 else
11674 {
11675 printf_unfiltered (_("no syscall record support\n"));
60cc5e93 11676 return -1;
97dfe206
OJ
11677 }
11678 }
60cc5e93
OJ
11679
11680 if ((coproc & 0x0e) == 0x0a)
11681 {
11682 /* VFP data-processing instructions. */
11683 if (!op1_sbit && !op)
851f26ae 11684 return arm_record_vfp_data_proc_insn (arm_insn_r);
60cc5e93
OJ
11685
11686 /* Advanced SIMD, VFP instructions. */
11687 if (!op1_sbit && op)
5a578da5 11688 return arm_record_vdata_transfer_insn (arm_insn_r);
60cc5e93 11689 }
97dfe206
OJ
11690 else
11691 {
60cc5e93
OJ
11692 /* Coprocessor data operations. */
11693 if (!op1_sbit && !op)
11694 return arm_record_unsupported_insn (arm_insn_r);
11695
11696 /* Move to Coprocessor from ARM core register. */
11697 if (!op1_sbit && !op1_ebit && op)
11698 return arm_record_unsupported_insn (arm_insn_r);
11699
11700 /* Move to arm core register from coprocessor. */
11701 if (!op1_sbit && op1_ebit && op)
11702 {
11703 uint32_t record_buf[1];
11704
11705 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11706 if (record_buf[0] == 15)
11707 record_buf[0] = ARM_PS_REGNUM;
11708
11709 arm_insn_r->reg_rec_count = 1;
11710 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11711 record_buf);
11712 return 0;
11713 }
97dfe206 11714 }
72508ac0 11715
60cc5e93 11716 return arm_record_unsupported_insn (arm_insn_r);
72508ac0
PO
11717}
11718
11719/* Handling opcode 000 insns. */
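/* The Thumb decoders below follow the same pattern as the ARM ones.
   Thumb opcode 000 is the shift-by-immediate and add/subtract group,
   which can change only the flags and the low destination register Rd.  */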
11720
11721static int
11722thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11723{
11724 uint32_t record_buf[8];
11725 uint32_t reg_src1 = 0;
11726
11727 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11728
11729 record_buf[0] = ARM_PS_REGNUM;
11730 record_buf[1] = reg_src1;
11731 thumb_insn_r->reg_rec_count = 2;
11732
11733 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11734
11735 return 0;
11736}
11737
11738
11739/* Handling opcode 001 insns. */
11740
11741static int
11742thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11743{
11744 uint32_t record_buf[8];
11745 uint32_t reg_src1 = 0;
11746
11747 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11748
11749 record_buf[0] = ARM_PS_REGNUM;
11750 record_buf[1] = reg_src1;
11751 thumb_insn_r->reg_rec_count = 2;
11752
11753 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11754
11755 return 0;
11756}
11757
11758/* Handling opcode 010 insns. */
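/* Thumb opcode 010 mixes several formats: load/store with register
   offset, PC-relative loads, the hi-register operations and BX/BLX, and
   the register-form data processing insns.  Loads record the destination
   register; stores record the target (size, address) pair.  */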
11759
11760static int
11761thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11762{
11763 struct regcache *reg_cache = thumb_insn_r->regcache;
11764 uint32_t record_buf[8], record_buf_mem[8];
11765
11766 uint32_t reg_src1 = 0, reg_src2 = 0;
11767 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11768
11769 ULONGEST u_regval[2] = {0};
11770
11771 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11772
11773 if (bit (thumb_insn_r->arm_insn, 12))
11774 {
11775 /* Handle load/store register offset. */
11776 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11777
11778 if (opB >= 4 && opB <= 7)
72508ac0
PO
11779 {
11780 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11781 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11782 record_buf[0] = reg_src1;
11783 thumb_insn_r->reg_rec_count = 1;
11784 }
b121eeb9 11785 else if (opB >= 0 && opB <= 2)
72508ac0
PO
11786 {
11787 /* STR(2), STRB(2), STRH(2) . */
11788 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11789 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11790 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11791 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
b121eeb9 11792 if (0 == opB)
72508ac0 11793 record_buf_mem[0] = 4; /* STR (2). */
b121eeb9 11794 else if (2 == opB)
72508ac0 11795 record_buf_mem[0] = 1; /* STRB (2). */
b121eeb9 11796 else if (1 == opB)
72508ac0
PO
11797 record_buf_mem[0] = 2; /* STRH (2). */
11798 record_buf_mem[1] = u_regval[0] + u_regval[1];
11799 thumb_insn_r->mem_rec_count = 1;
11800 }
11801 }
11802 else if (bit (thumb_insn_r->arm_insn, 11))
11803 {
11804 /* Handle load from literal pool. */
11805 /* LDR(3). */
11806 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11807 record_buf[0] = reg_src1;
11808 thumb_insn_r->reg_rec_count = 1;
11809 }
11810 else if (opcode1)
11811 {
b121eeb9 11812 /* Special data instructions and branch and exchange */
72508ac0
PO
11813 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11814 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11815 if ((3 == opcode2) && (!opcode3))
11816 {
11817 /* Branch with exchange. */
11818 record_buf[0] = ARM_PS_REGNUM;
11819 thumb_insn_r->reg_rec_count = 1;
11820 }
11821 else
11822 {
1f33efec
YQ
11823 /* Format 8; special data processing insns. */
11824 record_buf[0] = ARM_PS_REGNUM;
11825 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11826 | bits (thumb_insn_r->arm_insn, 0, 2));
72508ac0
PO
11827 thumb_insn_r->reg_rec_count = 2;
11828 }
11829 }
11830 else
11831 {
11832 /* Format 5; data processing insns. */
11833 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11834 if (bit (thumb_insn_r->arm_insn, 7))
11835 {
11836 reg_src1 = reg_src1 + 8;
11837 }
11838 record_buf[0] = ARM_PS_REGNUM;
11839 record_buf[1] = reg_src1;
11840 thumb_insn_r->reg_rec_count = 2;
11841 }
11842
11843 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11844 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11845 record_buf_mem);
11846
11847 return 0;
11848}
11849
11850/* Handling opcode 001 insns. */
11851
11852static int
11853thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11854{
11855 struct regcache *reg_cache = thumb_insn_r->regcache;
11856 uint32_t record_buf[8], record_buf_mem[8];
11857
11858 uint32_t reg_src1 = 0;
11859 uint32_t opcode = 0, immed_5 = 0;
11860
11861 ULONGEST u_regval = 0;
11862
11863 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11864
11865 if (opcode)
11866 {
11867 /* LDR(1). */
11868 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11869 record_buf[0] = reg_src1;
11870 thumb_insn_r->reg_rec_count = 1;
11871 }
11872 else
11873 {
11874 /* STR(1). */
11875 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11876 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11877 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11878 record_buf_mem[0] = 4;
11879 record_buf_mem[1] = u_regval + (immed_5 * 4);
11880 thumb_insn_r->mem_rec_count = 1;
11881 }
11882
11883 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11884 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11885 record_buf_mem);
11886
11887 return 0;
11888}
11889
11890/* Handling opcode 100 insns. */
11891
11892static int
11893thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11894{
11895 struct regcache *reg_cache = thumb_insn_r->regcache;
11896 uint32_t record_buf[8], record_buf_mem[8];
11897
11898 uint32_t reg_src1 = 0;
11899 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11900
11901 ULONGEST u_regval = 0;
11902
11903 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11904
11905 if (3 == opcode)
11906 {
11907 /* LDR(4). */
11908 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11909 record_buf[0] = reg_src1;
11910 thumb_insn_r->reg_rec_count = 1;
11911 }
11912 else if (1 == opcode)
11913 {
11914 /* LDRH(1). */
11915 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11916 record_buf[0] = reg_src1;
11917 thumb_insn_r->reg_rec_count = 1;
11918 }
11919 else if (2 == opcode)
11920 {
11921 /* STR(3). */
11922 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11923 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11924 record_buf_mem[0] = 4;
11925 record_buf_mem[1] = u_regval + (immed_8 * 4);
11926 thumb_insn_r->mem_rec_count = 1;
11927 }
11928 else if (0 == opcode)
11929 {
11930 /* STRH(1). */
11931 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11932 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11933 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11934 record_buf_mem[0] = 2;
11935 record_buf_mem[1] = u_regval + (immed_5 * 2);
11936 thumb_insn_r->mem_rec_count = 1;
11937 }
11938
11939 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11940 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11941 record_buf_mem);
11942
11943 return 0;
11944}
11945
11946/* Handling opcode 101 insns. */
11947
11948static int
11949thumb_record_misc (insn_decode_record *thumb_insn_r)
11950{
11951 struct regcache *reg_cache = thumb_insn_r->regcache;
11952
b121eeb9 11953 uint32_t opcode = 0;
72508ac0 11954 uint32_t register_bits = 0, register_count = 0;
bec2ab5a 11955 uint32_t index = 0, start_address = 0;
11956 uint32_t record_buf[24], record_buf_mem[48];
11957 uint32_t reg_src1;
11958
11959 ULONGEST u_regval = 0;
11960
11961 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
72508ac0 11962
b121eeb9 11963 if (opcode == 0 || opcode == 1)
72508ac0 11964 {
11965 /* ADR and ADD (SP plus immediate) */
11966
11967 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11968 record_buf[0] = reg_src1;
11969 thumb_insn_r->reg_rec_count = 1;
11970 }
b121eeb9 11971 else
72508ac0 11972 {
11973 /* Miscellaneous 16-bit instructions */
11974 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11975
11976 switch (opcode2)
11977 {
11978 case 6:
11979 /* SETEND and CPS */
11980 break;
11981 case 0:
11982 /* ADD/SUB (SP plus immediate) */
11983 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11984 record_buf[0] = ARM_SP_REGNUM;
11985 thumb_insn_r->reg_rec_count = 1;
11986 break;
11987 case 1: /* fall through */
11988 case 3: /* fall through */
11989 case 9: /* fall through */
11990 case 11:
11991 /* CBNZ, CBZ */
11992 break;
11993 case 2:
11994 /* SXTH, SXTB, UXTH, UXTB */
11995 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11996 thumb_insn_r->reg_rec_count = 1;
11997 break;
11998 case 4: /* fall through */
11999 case 5:
12000 /* PUSH. */
12001 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12002 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12003 while (register_bits)
12004 {
12005 if (register_bits & 0x00000001)
12006 register_count++;
12007 register_bits = register_bits >> 1;
12008 }
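	  /* Bit 8 is the M bit: LR is pushed in addition to the listed
	     registers, so the block written by PUSH starts
	     4 * (M + register_count) bytes below the current SP.  */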
12009 start_address = u_regval - \
12010 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12011 thumb_insn_r->mem_rec_count = register_count;
12012 while (register_count)
12013 {
12014 record_buf_mem[(register_count * 2) - 1] = start_address;
12015 record_buf_mem[(register_count * 2) - 2] = 4;
12016 start_address = start_address + 4;
12017 register_count--;
12018 }
12019 record_buf[0] = ARM_SP_REGNUM;
12020 thumb_insn_r->reg_rec_count = 1;
12021 break;
12022 case 10:
12023 /* REV, REV16, REVSH */
12024 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12025 thumb_insn_r->reg_rec_count = 1;
12026 break;
12027 case 12: /* fall through */
12028 case 13:
12029 /* POP. */
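	  /* Each register named in the list is rewritten and SP is
	     adjusted; CPSR is recorded as well, since popping the PC
	     (bit 8 of the list) can change the execution state.  */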
12030 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12031 while (register_bits)
12032 {
12033 if (register_bits & 0x00000001)
12034 record_buf[index++] = register_count;
12035 register_bits = register_bits >> 1;
12036 register_count++;
12037 }
12038 record_buf[index++] = ARM_PS_REGNUM;
12039 record_buf[index++] = ARM_SP_REGNUM;
12040 thumb_insn_r->reg_rec_count = index;
12041 break;
12042 case 0xe:
12043 /* BKPT insn. */
12044	  /* Handle the enhanced software breakpoint insn, BKPT.  */
12045	  /* CPSR is changed so that execution continues in ARM state with
12046	     normal interrupts disabled, in Abort mode.  */
12047	  /* The PC is set according to the high vector configuration.  */
12048	  /* When the user hits this breakpoint and then runs in reverse, we
12049	     need to restore the previous CPSR and program counter.  */
12050 record_buf[0] = ARM_PS_REGNUM;
12051 record_buf[1] = ARM_LR_REGNUM;
12052 thumb_insn_r->reg_rec_count = 2;
12053 /* We need to save SPSR value, which is not yet done. */
12054 printf_unfiltered (_("Process record does not support instruction "
12055 "0x%0x at address %s.\n"),
12056 thumb_insn_r->arm_insn,
12057 paddress (thumb_insn_r->gdbarch,
12058 thumb_insn_r->this_addr));
12059 return -1;
12060
12061 case 0xf:
12062 /* If-Then, and hints */
12063 break;
12064 default:
12065 return -1;
12066 };
12067 }
12068
12069 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12070 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12071 record_buf_mem);
12072
12073 return 0;
12074}
12075
12076/* Handling opcode 110 insns. */
12077
12078static int
12079thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12080{
12081 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12082 struct regcache *reg_cache = thumb_insn_r->regcache;
12083
12084  uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success.  */
12085 uint32_t reg_src1 = 0;
12086 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 12087 uint32_t index = 0, start_address = 0;
12088 uint32_t record_buf[24], record_buf_mem[48];
12089
12090 ULONGEST u_regval = 0;
12091
12092 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12093 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12094
12095 if (1 == opcode2)
12096 {
12097
12098 /* LDMIA. */
12099 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12100 /* Get Rn. */
12101 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12102 while (register_bits)
12103 {
12104 if (register_bits & 0x00000001)
f969241e 12105 record_buf[index++] = register_count;
72508ac0 12106 register_bits = register_bits >> 1;
f969241e 12107 register_count++;
72508ac0 12108 }
12109 record_buf[index++] = reg_src1;
12110 thumb_insn_r->reg_rec_count = index;
12111 }
12112 else if (0 == opcode2)
12113 {
12114      /* Handle STMIA.  */
12115 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12116 /* Get Rn. */
12117 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12118 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12119 while (register_bits)
12120 {
12121 if (register_bits & 0x00000001)
12122 register_count++;
12123 register_bits = register_bits >> 1;
12124 }
12125 start_address = u_regval;
12126 thumb_insn_r->mem_rec_count = register_count;
12127 while (register_count)
12128 {
12129 record_buf_mem[(register_count * 2) - 1] = start_address;
12130 record_buf_mem[(register_count * 2) - 2] = 4;
12131 start_address = start_address + 4;
12132 register_count--;
12133 }
12134 }
12135 else if (0x1F == opcode1)
12136 {
12137 /* Handle arm syscall insn. */
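      /* The syscall number is read from r7, where the ARM EABI places it,
	 and handed to the OSABI-specific hook, which knows which registers
	 and memory locations the particular call can change.  */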
97dfe206 12138 if (tdep->arm_syscall_record != NULL)
72508ac0 12139 {
12140 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12141 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12142 }
12143 else
12144 {
12145 printf_unfiltered (_("no syscall record support\n"));
12146 return -1;
12147 }
12148 }
12149
12150  /* B(1), the conditional branch, is automatically taken care of in
12151     process_record, as the PC is saved there.  */
12152
12153 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12154 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12155 record_buf_mem);
12156
12157 return ret;
12158}
12159
12160/* Handling opcode 111 insns. */
12161
12162static int
12163thumb_record_branch (insn_decode_record *thumb_insn_r)
12164{
12165 uint32_t record_buf[8];
12166 uint32_t bits_h = 0;
12167
12168 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12169
12170 if (2 == bits_h || 3 == bits_h)
12171 {
12172 /* BL */
12173 record_buf[0] = ARM_LR_REGNUM;
12174 thumb_insn_r->reg_rec_count = 1;
12175 }
12176 else if (1 == bits_h)
12177 {
12178 /* BLX(1). */
12179 record_buf[0] = ARM_PS_REGNUM;
12180 record_buf[1] = ARM_LR_REGNUM;
12181 thumb_insn_r->reg_rec_count = 2;
12182 }
12183
12184  /* B(2) is automatically taken care of in process_record, as the PC
12185     is saved there.  */
12186
12187 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12188
12189 return 0;
12190}
12191
12192/* Handler for thumb2 load/store multiple instructions. */
12193
12194static int
12195thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12196{
12197 struct regcache *reg_cache = thumb2_insn_r->regcache;
12198
12199 uint32_t reg_rn, op;
12200 uint32_t register_bits = 0, register_count = 0;
12201 uint32_t index = 0, start_address = 0;
12202 uint32_t record_buf[24], record_buf_mem[48];
12203
12204 ULONGEST u_regval = 0;
12205
12206 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12207 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12208
12209 if (0 == op || 3 == op)
12210 {
12211 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12212 {
12213 /* Handle RFE instruction. */
12214 record_buf[0] = ARM_PS_REGNUM;
12215 thumb2_insn_r->reg_rec_count = 1;
12216 }
12217 else
12218 {
12219 /* Handle SRS instruction after reading banked SP. */
12220 return arm_record_unsupported_insn (thumb2_insn_r);
12221 }
12222 }
12223 else if (1 == op || 2 == op)
12224 {
12225 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12226 {
12227 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12228 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12229 while (register_bits)
12230 {
12231 if (register_bits & 0x00000001)
12232 record_buf[index++] = register_count;
12233
12234 register_count++;
12235 register_bits = register_bits >> 1;
12236 }
12237 record_buf[index++] = reg_rn;
12238 record_buf[index++] = ARM_PS_REGNUM;
12239 thumb2_insn_r->reg_rec_count = index;
12240 }
12241 else
12242 {
12243 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12244 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12245 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12246 while (register_bits)
12247 {
12248 if (register_bits & 0x00000001)
12249 register_count++;
12250
12251 register_bits = register_bits >> 1;
12252 }
12253
12254 if (1 == op)
12255 {
12256	      /* Start address calculation for STMIA/STMEA.  */
12257 start_address = u_regval;
12258 }
12259 else if (2 == op)
12260 {
12261	      /* Start address calculation for STMDB/STMFD.  */
12262 start_address = u_regval - register_count * 4;
12263 }
12264
12265 thumb2_insn_r->mem_rec_count = register_count;
12266 while (register_count)
12267 {
12268 record_buf_mem[register_count * 2 - 1] = start_address;
12269 record_buf_mem[register_count * 2 - 2] = 4;
12270 start_address = start_address + 4;
12271 register_count--;
12272 }
12273 record_buf[0] = reg_rn;
12274 record_buf[1] = ARM_PS_REGNUM;
12275 thumb2_insn_r->reg_rec_count = 2;
12276 }
12277 }
12278
12279 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12280 record_buf_mem);
12281 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12282 record_buf);
12283 return ARM_RECORD_SUCCESS;
12284}
12285
12286/* Handler for thumb2 load/store (dual/exclusive) and table branch
12287 instructions. */
12288
12289static int
12290thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12291{
12292 struct regcache *reg_cache = thumb2_insn_r->regcache;
12293
12294 uint32_t reg_rd, reg_rn, offset_imm;
12295 uint32_t reg_dest1, reg_dest2;
12296 uint32_t address, offset_addr;
12297 uint32_t record_buf[8], record_buf_mem[8];
12298 uint32_t op1, op2, op3;
12299
12300 ULONGEST u_regval[2];
12301
12302 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12303 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12304 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12305
12306 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12307 {
12308 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12309 {
12310 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12311 record_buf[0] = reg_dest1;
12312 record_buf[1] = ARM_PS_REGNUM;
12313 thumb2_insn_r->reg_rec_count = 2;
12314 }
12315
12316 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12317 {
12318 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12319 record_buf[2] = reg_dest2;
12320 thumb2_insn_r->reg_rec_count = 3;
12321 }
12322 }
12323 else
12324 {
12325 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12326 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12327
12328 if (0 == op1 && 0 == op2)
12329 {
12330 /* Handle STREX. */
12331 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12332 address = u_regval[0] + (offset_imm * 4);
12333 record_buf_mem[0] = 4;
12334 record_buf_mem[1] = address;
12335 thumb2_insn_r->mem_rec_count = 1;
12336 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12337 record_buf[0] = reg_rd;
12338 thumb2_insn_r->reg_rec_count = 1;
12339 }
12340 else if (1 == op1 && 0 == op2)
12341 {
12342 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12343 record_buf[0] = reg_rd;
12344 thumb2_insn_r->reg_rec_count = 1;
12345 address = u_regval[0];
12346 record_buf_mem[1] = address;
12347
12348 if (4 == op3)
12349 {
12350 /* Handle STREXB. */
12351 record_buf_mem[0] = 1;
12352 thumb2_insn_r->mem_rec_count = 1;
12353 }
12354 else if (5 == op3)
12355 {
12356 /* Handle STREXH. */
12357 record_buf_mem[0] = 2 ;
12358 thumb2_insn_r->mem_rec_count = 1;
12359 }
12360 else if (7 == op3)
12361 {
12362 /* Handle STREXD. */
12363 address = u_regval[0];
12364 record_buf_mem[0] = 4;
12365 record_buf_mem[2] = 4;
12366 record_buf_mem[3] = address + 4;
12367 thumb2_insn_r->mem_rec_count = 2;
12368 }
12369 }
12370 else
12371 {
12372 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12373
12374 if (bit (thumb2_insn_r->arm_insn, 24))
12375 {
12376 if (bit (thumb2_insn_r->arm_insn, 23))
12377 offset_addr = u_regval[0] + (offset_imm * 4);
12378 else
12379 offset_addr = u_regval[0] - (offset_imm * 4);
12380
12381 address = offset_addr;
12382 }
12383 else
12384 address = u_regval[0];
12385
12386 record_buf_mem[0] = 4;
12387 record_buf_mem[1] = address;
12388 record_buf_mem[2] = 4;
12389 record_buf_mem[3] = address + 4;
12390 thumb2_insn_r->mem_rec_count = 2;
12391 record_buf[0] = reg_rn;
12392 thumb2_insn_r->reg_rec_count = 1;
12393 }
12394 }
12395
12396 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12397 record_buf);
12398 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12399 record_buf_mem);
12400 return ARM_RECORD_SUCCESS;
12401}
12402
12403/* Handler for thumb2 data processing (shift register and modified immediate)
12404 instructions. */
12405
12406static int
12407thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12408{
12409 uint32_t reg_rd, op;
12410 uint32_t record_buf[8];
12411
12412 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12413 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12414
12415 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12416 {
12417 record_buf[0] = ARM_PS_REGNUM;
12418 thumb2_insn_r->reg_rec_count = 1;
12419 }
12420 else
12421 {
12422 record_buf[0] = reg_rd;
12423 record_buf[1] = ARM_PS_REGNUM;
12424 thumb2_insn_r->reg_rec_count = 2;
12425 }
12426
12427 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12428 record_buf);
12429 return ARM_RECORD_SUCCESS;
12430}
12431
12432/* Generic handler for thumb2 instructions which affect the destination and
12433   PS registers.  */
12434
12435static int
12436thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12437{
12438 uint32_t reg_rd;
12439 uint32_t record_buf[8];
12440
12441 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12442
12443 record_buf[0] = reg_rd;
12444 record_buf[1] = ARM_PS_REGNUM;
12445 thumb2_insn_r->reg_rec_count = 2;
12446
12447 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12448 record_buf);
12449 return ARM_RECORD_SUCCESS;
12450}
12451
12452/* Handler for thumb2 branch and miscellaneous control instructions. */
12453
12454static int
12455thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12456{
12457 uint32_t op, op1, op2;
12458 uint32_t record_buf[8];
12459
12460 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12461 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12462 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12463
12464 /* Handle MSR insn. */
12465 if (!(op1 & 0x2) && 0x38 == op)
12466 {
12467 if (!(op2 & 0x3))
12468 {
12469 /* CPSR is going to be changed. */
12470 record_buf[0] = ARM_PS_REGNUM;
12471 thumb2_insn_r->reg_rec_count = 1;
12472 }
12473 else
12474 {
12475 arm_record_unsupported_insn(thumb2_insn_r);
12476 return -1;
12477 }
12478 }
12479 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12480 {
12481 /* BLX. */
12482 record_buf[0] = ARM_PS_REGNUM;
12483 record_buf[1] = ARM_LR_REGNUM;
12484 thumb2_insn_r->reg_rec_count = 2;
12485 }
12486
12487 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12488 record_buf);
12489 return ARM_RECORD_SUCCESS;
12490}
12491
12492/* Handler for thumb2 store single data item instructions. */
12493
12494static int
12495thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12496{
12497 struct regcache *reg_cache = thumb2_insn_r->regcache;
12498
12499 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12500 uint32_t address, offset_addr;
12501 uint32_t record_buf[8], record_buf_mem[8];
12502 uint32_t op1, op2;
12503
12504 ULONGEST u_regval[2];
12505
12506 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12507 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12508 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12509 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12510
12511 if (bit (thumb2_insn_r->arm_insn, 23))
12512 {
12513 /* T2 encoding. */
12514 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12515 offset_addr = u_regval[0] + offset_imm;
12516 address = offset_addr;
12517 }
12518 else
12519 {
12520 /* T3 encoding. */
12521 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12522 {
12523 /* Handle STRB (register). */
12524 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12525 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12526 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12527 offset_addr = u_regval[1] << shift_imm;
12528 address = u_regval[0] + offset_addr;
12529 }
12530 else
12531 {
12532 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12533 if (bit (thumb2_insn_r->arm_insn, 10))
12534 {
12535 if (bit (thumb2_insn_r->arm_insn, 9))
12536 offset_addr = u_regval[0] + offset_imm;
12537 else
12538 offset_addr = u_regval[0] - offset_imm;
12539
12540 address = offset_addr;
12541 }
12542 else
12543 address = u_regval[0];
12544 }
12545 }
12546
12547 switch (op1)
12548 {
12549 /* Store byte instructions. */
12550 case 4:
12551 case 0:
12552 record_buf_mem[0] = 1;
12553 break;
12554 /* Store half word instructions. */
12555 case 1:
12556 case 5:
12557 record_buf_mem[0] = 2;
12558 break;
12559 /* Store word instructions. */
12560 case 2:
12561 case 6:
12562 record_buf_mem[0] = 4;
12563 break;
12564
12565 default:
12566 gdb_assert_not_reached ("no decoding pattern found");
12567 break;
12568 }
12569
12570 record_buf_mem[1] = address;
12571 thumb2_insn_r->mem_rec_count = 1;
12572 record_buf[0] = reg_rn;
12573 thumb2_insn_r->reg_rec_count = 1;
12574
12575 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12576 record_buf);
12577 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12578 record_buf_mem);
12579 return ARM_RECORD_SUCCESS;
12580}
12581
12582/* Handler for thumb2 load memory hints instructions. */
12583
12584static int
12585thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12586{
12587 uint32_t record_buf[8];
12588 uint32_t reg_rt, reg_rn;
12589
12590 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12591 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12592
12593 if (ARM_PC_REGNUM != reg_rt)
12594 {
12595 record_buf[0] = reg_rt;
12596 record_buf[1] = reg_rn;
12597 record_buf[2] = ARM_PS_REGNUM;
12598 thumb2_insn_r->reg_rec_count = 3;
12599
12600 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12601 record_buf);
12602 return ARM_RECORD_SUCCESS;
12603 }
12604
12605 return ARM_RECORD_FAILURE;
12606}
12607
12608/* Handler for thumb2 load word instructions. */
12609
12610static int
12611thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12612{
12613 uint32_t record_buf[8];
12614
12615 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12616 record_buf[1] = ARM_PS_REGNUM;
12617 thumb2_insn_r->reg_rec_count = 2;
12618
12619 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12620 record_buf);
12621 return ARM_RECORD_SUCCESS;
12622}
12623
12624/* Handler for thumb2 long multiply, long multiply accumulate, and
12625 divide instructions. */
12626
12627static int
12628thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12629{
12630 uint32_t opcode1 = 0, opcode2 = 0;
12631 uint32_t record_buf[8];
12632
12633 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12634 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12635
12636 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12637 {
12638      /* Handle the long multiply family, with or without flag setting:
12639	  SMLAL(S), SMULL(S), UMLAL(S), UMULL(S).  */
12640 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12641 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12642 record_buf[2] = ARM_PS_REGNUM;
12643 thumb2_insn_r->reg_rec_count = 3;
12644 }
12645 else if (1 == opcode1 || 3 == opcode2)
12646 {
12647 /* Handle SDIV and UDIV. */
12648 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12649 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12650 record_buf[2] = ARM_PS_REGNUM;
12651 thumb2_insn_r->reg_rec_count = 3;
12652 }
12653 else
12654 return ARM_RECORD_FAILURE;
12655
12656 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12657 record_buf);
12658 return ARM_RECORD_SUCCESS;
12659}
12660
12661/* Record handler for thumb32 coprocessor instructions. */
12662
12663static int
12664thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12665{
12666 if (bit (thumb2_insn_r->arm_insn, 25))
12667 return arm_record_coproc_data_proc (thumb2_insn_r);
12668 else
12669 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12670}
12671
12672/* Record handler for advanced SIMD structure load/store instructions.  */
12673
12674static int
12675thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12676{
12677 struct regcache *reg_cache = thumb2_insn_r->regcache;
12678 uint32_t l_bit, a_bit, b_bits;
12679 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12680 uint32_t reg_rn, reg_vd, address, f_elem;
12681 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12682 uint8_t f_ebytes;
12683
12684 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12685 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12686 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12687 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12688 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12689 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
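  /* F_EBYTES is the element size in bytes (1, 2, 4 or 8) and F_ELEM the
     number of such elements held by a single 64-bit D register.  */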
12690 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12691 f_elem = 8 / f_ebytes;
12692
12693 if (!l_bit)
12694 {
12695 ULONGEST u_regval = 0;
12696 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12697 address = u_regval;
12698
12699 if (!a_bit)
12700 {
12701 /* Handle VST1. */
12702 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12703 {
12704 if (b_bits == 0x07)
12705 bf_regs = 1;
12706 else if (b_bits == 0x0a)
12707 bf_regs = 2;
12708 else if (b_bits == 0x06)
12709 bf_regs = 3;
12710 else if (b_bits == 0x02)
12711 bf_regs = 4;
12712 else
12713 bf_regs = 0;
12714
12715 for (index_r = 0; index_r < bf_regs; index_r++)
12716 {
12717 for (index_e = 0; index_e < f_elem; index_e++)
12718 {
12719 record_buf_mem[index_m++] = f_ebytes;
12720 record_buf_mem[index_m++] = address;
12721 address = address + f_ebytes;
12722 thumb2_insn_r->mem_rec_count += 1;
12723 }
12724 }
12725 }
12726 /* Handle VST2. */
12727 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12728 {
12729 if (b_bits == 0x09 || b_bits == 0x08)
12730 bf_regs = 1;
12731 else if (b_bits == 0x03)
12732 bf_regs = 2;
12733 else
12734 bf_regs = 0;
12735
12736 for (index_r = 0; index_r < bf_regs; index_r++)
12737 for (index_e = 0; index_e < f_elem; index_e++)
12738 {
12739 for (loop_t = 0; loop_t < 2; loop_t++)
12740 {
12741 record_buf_mem[index_m++] = f_ebytes;
12742 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12743 thumb2_insn_r->mem_rec_count += 1;
12744 }
12745 address = address + (2 * f_ebytes);
12746 }
12747 }
12748 /* Handle VST3. */
12749 else if ((b_bits & 0x0e) == 0x04)
12750 {
12751 for (index_e = 0; index_e < f_elem; index_e++)
12752 {
12753 for (loop_t = 0; loop_t < 3; loop_t++)
12754 {
12755 record_buf_mem[index_m++] = f_ebytes;
12756 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12757 thumb2_insn_r->mem_rec_count += 1;
12758 }
12759 address = address + (3 * f_ebytes);
12760 }
12761 }
12762 /* Handle VST4. */
12763 else if (!(b_bits & 0x0e))
12764 {
12765 for (index_e = 0; index_e < f_elem; index_e++)
12766 {
12767 for (loop_t = 0; loop_t < 4; loop_t++)
12768 {
12769 record_buf_mem[index_m++] = f_ebytes;
12770 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12771 thumb2_insn_r->mem_rec_count += 1;
12772 }
12773 address = address + (4 * f_ebytes);
12774 }
12775 }
12776 }
12777 else
12778 {
12779 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12780
12781 if (bft_size == 0x00)
12782 f_ebytes = 1;
12783 else if (bft_size == 0x01)
12784 f_ebytes = 2;
12785 else if (bft_size == 0x02)
12786 f_ebytes = 4;
12787 else
12788 f_ebytes = 0;
12789
12790 /* Handle VST1. */
12791 if (!(b_bits & 0x0b) || b_bits == 0x08)
12792 thumb2_insn_r->mem_rec_count = 1;
12793 /* Handle VST2. */
12794 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12795 thumb2_insn_r->mem_rec_count = 2;
12796 /* Handle VST3. */
12797 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12798 thumb2_insn_r->mem_rec_count = 3;
12799 /* Handle VST4. */
12800 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12801 thumb2_insn_r->mem_rec_count = 4;
12802
12803 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12804 {
12805 record_buf_mem[index_m] = f_ebytes;
12806 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12807 }
12808 }
12809 }
12810 else
12811 {
12812 if (!a_bit)
12813 {
12814 /* Handle VLD1. */
12815 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12816 thumb2_insn_r->reg_rec_count = 1;
12817 /* Handle VLD2. */
12818 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12819 thumb2_insn_r->reg_rec_count = 2;
12820 /* Handle VLD3. */
12821 else if ((b_bits & 0x0e) == 0x04)
12822 thumb2_insn_r->reg_rec_count = 3;
12823 /* Handle VLD4. */
12824 else if (!(b_bits & 0x0e))
12825 thumb2_insn_r->reg_rec_count = 4;
12826 }
12827 else
12828 {
12829 /* Handle VLD1. */
12830 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12831 thumb2_insn_r->reg_rec_count = 1;
12832 /* Handle VLD2. */
12833 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12834 thumb2_insn_r->reg_rec_count = 2;
12835 /* Handle VLD3. */
12836 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12837 thumb2_insn_r->reg_rec_count = 3;
12838 /* Handle VLD4. */
12839 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12840 thumb2_insn_r->reg_rec_count = 4;
12841
12842 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12843 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12844 }
12845 }
12846
12847 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12848 {
12849 record_buf[index_r] = reg_rn;
12850 thumb2_insn_r->reg_rec_count += 1;
12851 }
12852
12853 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12854 record_buf);
12855 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12856 record_buf_mem);
12857 return 0;
12858}
12859
12860/* Decodes thumb2 instruction type and invokes its record handler. */
12861
12862static unsigned int
12863thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12864{
12865 uint32_t op, op1, op2;
12866
12867 op = bit (thumb2_insn_r->arm_insn, 15);
12868 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12869 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12870
12871 if (op1 == 0x01)
12872 {
12873 if (!(op2 & 0x64 ))
12874 {
12875 /* Load/store multiple instruction. */
12876 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12877 }
b121eeb9 12878 else if ((op2 & 0x64) == 0x4)
12879 {
12880 /* Load/store (dual/exclusive) and table branch instruction. */
12881 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12882 }
b121eeb9 12883 else if ((op2 & 0x60) == 0x20)
12884 {
12885 /* Data-processing (shifted register). */
12886 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12887 }
12888 else if (op2 & 0x40)
12889 {
12890 /* Co-processor instructions. */
60cc5e93 12891 return thumb2_record_coproc_insn (thumb2_insn_r);
12892 }
12893 }
12894 else if (op1 == 0x02)
12895 {
12896 if (op)
12897 {
12898 /* Branches and miscellaneous control instructions. */
12899 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12900 }
12901 else if (op2 & 0x20)
12902 {
12903 /* Data-processing (plain binary immediate) instruction. */
12904 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12905 }
12906 else
12907 {
12908 /* Data-processing (modified immediate). */
12909 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12910 }
12911 }
12912 else if (op1 == 0x03)
12913 {
12914 if (!(op2 & 0x71 ))
12915 {
12916 /* Store single data item. */
12917 return thumb2_record_str_single_data (thumb2_insn_r);
12918 }
12919 else if (!((op2 & 0x71) ^ 0x10))
12920 {
12921 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12922 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12923 }
12924 else if (!((op2 & 0x67) ^ 0x01))
12925 {
12926 /* Load byte, memory hints instruction. */
12927 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12928 }
12929 else if (!((op2 & 0x67) ^ 0x03))
12930 {
12931 /* Load halfword, memory hints instruction. */
12932 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12933 }
12934 else if (!((op2 & 0x67) ^ 0x05))
12935 {
12936 /* Load word instruction. */
12937 return thumb2_record_ld_word (thumb2_insn_r);
12938 }
12939 else if (!((op2 & 0x70) ^ 0x20))
12940 {
12941 /* Data-processing (register) instruction. */
12942 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12943 }
12944 else if (!((op2 & 0x78) ^ 0x30))
12945 {
12946 /* Multiply, multiply accumulate, abs diff instruction. */
12947 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12948 }
12949 else if (!((op2 & 0x78) ^ 0x38))
12950 {
12951 /* Long multiply, long multiply accumulate, and divide. */
12952 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12953 }
12954 else if (op2 & 0x40)
12955 {
12956 /* Co-processor instructions. */
60cc5e93 12957 return thumb2_record_coproc_insn (thumb2_insn_r);
12958 }
12959 }
12960
12961 return -1;
12962}
72508ac0 12963
ffdbe864 12964namespace {
12965/* Abstract memory reader. */
12966
12967class abstract_memory_reader
12968{
12969public:
12970 /* Read LEN bytes of target memory at address MEMADDR, placing the
12971 results in GDB's memory at BUF. Return true on success. */
12972
12973 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12974};
12975
12976/* Instruction reader from real target. */
12977
12978class instruction_reader : public abstract_memory_reader
12979{
12980 public:
12981 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
12982 {
12983 if (target_read_memory (memaddr, buf, len))
12984 return false;
12985 else
12986 return true;
12987 }
12988};
12989
12990} // namespace
12991
12992/* Extract an arm/thumb/thumb2 insn depending on INSN_SIZE.  Return 0 on
12993   success and a positive value on failure.  */
12994
12995static int
12996extract_arm_insn (abstract_memory_reader& reader,
12997 insn_decode_record *insn_record, uint32_t insn_size)
12998{
12999 gdb_byte buf[insn_size];
13000
13001 memset (&buf[0], 0, insn_size);
13002
728a7913 13003 if (!reader.read (insn_record->this_addr, buf, insn_size))
13004 return 1;
13005 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13006 insn_size,
2959fed9 13007 gdbarch_byte_order_for_code (insn_record->gdbarch));
13008 return 0;
13009}
13010
13011typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13012
13013/* Decode an arm/thumb insn depending on its condition codes and opcodes, and
13014   dispatch it to the matching record handler.  */
13015
13016static int
13017decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13018 record_type_t record_type, uint32_t insn_size)
13019{
13020
13021 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13022 instruction. */
0fa9c223 13023 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13024 {
13025 arm_record_data_proc_misc_ld_str, /* 000. */
13026 arm_record_data_proc_imm, /* 001. */
13027 arm_record_ld_st_imm_offset, /* 010. */
13028 arm_record_ld_st_reg_offset, /* 011. */
13029 arm_record_ld_st_multiple, /* 100. */
13030 arm_record_b_bl, /* 101. */
60cc5e93 13031 arm_record_asimd_vfp_coproc, /* 110. */
13032 arm_record_coproc_data_proc /* 111. */
13033 };
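  /* For example, an ARM load/store with immediate offset has bits 27-25
     equal to 010, so it is dispatched to arm_record_ld_st_imm_offset.  */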
13034
13035 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13036 instruction. */
0fa9c223 13037 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13038 { \
13039 thumb_record_shift_add_sub, /* 000. */
13040 thumb_record_add_sub_cmp_mov, /* 001. */
13041 thumb_record_ld_st_reg_offset, /* 010. */
13042 thumb_record_ld_st_imm_offset, /* 011. */
13043 thumb_record_ld_st_stack, /* 100. */
13044 thumb_record_misc, /* 101. */
13045 thumb_record_ldm_stm_swi, /* 110. */
13046 thumb_record_branch /* 111. */
13047 };
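  /* For example, the 16-bit Thumb insn 0x58cd (ldr r5, [r1, r3]) has
     bits 15-13 equal to 010 and is dispatched to
     thumb_record_ld_st_reg_offset, while a PUSH (0xb4xx/0xb5xx) has 101
     and goes to thumb_record_misc.  */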
13048
13049 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13050 uint32_t insn_id = 0;
13051
728a7913 13052 if (extract_arm_insn (reader, arm_record, insn_size))
13053 {
13054 if (record_debug)
13055 {
13056 printf_unfiltered (_("Process record: error reading memory at "
13057 "addr %s len = %d.\n"),
13058 paddress (arm_record->gdbarch,
13059 arm_record->this_addr), insn_size);
13060 }
13061 return -1;
13062 }
13063 else if (ARM_RECORD == record_type)
13064 {
13065 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13066 insn_id = bits (arm_record->arm_insn, 25, 27);
13067
13068 if (arm_record->cond == 0xf)
13069 ret = arm_record_extension_space (arm_record);
13070 else
01e57735 13071 {
13072	  /* This insn has not fallen into the extension space, so decode
13073	     it through the normal dispatch table.  */
13074 ret = arm_handle_insn[insn_id] (arm_record);
13075 }
13076 if (ret != ARM_RECORD_SUCCESS)
13077 {
13078 arm_record_unsupported_insn (arm_record);
13079 ret = -1;
13080 }
13081 }
13082 else if (THUMB_RECORD == record_type)
13083 {
13084 /* As thumb does not have condition codes, we set negative. */
13085 arm_record->cond = -1;
13086 insn_id = bits (arm_record->arm_insn, 13, 15);
13087 ret = thumb_handle_insn[insn_id] (arm_record);
13088 if (ret != ARM_RECORD_SUCCESS)
13089 {
13090 arm_record_unsupported_insn (arm_record);
13091 ret = -1;
13092 }
13093 }
13094 else if (THUMB2_RECORD == record_type)
13095 {
13096 /* As thumb does not have condition codes, we set negative. */
13097 arm_record->cond = -1;
13098
13099 /* Swap first half of 32bit thumb instruction with second half. */
13100 arm_record->arm_insn
01e57735 13101 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 13102
ca92db2d 13103 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 13104
ca92db2d 13105 if (ret != ARM_RECORD_SUCCESS)
13106 {
13107 arm_record_unsupported_insn (arm_record);
13108 ret = -1;
13109 }
13110 }
13111 else
13112 {
13113 /* Throw assertion. */
13114 gdb_assert_not_reached ("not a valid instruction, could not decode");
13115 }
13116
13117 return ret;
13118}
13119
13120#if GDB_SELF_TEST
13121namespace selftests {
13122
13123/* Provide both 16-bit and 32-bit thumb instructions. */
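/* It returns halfwords from a fixed array instead of reading target
   memory, so decode_insn can be exercised without a live inferior.  */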
13124
13125class instruction_reader_thumb : public abstract_memory_reader
13126{
13127public:
13128 template<size_t SIZE>
13129 instruction_reader_thumb (enum bfd_endian endian,
13130 const uint16_t (&insns)[SIZE])
13131 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13132 {}
13133
13134 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
13135 {
13136 SELF_CHECK (len == 4 || len == 2);
13137 SELF_CHECK (memaddr % 2 == 0);
13138 SELF_CHECK ((memaddr / 2) < m_insns_size);
13139
13140 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13141 if (len == 4)
13142 {
13143 store_unsigned_integer (&buf[2], 2, m_endian,
13144 m_insns[memaddr / 2 + 1]);
13145 }
13146 return true;
13147 }
13148
13149private:
13150 enum bfd_endian m_endian;
13151 const uint16_t *m_insns;
13152 size_t m_insns_size;
13153};
13154
13155static void
13156arm_record_test (void)
13157{
13158 struct gdbarch_info info;
13159 gdbarch_info_init (&info);
13160 info.bfd_arch_info = bfd_scan_arch ("arm");
13161
13162 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13163
13164 SELF_CHECK (gdbarch != NULL);
13165
13166 /* 16-bit Thumb instructions. */
13167 {
13168 insn_decode_record arm_record;
13169
13170 memset (&arm_record, 0, sizeof (insn_decode_record));
13171 arm_record.gdbarch = gdbarch;
13172
13173 static const uint16_t insns[] = {
13174 /* db b2 uxtb r3, r3 */
13175 0xb2db,
13176 /* cd 58 ldr r5, [r1, r3] */
13177 0x58cd,
13178 };
13179
13180 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13181 instruction_reader_thumb reader (endian, insns);
13182 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13183 THUMB_INSN_SIZE_BYTES);
13184
13185 SELF_CHECK (ret == 0);
13186 SELF_CHECK (arm_record.mem_rec_count == 0);
13187 SELF_CHECK (arm_record.reg_rec_count == 1);
13188 SELF_CHECK (arm_record.arm_regs[0] == 3);
13189
13190 arm_record.this_addr += 2;
13191 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13192 THUMB_INSN_SIZE_BYTES);
13193
13194 SELF_CHECK (ret == 0);
13195 SELF_CHECK (arm_record.mem_rec_count == 0);
13196 SELF_CHECK (arm_record.reg_rec_count == 1);
13197 SELF_CHECK (arm_record.arm_regs[0] == 5);
13198 }
13199
13200 /* 32-bit Thumb-2 instructions. */
13201 {
13202 insn_decode_record arm_record;
13203
13204 memset (&arm_record, 0, sizeof (insn_decode_record));
13205 arm_record.gdbarch = gdbarch;
13206
13207 static const uint16_t insns[] = {
13208 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13209 0xee1d, 0x7f70,
13210 };
13211
13212 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13213 instruction_reader_thumb reader (endian, insns);
13214 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13215 THUMB2_INSN_SIZE_BYTES);
13216
13217 SELF_CHECK (ret == 0);
13218 SELF_CHECK (arm_record.mem_rec_count == 0);
13219 SELF_CHECK (arm_record.reg_rec_count == 1);
13220 SELF_CHECK (arm_record.arm_regs[0] == 7);
13221 }
13222}
13223} // namespace selftests
13224#endif /* GDB_SELF_TEST */
13225
13226/* Cleans up local record registers and memory allocations. */
13227
13228static void
13229deallocate_reg_mem (insn_decode_record *record)
13230{
13231 xfree (record->arm_regs);
13232 xfree (record->arm_mems);
13233}
13234
13235
01e57735 13236/* Parse the current instruction and record the values of the registers and
13237   memory that will be changed by the current instruction to "record_arch_list".
13238 Return -1 if something is wrong. */
13239
13240int
13241arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13242 CORE_ADDR insn_addr)
13243{
13244
13245 uint32_t no_of_rec = 0;
13246 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13247 ULONGEST t_bit = 0, insn_id = 0;
13248
13249 ULONGEST u_regval = 0;
13250
13251 insn_decode_record arm_record;
13252
13253 memset (&arm_record, 0, sizeof (insn_decode_record));
13254 arm_record.regcache = regcache;
13255 arm_record.this_addr = insn_addr;
13256 arm_record.gdbarch = gdbarch;
13257
13258
13259 if (record_debug > 1)
13260 {
13261 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 13262 "addr = %s\n",
13263 paddress (gdbarch, arm_record.this_addr));
13264 }
13265
13266 instruction_reader reader;
13267 if (extract_arm_insn (reader, &arm_record, 2))
13268 {
13269 if (record_debug)
13270 {
13271 printf_unfiltered (_("Process record: error reading memory at "
13272 "addr %s len = %d.\n"),
13273 paddress (arm_record.gdbarch,
13274 arm_record.this_addr), 2);
13275 }
13276 return -1;
13277 }
13278
13279  /* Check whether the insn is a Thumb or an ARM one.  */
13280
13281 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13282 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13283
13284
13285 if (!(u_regval & t_bit))
13286 {
13287 /* We are decoding arm insn. */
728a7913 13288 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13289 }
13290 else
13291 {
13292 insn_id = bits (arm_record.arm_insn, 11, 15);
13293      /* Is it a Thumb-2 insn?  */
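      /* The first halfword of a 32-bit Thumb-2 instruction always has its
	 top five bits equal to 0b11101, 0b11110 or 0b11111, i.e. 0x1D,
	 0x1E or 0x1F.  */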
13294 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
01e57735 13295 {
728a7913 13296 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13297 THUMB2_INSN_SIZE_BYTES);
13298 }
72508ac0 13299 else
13300 {
13301 /* We are decoding thumb insn. */
13302 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13303 THUMB_INSN_SIZE_BYTES);
01e57735 13304 }
13305 }
13306
13307 if (0 == ret)
13308 {
13309 /* Record registers. */
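      /* The PC always changes, so it is recorded unconditionally.  */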
25ea693b 13310 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13311 if (arm_record.arm_regs)
13312 {
13313 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13314 {
13315 if (record_full_arch_list_add_reg
25ea693b 13316 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13317 ret = -1;
13318 }
13319 }
13320 /* Record memories. */
13321 if (arm_record.arm_mems)
13322 {
13323 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13324 {
13325 if (record_full_arch_list_add_mem
13326 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13327 arm_record.arm_mems[no_of_rec].len))
13328 ret = -1;
13329 }
13330 }
72508ac0 13331
25ea693b 13332 if (record_full_arch_list_add_end ())
01e57735 13333 ret = -1;
13334 }
13335
13336
13337 deallocate_reg_mem (&arm_record);
13338
13339 return ret;
13340}