Add arm epilogue unwinder
gdb/arm-tdep.c
ed9a39eb 1/* Common target dependent code for GDB on ARM systems.
0fd88904 2
618f726f 3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
c906108c 4
c5aa993b 5 This file is part of GDB.
c906108c 6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
a9762ec7 9 the Free Software Foundation; either version 3 of the License, or
c5aa993b 10 (at your option) any later version.
c906108c 11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
c906108c 16
c5aa993b 17 You should have received a copy of the GNU General Public License
a9762ec7 18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
c906108c 19
20#include "defs.h"
21
0963b4bd 22#include <ctype.h> /* XXX for isupper (). */
34e8f22d 23
24#include "frame.h"
25#include "inferior.h"
45741a9c 26#include "infrun.h"
27#include "gdbcmd.h"
28#include "gdbcore.h"
0963b4bd 29#include "dis-asm.h" /* For register styles. */
4e052eda 30#include "regcache.h"
54483882 31#include "reggroups.h"
d16aafd8 32#include "doublest.h"
fd0407d6 33#include "value.h"
34e8f22d 34#include "arch-utils.h"
4be87837 35#include "osabi.h"
36#include "frame-unwind.h"
37#include "frame-base.h"
38#include "trad-frame.h"
39#include "objfiles.h"
40#include "dwarf2-frame.h"
e4c16157 41#include "gdbtypes.h"
29d73ae4 42#include "prologue-value.h"
25f8c692 43#include "remote.h"
44#include "target-descriptions.h"
45#include "user-regs.h"
0e9e9abd 46#include "observer.h"
34e8f22d 47
8689682c 48#include "arch/arm.h"
d9311bfa 49#include "arch/arm-get-next-pcs.h"
34e8f22d 50#include "arm-tdep.h"
26216b98 51#include "gdb/sim-arm.h"
34e8f22d 52
53#include "elf-bfd.h"
54#include "coff/internal.h"
97e03143 55#include "elf/arm.h"
c906108c 56
60c5725c 57#include "vec.h"
26216b98 58
72508ac0 59#include "record.h"
d02ed0bb 60#include "record-full.h"
72508ac0 61
9779414d 62#include "features/arm-with-m.c"
25f8c692 63#include "features/arm-with-m-fpa-layout.c"
3184d3f9 64#include "features/arm-with-m-vfp-d16.c"
65#include "features/arm-with-iwmmxt.c"
66#include "features/arm-with-vfpv2.c"
67#include "features/arm-with-vfpv3.c"
68#include "features/arm-with-neon.c"
9779414d 69
70static int arm_debug;
71
72/* Macros for setting and testing a bit in a minimal symbol that marks
 73 it as a Thumb function.  The MSB of the minimal symbol's "info" field
f594e5e9 74 is used for this purpose.
75
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
f594e5e9 77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
082fc60d 78
0963b4bd 79#define MSYMBOL_SET_SPECIAL(msym) \
b887350f 80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
81
82#define MSYMBOL_IS_SPECIAL(msym) \
b887350f 83 MSYMBOL_TARGET_FLAG_1 (msym)
082fc60d 84
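/* Usage sketch (added for exposition; not part of the original file):
   a symbol-reading hook would typically do

       if (<the minimal symbol denotes a Thumb function>)
         MSYMBOL_SET_SPECIAL (msym);

   and arm_pc_is_thumb below then consults MSYMBOL_IS_SPECIAL when no
   mapping symbols cover the address in question.  */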
85/* Per-objfile data used for mapping symbols. */
86static const struct objfile_data *arm_objfile_data_key;
87
88struct arm_mapping_symbol
89{
90 bfd_vma value;
91 char type;
92};
93typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94DEF_VEC_O(arm_mapping_symbol_s);
95
96struct arm_per_objfile
97{
98 VEC(arm_mapping_symbol_s) **section_maps;
99};
100
101/* The list of available "set arm ..." and "show arm ..." commands. */
102static struct cmd_list_element *setarmcmdlist = NULL;
103static struct cmd_list_element *showarmcmdlist = NULL;
104
105/* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
40478521 107static const char *const fp_model_strings[] =
108{
109 "auto",
110 "softfpa",
111 "fpa",
112 "softvfp",
113 "vfp",
114 NULL
115};
116
117/* A variable that can be configured by the user. */
118static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119static const char *current_fp_model = "auto";
120
28e97307 121/* The ABI to use. Keep this in sync with arm_abi_kind. */
40478521 122static const char *const arm_abi_strings[] =
123{
124 "auto",
125 "APCS",
126 "AAPCS",
127 NULL
128};
129
130/* A variable that can be configured by the user. */
131static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132static const char *arm_abi_string = "auto";
133
0428b8f5 134/* The execution mode to assume. */
40478521 135static const char *const arm_mode_strings[] =
136 {
137 "auto",
138 "arm",
139 "thumb",
140 NULL
141 };
142
143static const char *arm_fallback_mode_string = "auto";
144static const char *arm_force_mode_string = "auto";
145
146/* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151static int arm_override_mode = -1;
152
94c30b78 153/* Number of different reg name sets (options). */
afd7eef0 154static int num_disassembly_options;
bc90b915 155
156/* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
160static const struct
161{
162 const char *name;
163 int regnum;
164} arm_register_aliases[] = {
165 /* Basic register numbers. */
166 { "r0", 0 },
167 { "r1", 1 },
168 { "r2", 2 },
169 { "r3", 3 },
170 { "r4", 4 },
171 { "r5", 5 },
172 { "r6", 6 },
173 { "r7", 7 },
174 { "r8", 8 },
175 { "r9", 9 },
176 { "r10", 10 },
177 { "r11", 11 },
178 { "r12", 12 },
179 { "r13", 13 },
180 { "r14", 14 },
181 { "r15", 15 },
182 /* Synonyms (argument and variable registers). */
183 { "a1", 0 },
184 { "a2", 1 },
185 { "a3", 2 },
186 { "a4", 3 },
187 { "v1", 4 },
188 { "v2", 5 },
189 { "v3", 6 },
190 { "v4", 7 },
191 { "v5", 8 },
192 { "v6", 9 },
193 { "v7", 10 },
194 { "v8", 11 },
195 /* Other platform-specific names for r9. */
196 { "sb", 9 },
197 { "tr", 9 },
198 /* Special names. */
199 { "ip", 12 },
123dc839 200 { "lr", 14 },
201 /* Names used by GCC (not listed in the ARM EABI). */
202 { "sl", 10 },
203 /* A special name from the older ATPCS. */
204 { "wr", 7 },
205};
bc90b915 206
123dc839 207static const char *const arm_register_names[] =
208{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
94c30b78 214 "fps", "cpsr" }; /* 24 25 */
ed9a39eb 215
216/* Valid register name styles. */
217static const char **valid_disassembly_styles;
ed9a39eb 218
219/* Disassembly style to use. Default to "std" register names. */
220static const char *disassembly_style;
96baa820 221
ed9a39eb 222/* This is used to keep the bfd arch_info in sync with the disassembly
223 style. */
224static void set_disassembly_style_sfunc(char *, int,
ed9a39eb 225 struct cmd_list_element *);
afd7eef0 226static void set_disassembly_style (void);
ed9a39eb 227
b508a996 228static void convert_from_extended (const struct floatformat *, const void *,
be8626e0 229 void *, int);
b508a996 230static void convert_to_extended (const struct floatformat *, void *,
be8626e0 231 const void *, int);
ed9a39eb 232
233static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
239
e7cf25a8 240static CORE_ADDR
553cb527 241 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
242
243
244/* get_next_pcs operations. */
245static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
246 arm_get_next_pcs_read_memory_unsigned_integer,
247 arm_get_next_pcs_syscall_next_pc,
248 arm_get_next_pcs_addr_bits_remove,
249 arm_get_next_pcs_is_thumb,
250 NULL,
251};
252
9b8d791a 253struct arm_prologue_cache
c3b4394c 254{
255 /* The stack pointer at the time this frame was created; i.e. the
256 caller's stack pointer when this function was called. It is used
257 to identify this frame. */
258 CORE_ADDR prev_sp;
259
260 /* The frame base for this frame is just prev_sp - frame size.
261 FRAMESIZE is the distance from the frame pointer to the
262 initial stack pointer. */
eb5492fa 263
c3b4394c 264 int framesize;
265
266 /* The register used to hold the frame pointer for this frame. */
c3b4394c 267 int framereg;
268
269 /* Saved register offsets. */
270 struct trad_frame_saved_reg *saved_regs;
c3b4394c 271};
ed9a39eb 272
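/* Worked example (hypothetical, added for exposition): for a Thumb
   function whose prologue is

       push {r4, r7, lr}
       sub  sp, #12
       add  r7, sp, #0

   the prologue analysis below would record framereg = THUMB_FP_REGNUM (r7)
   and framesize = 24, so the caller's SP is recovered as r7 + 24, with LR
   saved at prev_sp - 4, r7 at prev_sp - 8 and r4 at prev_sp - 12.  */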
273static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
274 CORE_ADDR prologue_start,
275 CORE_ADDR prologue_end,
276 struct arm_prologue_cache *cache);
277
278/* Architecture version for displaced stepping.  This affects the behaviour of
279 certain instructions, and really should not be hard-wired. */
280
281#define DISPLACED_STEPPING_ARCH_VERSION 5
282
94c30b78 283/* Set to true if the 32-bit mode is in use. */
284
285int arm_apcs_32 = 1;
286
287/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
288
478fd957 289int
290arm_psr_thumb_bit (struct gdbarch *gdbarch)
291{
292 if (gdbarch_tdep (gdbarch)->is_m)
293 return XPSR_T;
294 else
295 return CPSR_T;
296}
297
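/* Note (added): on A- and R-profile cores the T bit is bit 5 of the
   CPSR, while on M-profile cores it is bit 24 of the xPSR, which is why
   the bit mask has to be looked up through the gdbarch rather than
   hard-coded.  */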
298/* Determine if the processor is currently executing in Thumb mode. */
299
300int
301arm_is_thumb (struct regcache *regcache)
302{
303 ULONGEST cpsr;
304 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
305
306 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
307
308 return (cpsr & t_bit) != 0;
309}
310
311/* Determine if FRAME is executing in Thumb mode. */
312
25b41d01 313int
314arm_frame_is_thumb (struct frame_info *frame)
315{
316 CORE_ADDR cpsr;
9779414d 317 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
318
319 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
320 directly (from a signal frame or dummy frame) or by interpreting
321 the saved LR (from a prologue or DWARF frame). So consult it and
322 trust the unwinders. */
323 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
324
9779414d 325 return (cpsr & t_bit) != 0;
326}
327
328/* Callback for VEC_lower_bound. */
329
330static inline int
331arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
332 const struct arm_mapping_symbol *rhs)
333{
334 return lhs->value < rhs->value;
335}
336
337/* Search for the mapping symbol covering MEMADDR. If one is found,
338 return its type. Otherwise, return 0. If START is non-NULL,
339 set *START to the location of the mapping symbol. */
c906108c 340
341static char
342arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
c906108c 343{
60c5725c 344 struct obj_section *sec;
0428b8f5 345
346 /* If there are mapping symbols, consult them. */
347 sec = find_pc_section (memaddr);
348 if (sec != NULL)
349 {
350 struct arm_per_objfile *data;
351 VEC(arm_mapping_symbol_s) *map;
352 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
353 0 };
354 unsigned int idx;
355
356 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
357 arm_objfile_data_key);
358 if (data != NULL)
359 {
360 map = data->section_maps[sec->the_bfd_section->index];
361 if (!VEC_empty (arm_mapping_symbol_s, map))
362 {
363 struct arm_mapping_symbol *map_sym;
364
365 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
366 arm_compare_mapping_symbols);
367
368 /* VEC_lower_bound finds the earliest ordered insertion
369 point. If the following symbol starts at this exact
370 address, we use that; otherwise, the preceding
371 mapping symbol covers this address. */
372 if (idx < VEC_length (arm_mapping_symbol_s, map))
373 {
374 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
375 if (map_sym->value == map_key.value)
376 {
377 if (start)
378 *start = map_sym->value + obj_section_addr (sec);
379 return map_sym->type;
380 }
381 }
382
383 if (idx > 0)
384 {
385 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
386 if (start)
387 *start = map_sym->value + obj_section_addr (sec);
388 return map_sym->type;
389 }
390 }
391 }
392 }
393
394 return 0;
395}
396
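/* Note (added): the mapping symbols consulted above are the ELF symbols
   $a, $t and $d that ARM toolchains emit to mark ARM code, Thumb code
   and literal data.  The stored type is the lower-case letter, so a
   return value of 't' means the address is covered by Thumb code.  */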
397/* Determine if the program counter specified in MEMADDR is in a Thumb
398 function. This function should be called for addresses unrelated to
399 any executing frame; otherwise, prefer arm_frame_is_thumb. */
400
e3039479 401int
9779414d 402arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
f9d67f43 403{
7cbd4a93 404 struct bound_minimal_symbol sym;
f9d67f43 405 char type;
406 struct displaced_step_closure* dsc
407 = get_displaced_step_closure_by_addr(memaddr);
408
409 /* If checking the mode of displaced instruction in copy area, the mode
410 should be determined by instruction on the original address. */
411 if (dsc)
412 {
413 if (debug_displaced)
414 fprintf_unfiltered (gdb_stdlog,
415 "displaced: check mode of %.8lx instead of %.8lx\n",
416 (unsigned long) dsc->insn_addr,
417 (unsigned long) memaddr);
418 memaddr = dsc->insn_addr;
419 }
420
421 /* If bit 0 of the address is set, assume this is a Thumb address. */
422 if (IS_THUMB_ADDR (memaddr))
423 return 1;
424
425 /* Respect internal mode override if active. */
426 if (arm_override_mode != -1)
427 return arm_override_mode;
428
429 /* If the user wants to override the symbol table, let him. */
430 if (strcmp (arm_force_mode_string, "arm") == 0)
431 return 0;
432 if (strcmp (arm_force_mode_string, "thumb") == 0)
433 return 1;
434
435 /* ARM v6-M and v7-M are always in Thumb mode. */
436 if (gdbarch_tdep (gdbarch)->is_m)
437 return 1;
438
439 /* If there are mapping symbols, consult them. */
440 type = arm_find_mapping_symbol (memaddr, NULL);
441 if (type)
442 return type == 't';
443
ed9a39eb 444 /* Thumb functions have a "special" bit set in minimal symbols. */
c906108c 445 sym = lookup_minimal_symbol_by_pc (memaddr);
446 if (sym.minsym)
447 return (MSYMBOL_IS_SPECIAL (sym.minsym));
448
449 /* If the user wants to override the fallback mode, let them. */
450 if (strcmp (arm_fallback_mode_string, "arm") == 0)
451 return 0;
452 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
453 return 1;
454
455 /* If we couldn't find any symbol, but we're talking to a running
456 target, then trust the current value of $cpsr. This lets
457 "display/i $pc" always show the correct mode (though if there is
458 a symbol table we will not reach here, so it still may not be
18819fa6 459 displayed in the mode it will be executed). */
0428b8f5 460 if (target_has_registers)
18819fa6 461 return arm_frame_is_thumb (get_current_frame ());
462
463 /* Otherwise we're out of luck; we assume ARM. */
464 return 0;
465}
466
181c1381 467/* Remove useless bits from addresses in a running program. */
34e8f22d 468static CORE_ADDR
24568a2c 469arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
c906108c 470{
471 /* On M-profile devices, do not strip the low bit from EXC_RETURN
472 (the magic exception return address). */
473 if (gdbarch_tdep (gdbarch)->is_m
474 && (val & 0xfffffff0) == 0xfffffff0)
475 return val;
476
a3a2ee65 477 if (arm_apcs_32)
dd6be234 478 return UNMAKE_THUMB_ADDR (val);
c906108c 479 else
a3a2ee65 480 return (val & 0x03fffffc);
481}
482
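/* Example (added, hypothetical values): with arm_apcs_32 set, the Thumb
   address 0x00008001 is returned as 0x00008000 (bit 0 stripped), while
   on an M-profile target the EXC_RETURN magic value 0xfffffff9 is
   returned unchanged so that exception-return unwinding still works.
   In 26-bit mode the value is additionally masked down to bits 2-25.  */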
0d39a070 483/* Return 1 if PC is the start of a compiler helper function which
484 can be safely ignored during prologue skipping. IS_THUMB is true
485 if the function is known to be a Thumb function due to the way it
486 is being called. */
0d39a070 487static int
e0634ccf 488skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
7cbd4a93 491 struct bound_minimal_symbol msym;
0d39a070
DJ
492
493 msym = lookup_minimal_symbol_by_pc (pc);
7cbd4a93 494 if (msym.minsym != NULL
77e371c0 495 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
efd66ac6 496 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
e0634ccf 497 {
efd66ac6 498 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
0d39a070 499
500 /* The GNU linker's Thumb call stub to foo is named
501 __foo_from_thumb. */
502 if (strstr (name, "_from_thumb") != NULL)
503 name += 2;
0d39a070 504
505 /* On soft-float targets, __truncdfsf2 is called to convert promoted
506 arguments to their argument types in non-prototyped
507 functions. */
61012eef 508 if (startswith (name, "__truncdfsf2"))
e0634ccf 509 return 1;
61012eef 510 if (startswith (name, "__aeabi_d2f"))
e0634ccf 511 return 1;
0d39a070 512
e0634ccf 513 /* Internal functions related to thread-local storage. */
61012eef 514 if (startswith (name, "__tls_get_addr"))
e0634ccf 515 return 1;
61012eef 516 if (startswith (name, "__aeabi_read_tp"))
517 return 1;
518 }
519 else
520 {
521 /* If we run against a stripped glibc, we may be unable to identify
522 special functions by name. Check for one important case,
523 __aeabi_read_tp, by comparing the *code* against the default
524 implementation (this is hand-written ARM assembler in glibc). */
525
526 if (!is_thumb
527 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
528 == 0xe3e00a0f /* mov r0, #0xffff0fff */
529 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
530 == 0xe240f01f) /* sub pc, r0, #31 */
531 return 1;
532 }
ec3d575a 533
534 return 0;
535}
536
537/* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
538 the first 16-bit of instruction, and INSN2 is the second 16-bit of
539 instruction. */
540#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
541 ((bits ((insn1), 0, 3) << 12) \
542 | (bits ((insn1), 10, 10) << 11) \
543 | (bits ((insn2), 12, 14) << 8) \
544 | bits ((insn2), 0, 7))
545
546/* Extract the immediate from instruction movw/movt of encoding A. INSN is
547 the 32-bit instruction. */
548#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
549 ((bits ((insn), 16, 19) << 12) \
550 | bits ((insn), 0, 11))
551
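/* Worked example (added): for the Thumb-2 encoding of "movw r3, #0x1234"
   the two halfwords are insn1 = 0xf241 and insn2 = 0x2334, and
   EXTRACT_MOVW_MOVT_IMM_T gives (0x1 << 12) | (0 << 11) | (0x2 << 8) | 0x34
   == 0x1234.  For the ARM encoding 0xe3013234 ("movw r3, #0x1234"),
   EXTRACT_MOVW_MOVT_IMM_A gives (0x1 << 12) | 0x234 == 0x1234.  */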
552/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
553
554static unsigned int
555thumb_expand_immediate (unsigned int imm)
556{
557 unsigned int count = imm >> 7;
558
559 if (count < 8)
560 switch (count / 2)
561 {
562 case 0:
563 return imm & 0xff;
564 case 1:
565 return (imm & 0xff) | ((imm & 0xff) << 16);
566 case 2:
567 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
568 case 3:
569 return (imm & 0xff) | ((imm & 0xff) << 8)
570 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
571 }
572
573 return (0x80 | (imm & 0x7f)) << (32 - count);
574}
575
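/* Worked examples (added): an IMM field of 0x0ab has count = 1, so the
   value is just 0xab; 0x155 has count = 2, giving the repeated pattern
   0x00550055; 0x400 has count = 8, so the rotated form
   (0x80 | 0x00) << (32 - 8) == 0x80000000 is returned.  */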
576/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
577 epilogue, 0 otherwise. */
578
579static int
580thumb_instruction_restores_sp (unsigned short insn)
581{
582 return (insn == 0x46bd /* mov sp, r7 */
583 || (insn & 0xff80) == 0xb000 /* add sp, imm */
584 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
585}
586
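/* Examples (added): 0x46bd is "mov sp, r7", 0xb008 is "add sp, #32"
   (the immediate is scaled by 4) and 0xbd10 is "pop {r4, pc}"; all
   three are treated as epilogue instructions by the scanners below.  */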
587/* Analyze a Thumb prologue, looking for a recognizable stack frame
588 and frame pointer. Scan until we encounter a store that could
589 clobber the stack frame unexpectedly, or an unknown instruction.
590 Return the last address which is definitely safe to skip for an
591 initial breakpoint. */
592
593static CORE_ADDR
594thumb_analyze_prologue (struct gdbarch *gdbarch,
595 CORE_ADDR start, CORE_ADDR limit,
596 struct arm_prologue_cache *cache)
c906108c 597{
0d39a070 598 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
e17a4113 599 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
600 int i;
601 pv_t regs[16];
602 struct pv_area *stack;
603 struct cleanup *back_to;
604 CORE_ADDR offset;
ec3d575a 605 CORE_ADDR unrecognized_pc = 0;
da3c6d4a 606
607 for (i = 0; i < 16; i++)
608 regs[i] = pv_register (i, 0);
55f960e1 609 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
610 back_to = make_cleanup_free_pv_area (stack);
611
29d73ae4 612 while (start < limit)
c906108c 613 {
614 unsigned short insn;
615
e17a4113 616 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
9d4fde75 617
94c30b78 618 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
da59e081 619 {
620 int regno;
621 int mask;
622
623 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
624 break;
625
626 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
627 whether to save LR (R14). */
628 mask = (insn & 0xff) | ((insn & 0x100) << 6);
629
630 /* Calculate offsets of saved R0-R7 and LR. */
631 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
632 if (mask & (1 << regno))
633 {
634 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
635 -4);
636 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
637 }
da59e081 638 }
1db01f22 639 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
da59e081 640 {
29d73ae4 641 offset = (insn & 0x7f) << 2; /* get scaled offset */
642 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
643 -offset);
da59e081 644 }
645 else if (thumb_instruction_restores_sp (insn))
646 {
647 /* Don't scan past the epilogue. */
648 break;
649 }
650 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
651 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
652 (insn & 0xff) << 2);
653 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
654 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
655 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
656 bits (insn, 6, 8));
657 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
658 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
659 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
660 bits (insn, 0, 7));
661 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
662 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
663 && pv_is_constant (regs[bits (insn, 3, 5)]))
664 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
665 regs[bits (insn, 6, 8)]);
666 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
667 && pv_is_constant (regs[bits (insn, 3, 6)]))
668 {
669 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
670 int rm = bits (insn, 3, 6);
671 regs[rd] = pv_add (regs[rd], regs[rm]);
672 }
29d73ae4 673 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
da59e081 674 {
675 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
676 int src_reg = (insn & 0x78) >> 3;
677 regs[dst_reg] = regs[src_reg];
da59e081 678 }
29d73ae4 679 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
da59e081 680 {
681 /* Handle stores to the stack. Normally pushes are used,
682 but with GCC -mtpcs-frame, there may be other stores
683 in the prologue to create the frame. */
684 int regno = (insn >> 8) & 0x7;
685 pv_t addr;
686
687 offset = (insn & 0xff) << 2;
688 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
689
690 if (pv_area_store_would_trash (stack, addr))
691 break;
692
693 pv_area_store (stack, addr, 4, regs[regno]);
da59e081 694 }
695 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
696 {
697 int rd = bits (insn, 0, 2);
698 int rn = bits (insn, 3, 5);
699 pv_t addr;
700
701 offset = bits (insn, 6, 10) << 2;
702 addr = pv_add_constant (regs[rn], offset);
703
704 if (pv_area_store_would_trash (stack, addr))
705 break;
706
707 pv_area_store (stack, addr, 4, regs[rd]);
708 }
709 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
710 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
711 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
712 /* Ignore stores of argument registers to the stack. */
713 ;
714 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
715 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
716 /* Ignore block loads from the stack, potentially copying
717 parameters from memory. */
718 ;
719 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
720 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
721 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
722 /* Similarly ignore single loads from the stack. */
723 ;
724 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
725 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
726 /* Skip register copies, i.e. saves to another register
727 instead of the stack. */
728 ;
729 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
730 /* Recognize constant loads; even with small stacks these are necessary
731 on Thumb. */
732 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
733 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
734 {
735 /* Constant pool loads, for the same reason. */
736 unsigned int constant;
737 CORE_ADDR loc;
738
739 loc = start + 4 + bits (insn, 0, 7) * 4;
740 constant = read_memory_unsigned_integer (loc, 4, byte_order);
741 regs[bits (insn, 8, 10)] = pv_constant (constant);
742 }
db24da6d 743 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
0d39a070 744 {
745 unsigned short inst2;
746
747 inst2 = read_memory_unsigned_integer (start + 2, 2,
748 byte_order_for_code);
749
750 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
751 {
752 /* BL, BLX. Allow some special function calls when
753 skipping the prologue; GCC generates these before
754 storing arguments to the stack. */
755 CORE_ADDR nextpc;
756 int j1, j2, imm1, imm2;
757
758 imm1 = sbits (insn, 0, 10);
759 imm2 = bits (inst2, 0, 10);
760 j1 = bit (inst2, 13);
761 j2 = bit (inst2, 11);
762
763 offset = ((imm1 << 12) + (imm2 << 1));
764 offset ^= ((!j2) << 22) | ((!j1) << 23);
765
766 nextpc = start + 4 + offset;
767 /* For BLX make sure to clear the low bits. */
768 if (bit (inst2, 12) == 0)
769 nextpc = nextpc & 0xfffffffc;
770
e0634ccf
UW
771 if (!skip_prologue_function (gdbarch, nextpc,
772 bit (inst2, 12) != 0))
773 break;
774 }
ec3d575a 775
776 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
777 { registers } */
778 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
779 {
780 pv_t addr = regs[bits (insn, 0, 3)];
781 int regno;
782
783 if (pv_area_store_would_trash (stack, addr))
784 break;
785
786 /* Calculate offsets of saved registers. */
787 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
788 if (inst2 & (1 << regno))
789 {
790 addr = pv_add_constant (addr, -4);
791 pv_area_store (stack, addr, 4, regs[regno]);
792 }
793
794 if (insn & 0x0020)
795 regs[bits (insn, 0, 3)] = addr;
796 }
797
798 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
799 [Rn, #+/-imm]{!} */
800 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
801 {
802 int regno1 = bits (inst2, 12, 15);
803 int regno2 = bits (inst2, 8, 11);
804 pv_t addr = regs[bits (insn, 0, 3)];
805
806 offset = inst2 & 0xff;
807 if (insn & 0x0080)
808 addr = pv_add_constant (addr, offset);
809 else
810 addr = pv_add_constant (addr, -offset);
811
812 if (pv_area_store_would_trash (stack, addr))
813 break;
814
815 pv_area_store (stack, addr, 4, regs[regno1]);
816 pv_area_store (stack, pv_add_constant (addr, 4),
817 4, regs[regno2]);
818
819 if (insn & 0x0020)
820 regs[bits (insn, 0, 3)] = addr;
821 }
822
823 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
824 && (inst2 & 0x0c00) == 0x0c00
825 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
826 {
827 int regno = bits (inst2, 12, 15);
828 pv_t addr = regs[bits (insn, 0, 3)];
829
830 offset = inst2 & 0xff;
831 if (inst2 & 0x0200)
832 addr = pv_add_constant (addr, offset);
833 else
834 addr = pv_add_constant (addr, -offset);
835
836 if (pv_area_store_would_trash (stack, addr))
837 break;
838
839 pv_area_store (stack, addr, 4, regs[regno]);
840
841 if (inst2 & 0x0100)
842 regs[bits (insn, 0, 3)] = addr;
843 }
844
845 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
847 {
848 int regno = bits (inst2, 12, 15);
849 pv_t addr;
850
851 offset = inst2 & 0xfff;
852 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
853
854 if (pv_area_store_would_trash (stack, addr))
855 break;
856
857 pv_area_store (stack, addr, 4, regs[regno]);
858 }
859
860 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
0d39a070 861 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 862 /* Ignore stores of argument registers to the stack. */
0d39a070 863 ;
864
865 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
866 && (inst2 & 0x0d00) == 0x0c00
0d39a070 867 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 868 /* Ignore stores of argument registers to the stack. */
0d39a070 869 ;
ec3d575a 870
871 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
872 { registers } */
873 && (inst2 & 0x8000) == 0x0000
874 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
875 /* Ignore block loads from the stack, potentially copying
876 parameters from memory. */
0d39a070 877 ;
ec3d575a 878
879 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
880 [Rn, #+/-imm] */
0d39a070 881 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 882 /* Similarly ignore dual loads from the stack. */
0d39a070 883 ;
884
885 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
886 && (inst2 & 0x0d00) == 0x0c00
0d39a070 887 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 888 /* Similarly ignore single loads from the stack. */
0d39a070 889 ;
890
891 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
0d39a070 892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
ec3d575a 893 /* Similarly ignore single loads from the stack. */
0d39a070 894 ;
895
896 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
897 && (inst2 & 0x8000) == 0x0000)
898 {
899 unsigned int imm = ((bits (insn, 10, 10) << 11)
900 | (bits (inst2, 12, 14) << 8)
901 | bits (inst2, 0, 7));
902
903 regs[bits (inst2, 8, 11)]
904 = pv_add_constant (regs[bits (insn, 0, 3)],
905 thumb_expand_immediate (imm));
906 }
907
908 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
909 && (inst2 & 0x8000) == 0x0000)
0d39a070 910 {
911 unsigned int imm = ((bits (insn, 10, 10) << 11)
912 | (bits (inst2, 12, 14) << 8)
913 | bits (inst2, 0, 7));
914
915 regs[bits (inst2, 8, 11)]
916 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
917 }
918
919 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
920 && (inst2 & 0x8000) == 0x0000)
921 {
922 unsigned int imm = ((bits (insn, 10, 10) << 11)
923 | (bits (inst2, 12, 14) << 8)
924 | bits (inst2, 0, 7));
925
926 regs[bits (inst2, 8, 11)]
927 = pv_add_constant (regs[bits (insn, 0, 3)],
928 - (CORE_ADDR) thumb_expand_immediate (imm));
929 }
930
931 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
932 && (inst2 & 0x8000) == 0x0000)
933 {
934 unsigned int imm = ((bits (insn, 10, 10) << 11)
935 | (bits (inst2, 12, 14) << 8)
936 | bits (inst2, 0, 7));
937
938 regs[bits (inst2, 8, 11)]
939 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
940 }
941
942 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
943 {
944 unsigned int imm = ((bits (insn, 10, 10) << 11)
945 | (bits (inst2, 12, 14) << 8)
946 | bits (inst2, 0, 7));
947
948 regs[bits (inst2, 8, 11)]
949 = pv_constant (thumb_expand_immediate (imm));
950 }
951
952 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
953 {
954 unsigned int imm
955 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
956
957 regs[bits (inst2, 8, 11)] = pv_constant (imm);
958 }
959
960 else if (insn == 0xea5f /* mov.w Rd,Rm */
961 && (inst2 & 0xf0f0) == 0)
962 {
963 int dst_reg = (inst2 & 0x0f00) >> 8;
964 int src_reg = inst2 & 0xf;
965 regs[dst_reg] = regs[src_reg];
966 }
967
968 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
969 {
970 /* Constant pool loads. */
971 unsigned int constant;
972 CORE_ADDR loc;
973
cac395ea 974 offset = bits (inst2, 0, 11);
975 if (insn & 0x0080)
976 loc = start + 4 + offset;
977 else
978 loc = start + 4 - offset;
979
980 constant = read_memory_unsigned_integer (loc, 4, byte_order);
981 regs[bits (inst2, 12, 15)] = pv_constant (constant);
982 }
983
984 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
985 {
986 /* Constant pool loads. */
987 unsigned int constant;
988 CORE_ADDR loc;
989
cac395ea 990 offset = bits (inst2, 0, 7) << 2;
991 if (insn & 0x0080)
992 loc = start + 4 + offset;
993 else
994 loc = start + 4 - offset;
995
996 constant = read_memory_unsigned_integer (loc, 4, byte_order);
997 regs[bits (inst2, 12, 15)] = pv_constant (constant);
998
999 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1000 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1001 }
1002
1003 else if (thumb2_instruction_changes_pc (insn, inst2))
1004 {
1005 /* Don't scan past anything that might change control flow. */
1006 break;
1007 }
1008 else
1009 {
1010 /* The optimizer might shove anything into the prologue,
1011 so we just skip what we don't recognize. */
1012 unrecognized_pc = start;
1013 }
1014
1015 start += 2;
1016 }
ec3d575a 1017 else if (thumb_instruction_changes_pc (insn))
3d74b771 1018 {
ec3d575a 1019 /* Don't scan past anything that might change control flow. */
da3c6d4a 1020 break;
3d74b771 1021 }
1022 else
1023 {
1024 /* The optimizer might shove anything into the prologue,
1025 so we just skip what we don't recognize. */
1026 unrecognized_pc = start;
1027 }
1028
1029 start += 2;
1030 }
1031
1032 if (arm_debug)
1033 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1034 paddress (gdbarch, start));
1035
1036 if (unrecognized_pc == 0)
1037 unrecognized_pc = start;
1038
1039 if (cache == NULL)
1040 {
1041 do_cleanups (back_to);
ec3d575a 1042 return unrecognized_pc;
1043 }
1044
1045 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1046 {
1047 /* Frame pointer is fp. Frame size is constant. */
1048 cache->framereg = ARM_FP_REGNUM;
1049 cache->framesize = -regs[ARM_FP_REGNUM].k;
1050 }
1051 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1052 {
1053 /* Frame pointer is r7. Frame size is constant. */
1054 cache->framereg = THUMB_FP_REGNUM;
1055 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1056 }
72a2e3dc 1057 else
1058 {
1059 /* Try the stack pointer... this is a bit desperate. */
1060 cache->framereg = ARM_SP_REGNUM;
1061 cache->framesize = -regs[ARM_SP_REGNUM].k;
1062 }
1063
1064 for (i = 0; i < 16; i++)
1065 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1066 cache->saved_regs[i].addr = offset;
1067
1068 do_cleanups (back_to);
ec3d575a 1069 return unrecognized_pc;
1070}
1071
1072
1073/* Try to analyze the instructions starting from PC, which load symbol
1074 __stack_chk_guard. Return the address of instruction after loading this
1075 symbol, set the dest register number to *BASEREG, and set the size of
1076 instructions for loading symbol in OFFSET. Return 0 if instructions are
1077 not recognized. */
1078
1079static CORE_ADDR
1080arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1081 unsigned int *destreg, int *offset)
1082{
1083 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1084 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1085 unsigned int low, high, address;
1086
1087 address = 0;
1088 if (is_thumb)
1089 {
1090 unsigned short insn1
1091 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1092
1093 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1094 {
1095 *destreg = bits (insn1, 8, 10);
1096 *offset = 2;
1097 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1098 address = read_memory_unsigned_integer (address, 4,
1099 byte_order_for_code);
1100 }
1101 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1102 {
1103 unsigned short insn2
1104 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1105
1106 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1107
1108 insn1
1109 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1110 insn2
1111 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1112
1113 /* movt Rd, #const */
1114 if ((insn1 & 0xfbc0) == 0xf2c0)
1115 {
1116 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1117 *destreg = bits (insn2, 8, 11);
1118 *offset = 8;
1119 address = (high << 16 | low);
1120 }
1121 }
1122 }
1123 else
1124 {
1125 unsigned int insn
1126 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1127
6ae274b7 1128 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
2e9e421f 1129 {
1130 address = bits (insn, 0, 11) + pc + 8;
1131 address = read_memory_unsigned_integer (address, 4,
1132 byte_order_for_code);
1133
1134 *destreg = bits (insn, 12, 15);
1135 *offset = 4;
1136 }
1137 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1138 {
1139 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1140
1141 insn
1142 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1143
1144 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1145 {
1146 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1147 *destreg = bits (insn, 12, 15);
1148 *offset = 8;
1149 address = (high << 16 | low);
1150 }
1151 }
1152 }
1153
1154 return address;
1155}
1156
1157/* Try to skip a sequence of instructions used for stack protector. If PC
1158 points to the first instruction of this sequence, return the address of
 1159 the first instruction after this sequence; otherwise, return the original PC.
1160
 1161 On arm, this sequence of instructions is mainly composed of three steps,
1162 Step 1: load symbol __stack_chk_guard,
1163 Step 2: load from address of __stack_chk_guard,
1164 Step 3: store it to somewhere else.
1165
1166 Usually, instructions on step 2 and step 3 are the same on various ARM
1167 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1168 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1169 instructions in step 1 vary from different ARM architectures. On ARMv7,
1170 they are,
1171
1172 movw Rn, #:lower16:__stack_chk_guard
1173 movt Rn, #:upper16:__stack_chk_guard
1174
1175 On ARMv5t, it is,
1176
1177 ldr Rn, .Label
1178 ....
 1179 .Label:
1180 .word __stack_chk_guard
1181
 1182 Since ldr/str are very common instructions, we can't use them alone as
 1183 the 'fingerprint' or 'signature' of a stack protector sequence.  Here we
 1184 choose the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
 1185 if not stripped, as the 'fingerprint' of a stack protector code sequence. */
1186
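/* Illustrative ARMv7 sequence (an assumed example, not taken from any
   particular compiler output) that the function below recognizes:

       movw r3, #:lower16:__stack_chk_guard
       movt r3, #:upper16:__stack_chk_guard    <- Step 1
       ldr  r3, [r3]                           <- Step 2
       str  r3, [r7, #8]                       <- Step 3

   If PC points at the movw, arm_skip_stack_protector returns the address
   of the instruction following the str.  */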
1187static CORE_ADDR
1188arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1189{
1190 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
22e048c9 1191 unsigned int basereg;
7cbd4a93 1192 struct bound_minimal_symbol stack_chk_guard;
1193 int offset;
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1195 CORE_ADDR addr;
1196
1197 /* Try to parse the instructions in Step 1. */
1198 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1199 &basereg, &offset);
1200 if (!addr)
1201 return pc;
1202
1203 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1204 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1205 Otherwise, this sequence cannot be for stack protector. */
1206 if (stack_chk_guard.minsym == NULL
61012eef 1207 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1208 return pc;
1209
1210 if (is_thumb)
1211 {
1212 unsigned int destreg;
1213 unsigned short insn
1214 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1215
1216 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1217 if ((insn & 0xf800) != 0x6800)
1218 return pc;
1219 if (bits (insn, 3, 5) != basereg)
1220 return pc;
1221 destreg = bits (insn, 0, 2);
1222
1223 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1224 byte_order_for_code);
1225 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1226 if ((insn & 0xf800) != 0x6000)
1227 return pc;
1228 if (destreg != bits (insn, 0, 2))
1229 return pc;
1230 }
1231 else
1232 {
1233 unsigned int destreg;
1234 unsigned int insn
1235 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1236
1237 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1238 if ((insn & 0x0e500000) != 0x04100000)
1239 return pc;
1240 if (bits (insn, 16, 19) != basereg)
1241 return pc;
1242 destreg = bits (insn, 12, 15);
1243 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1244 insn = read_memory_unsigned_integer (pc + offset + 4,
1245 4, byte_order_for_code);
1246 if ((insn & 0x0e500000) != 0x04000000)
1247 return pc;
1248 if (bits (insn, 12, 15) != destreg)
1249 return pc;
1250 }
1251 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1252 on arm. */
1253 if (is_thumb)
1254 return pc + offset + 4;
1255 else
1256 return pc + offset + 8;
1257}
1258
1259/* Advance the PC across any function entry prologue instructions to
1260 reach some "real" code.
1261
1262 The APCS (ARM Procedure Call Standard) defines the following
ed9a39eb 1263 prologue:
c906108c 1264
1265 mov ip, sp
1266 [stmfd sp!, {a1,a2,a3,a4}]
1267 stmfd sp!, {...,fp,ip,lr,pc}
1268 [stfe f7, [sp, #-12]!]
1269 [stfe f6, [sp, #-12]!]
1270 [stfe f5, [sp, #-12]!]
1271 [stfe f4, [sp, #-12]!]
0963b4bd 1272 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
c906108c 1273
34e8f22d 1274static CORE_ADDR
6093d2eb 1275arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
c906108c 1276{
a89fea3c 1277 CORE_ADDR func_addr, limit_pc;
c906108c 1278
1279 /* See if we can determine the end of the prologue via the symbol table.
1280 If so, then return either PC, or the PC after the prologue, whichever
1281 is greater. */
1282 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
c906108c 1283 {
1284 CORE_ADDR post_prologue_pc
1285 = skip_prologue_using_sal (gdbarch, func_addr);
43f3e411 1286 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
0d39a070 1287
1288 if (post_prologue_pc)
1289 post_prologue_pc
1290 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1291
1292
1293 /* GCC always emits a line note before the prologue and another
1294 one after, even if the two are at the same address or on the
1295 same line. Take advantage of this so that we do not need to
1296 know every instruction that might appear in the prologue. We
1297 will have producer information for most binaries; if it is
 1298 missing (e.g. for -gstabs), assume the GNU tools. */
1299 if (post_prologue_pc
1300 && (cust == NULL
1301 || COMPUNIT_PRODUCER (cust) == NULL
1302 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1303 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1304 return post_prologue_pc;
1305
a89fea3c 1306 if (post_prologue_pc != 0)
1307 {
1308 CORE_ADDR analyzed_limit;
1309
1310 /* For non-GCC compilers, make sure the entire line is an
1311 acceptable prologue; GDB will round this function's
1312 return value up to the end of the following line so we
1313 can not skip just part of a line (and we do not want to).
1314
1315 RealView does not treat the prologue specially, but does
1316 associate prologue code with the opening brace; so this
1317 lets us skip the first line if we think it is the opening
1318 brace. */
9779414d 1319 if (arm_pc_is_thumb (gdbarch, func_addr))
1320 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1321 post_prologue_pc, NULL);
1322 else
1323 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1324 post_prologue_pc, NULL);
1325
1326 if (analyzed_limit != post_prologue_pc)
1327 return func_addr;
1328
1329 return post_prologue_pc;
1330 }
1331 }
1332
1333 /* Can't determine prologue from the symbol table, need to examine
1334 instructions. */
c906108c 1335
1336 /* Find an upper limit on the function prologue using the debug
1337 information. If the debug information could not be used to provide
1338 that bound, then use an arbitrary large number as the upper bound. */
0963b4bd 1339 /* Like arm_scan_prologue, stop no later than pc + 64. */
d80b854b 1340 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1341 if (limit_pc == 0)
1342 limit_pc = pc + 64; /* Magic. */
1343
c906108c 1344
29d73ae4 1345 /* Check if this is Thumb code. */
9779414d 1346 if (arm_pc_is_thumb (gdbarch, pc))
a89fea3c 1347 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1348 else
1349 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
c906108c 1350}
94c30b78 1351
c5aa993b 1352/* *INDENT-OFF* */
1353/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1354 This function decodes a Thumb function prologue to determine:
1355 1) the size of the stack frame
1356 2) which registers are saved on it
1357 3) the offsets of saved regs
1358 4) the offset from the stack pointer to the frame pointer
c906108c 1359
1360 A typical Thumb function prologue would create this stack frame
1361 (offsets relative to FP)
c906108c
SS
1362 old SP -> 24 stack parameters
1363 20 LR
1364 16 R7
1365 R7 -> 0 local variables (16 bytes)
1366 SP -> -12 additional stack space (12 bytes)
1367 The frame size would thus be 36 bytes, and the frame offset would be
0963b4bd 1368 12 bytes. The frame register is R7.
da59e081 1369
 1370 The comments for thumb_analyze_prologue() describe the algorithm we use
 1371 to detect the end of the prologue. */
1372/* *INDENT-ON* */
1373
c906108c 1374static void
be8626e0 1375thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
b39cc962 1376 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1377{
1378 CORE_ADDR prologue_start;
1379 CORE_ADDR prologue_end;
c906108c 1380
1381 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1382 &prologue_end))
c906108c 1383 {
1384 /* See comment in arm_scan_prologue for an explanation of
 1385 this heuristic. */
1386 if (prologue_end > prologue_start + 64)
1387 {
1388 prologue_end = prologue_start + 64;
1389 }
c906108c
SS
1390 }
1391 else
f7060f85
DJ
1392 /* We're in the boondocks: we have no idea where the start of the
1393 function is. */
1394 return;
c906108c 1395
eb5492fa 1396 prologue_end = min (prologue_end, prev_pc);
c906108c 1397
be8626e0 1398 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1399}
1400
1401/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1402 otherwise. */
1403
1404static int
1405arm_instruction_restores_sp (unsigned int insn)
1406{
1407 if (bits (insn, 28, 31) != INST_NV)
1408 {
1409 if ((insn & 0x0df0f000) == 0x0080d000
1410 /* ADD SP (register or immediate). */
1411 || (insn & 0x0df0f000) == 0x0040d000
1412 /* SUB SP (register or immediate). */
1413 || (insn & 0x0ffffff0) == 0x01a0d000
1414 /* MOV SP. */
1415 || (insn & 0x0fff0000) == 0x08bd0000
1416 /* POP (LDMIA). */
1417 || (insn & 0x0fff0000) == 0x049d0000)
1418 /* POP of a single register. */
1419 return 1;
1420 }
1421
1422 return 0;
1423}
1424
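/* Examples (added, hypothetical encodings): 0xe28dd010 is
   "add sp, sp, #16" and matches the ADD SP pattern above; 0xe8bd8800 is
   "pop {fp, pc}" (an LDMIA of SP with writeback) and matches the POP
   pattern.  Instructions using the "never" condition (INST_NV) are
   rejected up front.  */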
1425/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1426 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1427 fill it in. Return the first address not recognized as a prologue
1428 instruction.
eb5492fa 1429
1430 We recognize all the instructions typically found in ARM prologues,
1431 plus harmless instructions which can be skipped (either for analysis
1432 purposes, or a more restrictive set that can be skipped when finding
1433 the end of the prologue). */
1434
1435static CORE_ADDR
1436arm_analyze_prologue (struct gdbarch *gdbarch,
1437 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1438 struct arm_prologue_cache *cache)
1439{
1440 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1441 int regno;
1442 CORE_ADDR offset, current_pc;
1443 pv_t regs[ARM_FPS_REGNUM];
1444 struct pv_area *stack;
1445 struct cleanup *back_to;
1446 CORE_ADDR unrecognized_pc = 0;
1447
1448 /* Search the prologue looking for instructions that set up the
96baa820 1449 frame pointer, adjust the stack pointer, and save registers.
ed9a39eb 1450
1451 Be careful, however, and if it doesn't look like a prologue,
1452 don't try to scan it. If, for instance, a frameless function
1453 begins with stmfd sp!, then we will tell ourselves there is
b8d5e71d 1454 a frame, which will confuse stack traceback, as well as "finish"
96baa820 1455 and other operations that rely on a knowledge of the stack
0d39a070 1456 traceback. */
d4473757 1457
1458 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1459 regs[regno] = pv_register (regno, 0);
55f960e1 1460 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1461 back_to = make_cleanup_free_pv_area (stack);
1462
1463 for (current_pc = prologue_start;
1464 current_pc < prologue_end;
f43845b3 1465 current_pc += 4)
96baa820 1466 {
1467 unsigned int insn
1468 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
9d4fde75 1469
94c30b78 1470 if (insn == 0xe1a0c00d) /* mov ip, sp */
f43845b3 1471 {
4be43953 1472 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1473 continue;
1474 }
0d39a070
DJ
1475 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1476 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1477 {
1478 unsigned imm = insn & 0xff; /* immediate value */
1479 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1480 int rd = bits (insn, 12, 15);
28cd8767 1481 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1482 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
28cd8767
JG
1483 continue;
1484 }
0d39a070
DJ
1485 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1486 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
28cd8767
JG
1487 {
1488 unsigned imm = insn & 0xff; /* immediate value */
1489 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
0d39a070 1490 int rd = bits (insn, 12, 15);
28cd8767 1491 imm = (imm >> rot) | (imm << (32 - rot));
0d39a070 1492 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1493 continue;
1494 }
1495 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1496 [sp, #-4]! */
f43845b3 1497 {
1498 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1499 break;
1500 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1501 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1502 regs[bits (insn, 12, 15)]);
1503 continue;
1504 }
1505 else if ((insn & 0xffff0000) == 0xe92d0000)
1506 /* stmfd sp!, {..., fp, ip, lr, pc}
1507 or
1508 stmfd sp!, {a1, a2, a3, a4} */
c906108c 1509 {
d4473757 1510 int mask = insn & 0xffff;
ed9a39eb 1511
1512 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1513 break;
1514
94c30b78 1515 /* Calculate offsets of saved registers. */
34e8f22d 1516 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1517 if (mask & (1 << regno))
1518 {
1519 regs[ARM_SP_REGNUM]
1520 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
4be43953 1521 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1522 }
1523 }
1524 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1525 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
f8bf5763 1526 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1527 {
1528 /* No need to add this to saved_regs -- it's just an arg reg. */
1529 continue;
1530 }
1531 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1532 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
f8bf5763 1533 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1534 {
1535 /* No need to add this to saved_regs -- it's just an arg reg. */
1536 continue;
1537 }
1538 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1539 { registers } */
1540 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1541 {
1542 /* No need to add this to saved_regs -- it's just arg regs. */
1543 continue;
1544 }
1545 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1546 {
1547 unsigned imm = insn & 0xff; /* immediate value */
1548 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1549 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1550 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1551 }
1552 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1553 {
1554 unsigned imm = insn & 0xff; /* immediate value */
1555 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
d4473757 1556 imm = (imm >> rot) | (imm << (32 - rot));
4be43953 1557 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
d4473757 1558 }
1559 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1560 [sp, -#c]! */
2af46ca0 1561 && gdbarch_tdep (gdbarch)->have_fpa_registers)
d4473757 1562 {
1563 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1564 break;
1565
1566 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
34e8f22d 1567 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
4be43953 1568 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
d4473757 1569 }
1570 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1571 [sp!] */
2af46ca0 1572 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1573 {
1574 int n_saved_fp_regs;
1575 unsigned int fp_start_reg, fp_bound_reg;
1576
1577 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1578 break;
1579
94c30b78 1580 if ((insn & 0x800) == 0x800) /* N0 is set */
96baa820 1581 {
1582 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1583 n_saved_fp_regs = 3;
1584 else
1585 n_saved_fp_regs = 1;
96baa820 1586 }
d4473757 1587 else
96baa820 1588 {
1589 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1590 n_saved_fp_regs = 2;
1591 else
1592 n_saved_fp_regs = 4;
96baa820 1593 }
d4473757 1594
34e8f22d 1595 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1596 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1597 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
96baa820 1598 {
1599 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1600 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1601 regs[fp_start_reg++]);
96baa820 1602 }
c906108c 1603 }
1604 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1605 {
1606 /* Allow some special function calls when skipping the
1607 prologue; GCC generates these before storing arguments to
1608 the stack. */
1609 CORE_ADDR dest = BranchDest (current_pc, insn);
1610
e0634ccf 1611 if (skip_prologue_function (gdbarch, dest, 0))
1612 continue;
1613 else
1614 break;
1615 }
d4473757 1616 else if ((insn & 0xf0000000) != 0xe0000000)
0963b4bd 1617 break; /* Condition not true, exit early. */
0d39a070
DJ
1618 else if (arm_instruction_changes_pc (insn))
1619 /* Don't scan past anything that might change control flow. */
1620 break;
f303bc3e
YQ
1621 else if (arm_instruction_restores_sp (insn))
1622 {
1623 /* Don't scan past the epilogue. */
1624 break;
1625 }
d19f7eee
UW
1626 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1627 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1628 /* Ignore block loads from the stack, potentially copying
1629 parameters from memory. */
1630 continue;
1631 else if ((insn & 0xfc500000) == 0xe4100000
1632 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1633 /* Similarly ignore single loads from the stack. */
1634 continue;
0d39a070
DJ
1635 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1636 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1637 register instead of the stack. */
d4473757 1638 continue;
0d39a070
DJ
1639 else
1640 {
21daaaaf
YQ
 1641 /* The optimizer might shove anything into the prologue. If we are
 1642 building up the cache (cache != NULL) from scanning the prologue,
 1643 we just skip what we don't recognize and scan further, to make
 1644 the cache as complete as possible. However, if we are skipping
 1645 the prologue, we stop immediately on the first unrecognized
 1646 instruction. */
0d39a070 1647 unrecognized_pc = current_pc;
21daaaaf
YQ
1648 if (cache != NULL)
1649 continue;
1650 else
1651 break;
0d39a070 1652 }
c906108c
SS
1653 }
1654
0d39a070
DJ
1655 if (unrecognized_pc == 0)
1656 unrecognized_pc = current_pc;
1657
0d39a070
DJ
1658 if (cache)
1659 {
4072f920
YQ
1660 int framereg, framesize;
1661
1662 /* The frame size is just the distance from the frame register
1663 to the original stack pointer. */
1664 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1665 {
1666 /* Frame pointer is fp. */
1667 framereg = ARM_FP_REGNUM;
1668 framesize = -regs[ARM_FP_REGNUM].k;
1669 }
1670 else
1671 {
1672 /* Try the stack pointer... this is a bit desperate. */
1673 framereg = ARM_SP_REGNUM;
1674 framesize = -regs[ARM_SP_REGNUM].k;
1675 }
1676
0d39a070
DJ
1677 cache->framereg = framereg;
1678 cache->framesize = framesize;
1679
1680 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1681 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1682 cache->saved_regs[regno].addr = offset;
1683 }
1684
1685 if (arm_debug)
1686 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1687 paddress (gdbarch, unrecognized_pc));
4be43953
DJ
1688
1689 do_cleanups (back_to);
0d39a070
DJ
1690 return unrecognized_pc;
1691}
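/* Illustrative sketch (not part of the original source): for a classic
   APCS prologue such as

	mov   ip, sp
	stmfd sp!, {fp, ip, lr, pc}
	sub   fp, ip, #4
	sub   sp, sp, #16

   the scan above (together with the earlier clauses of this function
   that recognize the mov and stmfd) ends up with FP as the frame
   register and a framesize of 4, and records LR 8 bytes below the
   caller's SP, so the unwinder can later reconstruct PREV_SP and the
   return address from the frame pointer alone.  */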
1692
1693static void
1694arm_scan_prologue (struct frame_info *this_frame,
1695 struct arm_prologue_cache *cache)
1696{
1697 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1698 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
bec2ab5a 1699 CORE_ADDR prologue_start, prologue_end;
0d39a070
DJ
1700 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1701 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
0d39a070
DJ
1702
1703 /* Assume there is no frame until proven otherwise. */
1704 cache->framereg = ARM_SP_REGNUM;
1705 cache->framesize = 0;
1706
1707 /* Check for Thumb prologue. */
1708 if (arm_frame_is_thumb (this_frame))
1709 {
1710 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1711 return;
1712 }
1713
1714 /* Find the function prologue. If we can't find the function in
1715 the symbol table, peek in the stack frame to find the PC. */
1716 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1717 &prologue_end))
1718 {
1719 /* One way to find the end of the prologue (which works well
1720 for unoptimized code) is to do the following:
1721
1722 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1723
1724 if (sal.line == 0)
1725 prologue_end = prev_pc;
1726 else if (sal.end < prologue_end)
1727 prologue_end = sal.end;
1728
1729 This mechanism is very accurate so long as the optimizer
1730 doesn't move any instructions from the function body into the
1731 prologue. If this happens, sal.end will be the last
1732 instruction in the first hunk of prologue code just before
1733 the first instruction that the scheduler has moved from
1734 the body to the prologue.
1735
1736 In order to make sure that we scan all of the prologue
1737 instructions, we use a slightly less accurate mechanism which
1738 may scan more than necessary. To help compensate for this
1739 lack of accuracy, the prologue scanning loop below contains
 1740 several clauses which will cause the loop to terminate early if
1741 an implausible prologue instruction is encountered.
1742
1743 The expression
1744
1745 prologue_start + 64
1746
1747 is a suitable endpoint since it accounts for the largest
1748 possible prologue plus up to five instructions inserted by
1749 the scheduler. */
1750
1751 if (prologue_end > prologue_start + 64)
1752 {
1753 prologue_end = prologue_start + 64; /* See above. */
1754 }
1755 }
1756 else
1757 {
1758 /* We have no symbol information. Our only option is to assume this
1759 function has a standard stack frame and the normal frame register.
1760 Then, we can find the value of our frame pointer on entrance to
1761 the callee (or at the present moment if this is the innermost frame).
1762 The value stored there should be the address of the stmfd + 8. */
1763 CORE_ADDR frame_loc;
1764 LONGEST return_value;
1765
1766 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1767 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1768 return;
1769 else
1770 {
1771 prologue_start = gdbarch_addr_bits_remove
1772 (gdbarch, return_value) - 8;
1773 prologue_end = prologue_start + 64; /* See above. */
1774 }
1775 }
1776
1777 if (prev_pc < prologue_end)
1778 prologue_end = prev_pc;
1779
1780 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
c906108c
SS
1781}
1782
eb5492fa 1783static struct arm_prologue_cache *
a262aec2 1784arm_make_prologue_cache (struct frame_info *this_frame)
c906108c 1785{
eb5492fa
DJ
1786 int reg;
1787 struct arm_prologue_cache *cache;
1788 CORE_ADDR unwound_fp;
c5aa993b 1789
35d5d4ee 1790 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 1791 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
c906108c 1792
a262aec2 1793 arm_scan_prologue (this_frame, cache);
848cfffb 1794
a262aec2 1795 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
eb5492fa
DJ
1796 if (unwound_fp == 0)
1797 return cache;
c906108c 1798
4be43953 1799 cache->prev_sp = unwound_fp + cache->framesize;
c906108c 1800
eb5492fa
DJ
1801 /* Calculate actual addresses of saved registers using offsets
1802 determined by arm_scan_prologue. */
a262aec2 1803 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
e28a332c 1804 if (trad_frame_addr_p (cache->saved_regs, reg))
eb5492fa
DJ
1805 cache->saved_regs[reg].addr += cache->prev_sp;
1806
1807 return cache;
c906108c
SS
1808}
1809
c1ee9414
LM
1810/* Implementation of the stop_reason hook for arm_prologue frames. */
1811
1812static enum unwind_stop_reason
1813arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1814 void **this_cache)
1815{
1816 struct arm_prologue_cache *cache;
1817 CORE_ADDR pc;
1818
1819 if (*this_cache == NULL)
1820 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1821 cache = (struct arm_prologue_cache *) *this_cache;
c1ee9414
LM
1822
1823 /* This is meant to halt the backtrace at "_start". */
1824 pc = get_frame_pc (this_frame);
1825 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1826 return UNWIND_OUTERMOST;
1827
1828 /* If we've hit a wall, stop. */
1829 if (cache->prev_sp == 0)
1830 return UNWIND_OUTERMOST;
1831
1832 return UNWIND_NO_REASON;
1833}
1834
eb5492fa
DJ
1835/* Our frame ID for a normal frame is the current function's starting PC
1836 and the caller's SP when we were called. */
c906108c 1837
148754e5 1838static void
a262aec2 1839arm_prologue_this_id (struct frame_info *this_frame,
eb5492fa
DJ
1840 void **this_cache,
1841 struct frame_id *this_id)
c906108c 1842{
eb5492fa
DJ
1843 struct arm_prologue_cache *cache;
1844 struct frame_id id;
2c404490 1845 CORE_ADDR pc, func;
f079148d 1846
eb5492fa 1847 if (*this_cache == NULL)
a262aec2 1848 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1849 cache = (struct arm_prologue_cache *) *this_cache;
2a451106 1850
0e9e9abd
UW
1851 /* Use function start address as part of the frame ID. If we cannot
1852 identify the start address (due to missing symbol information),
1853 fall back to just using the current PC. */
c1ee9414 1854 pc = get_frame_pc (this_frame);
2c404490 1855 func = get_frame_func (this_frame);
0e9e9abd
UW
1856 if (!func)
1857 func = pc;
1858
eb5492fa 1859 id = frame_id_build (cache->prev_sp, func);
eb5492fa 1860 *this_id = id;
c906108c
SS
1861}
1862
a262aec2
DJ
1863static struct value *
1864arm_prologue_prev_register (struct frame_info *this_frame,
eb5492fa 1865 void **this_cache,
a262aec2 1866 int prev_regnum)
24de872b 1867{
24568a2c 1868 struct gdbarch *gdbarch = get_frame_arch (this_frame);
24de872b
DJ
1869 struct arm_prologue_cache *cache;
1870
eb5492fa 1871 if (*this_cache == NULL)
a262aec2 1872 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 1873 cache = (struct arm_prologue_cache *) *this_cache;
24de872b 1874
eb5492fa 1875 /* If we are asked to unwind the PC, then we need to return the LR
b39cc962
DJ
1876 instead. The prologue may save PC, but it will point into this
1877 frame's prologue, not the next frame's resume location. Also
1878 strip the saved T bit. A valid LR may have the low bit set, but
1879 a valid PC never does. */
eb5492fa 1880 if (prev_regnum == ARM_PC_REGNUM)
b39cc962
DJ
1881 {
1882 CORE_ADDR lr;
1883
1884 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1885 return frame_unwind_got_constant (this_frame, prev_regnum,
24568a2c 1886 arm_addr_bits_remove (gdbarch, lr));
b39cc962 1887 }
24de872b 1888
eb5492fa 1889 /* SP is generally not saved to the stack, but this frame is
a262aec2 1890 identified by the next frame's stack pointer at the time of the call.
eb5492fa
DJ
1891 The value was already reconstructed into PREV_SP. */
1892 if (prev_regnum == ARM_SP_REGNUM)
a262aec2 1893 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
eb5492fa 1894
b39cc962
DJ
1895 /* The CPSR may have been changed by the call instruction and by the
1896 called function. The only bit we can reconstruct is the T bit,
1897 by checking the low bit of LR as of the call. This is a reliable
1898 indicator of Thumb-ness except for some ARM v4T pre-interworking
1899 Thumb code, which could get away with a clear low bit as long as
1900 the called function did not use bx. Guess that all other
1901 bits are unchanged; the condition flags are presumably lost,
1902 but the processor status is likely valid. */
1903 if (prev_regnum == ARM_PS_REGNUM)
1904 {
1905 CORE_ADDR lr, cpsr;
9779414d 1906 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
1907
1908 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1909 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1910 if (IS_THUMB_ADDR (lr))
9779414d 1911 cpsr |= t_bit;
b39cc962 1912 else
9779414d 1913 cpsr &= ~t_bit;
b39cc962
DJ
1914 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1915 }
1916
a262aec2
DJ
1917 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1918 prev_regnum);
eb5492fa
DJ
1919}
1920
1921struct frame_unwind arm_prologue_unwind = {
1922 NORMAL_FRAME,
c1ee9414 1923 arm_prologue_unwind_stop_reason,
eb5492fa 1924 arm_prologue_this_id,
a262aec2
DJ
1925 arm_prologue_prev_register,
1926 NULL,
1927 default_frame_sniffer
eb5492fa
DJ
1928};
1929
0e9e9abd
UW
1930/* Maintain a list of ARM exception table entries per objfile, similar to the
1931 list of mapping symbols. We only cache entries for standard ARM-defined
1932 personality routines; the cache will contain only the frame unwinding
1933 instructions associated with the entry (not the descriptors). */
1934
1935static const struct objfile_data *arm_exidx_data_key;
1936
1937struct arm_exidx_entry
1938{
1939 bfd_vma addr;
1940 gdb_byte *entry;
1941};
1942typedef struct arm_exidx_entry arm_exidx_entry_s;
1943DEF_VEC_O(arm_exidx_entry_s);
1944
1945struct arm_exidx_data
1946{
1947 VEC(arm_exidx_entry_s) **section_maps;
1948};
1949
1950static void
1951arm_exidx_data_free (struct objfile *objfile, void *arg)
1952{
9a3c8263 1953 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
0e9e9abd
UW
1954 unsigned int i;
1955
1956 for (i = 0; i < objfile->obfd->section_count; i++)
1957 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1958}
1959
1960static inline int
1961arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1962 const struct arm_exidx_entry *rhs)
1963{
1964 return lhs->addr < rhs->addr;
1965}
1966
1967static struct obj_section *
1968arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1969{
1970 struct obj_section *osect;
1971
1972 ALL_OBJFILE_OSECTIONS (objfile, osect)
1973 if (bfd_get_section_flags (objfile->obfd,
1974 osect->the_bfd_section) & SEC_ALLOC)
1975 {
1976 bfd_vma start, size;
1977 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1978 size = bfd_get_section_size (osect->the_bfd_section);
1979
1980 if (start <= vma && vma < start + size)
1981 return osect;
1982 }
1983
1984 return NULL;
1985}
1986
1987/* Parse contents of exception table and exception index sections
1988 of OBJFILE, and fill in the exception table entry cache.
1989
1990 For each entry that refers to a standard ARM-defined personality
1991 routine, extract the frame unwinding instructions (from either
1992 the index or the table section). The unwinding instructions
1993 are normalized by:
1994 - extracting them from the rest of the table data
1995 - converting to host endianness
1996 - appending the implicit 0xb0 ("Finish") code
1997
1998 The extracted and normalized instructions are stored for later
1999 retrieval by the arm_find_exidx_entry routine. */
2000
2001static void
2002arm_exidx_new_objfile (struct objfile *objfile)
2003{
3bb47e8b 2004 struct cleanup *cleanups;
0e9e9abd
UW
2005 struct arm_exidx_data *data;
2006 asection *exidx, *extab;
2007 bfd_vma exidx_vma = 0, extab_vma = 0;
2008 bfd_size_type exidx_size = 0, extab_size = 0;
2009 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2010 LONGEST i;
2011
2012 /* If we've already touched this file, do nothing. */
2013 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2014 return;
3bb47e8b 2015 cleanups = make_cleanup (null_cleanup, NULL);
0e9e9abd
UW
2016
2017 /* Read contents of exception table and index. */
a5eda10c 2018 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
0e9e9abd
UW
2019 if (exidx)
2020 {
2021 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2022 exidx_size = bfd_get_section_size (exidx);
224c3ddb 2023 exidx_data = (gdb_byte *) xmalloc (exidx_size);
0e9e9abd
UW
2024 make_cleanup (xfree, exidx_data);
2025
2026 if (!bfd_get_section_contents (objfile->obfd, exidx,
2027 exidx_data, 0, exidx_size))
2028 {
2029 do_cleanups (cleanups);
2030 return;
2031 }
2032 }
2033
2034 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2035 if (extab)
2036 {
2037 extab_vma = bfd_section_vma (objfile->obfd, extab);
2038 extab_size = bfd_get_section_size (extab);
224c3ddb 2039 extab_data = (gdb_byte *) xmalloc (extab_size);
0e9e9abd
UW
2040 make_cleanup (xfree, extab_data);
2041
2042 if (!bfd_get_section_contents (objfile->obfd, extab,
2043 extab_data, 0, extab_size))
2044 {
2045 do_cleanups (cleanups);
2046 return;
2047 }
2048 }
2049
2050 /* Allocate exception table data structure. */
2051 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2052 set_objfile_data (objfile, arm_exidx_data_key, data);
2053 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2054 objfile->obfd->section_count,
2055 VEC(arm_exidx_entry_s) *);
2056
2057 /* Fill in exception table. */
2058 for (i = 0; i < exidx_size / 8; i++)
2059 {
2060 struct arm_exidx_entry new_exidx_entry;
2061 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2062 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2063 bfd_vma addr = 0, word = 0;
2064 int n_bytes = 0, n_words = 0;
2065 struct obj_section *sec;
2066 gdb_byte *entry = NULL;
2067
2068 /* Extract address of start of function. */
2069 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2070 idx += exidx_vma + i * 8;
2071
2072 /* Find section containing function and compute section offset. */
2073 sec = arm_obj_section_from_vma (objfile, idx);
2074 if (sec == NULL)
2075 continue;
2076 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2077
2078 /* Determine address of exception table entry. */
2079 if (val == 1)
2080 {
2081 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2082 }
2083 else if ((val & 0xff000000) == 0x80000000)
2084 {
2085 /* Exception table entry embedded in .ARM.exidx
2086 -- must be short form. */
2087 word = val;
2088 n_bytes = 3;
2089 }
2090 else if (!(val & 0x80000000))
2091 {
2092 /* Exception table entry in .ARM.extab. */
2093 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2094 addr += exidx_vma + i * 8 + 4;
2095
2096 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2097 {
2098 word = bfd_h_get_32 (objfile->obfd,
2099 extab_data + addr - extab_vma);
2100 addr += 4;
2101
2102 if ((word & 0xff000000) == 0x80000000)
2103 {
2104 /* Short form. */
2105 n_bytes = 3;
2106 }
2107 else if ((word & 0xff000000) == 0x81000000
2108 || (word & 0xff000000) == 0x82000000)
2109 {
2110 /* Long form. */
2111 n_bytes = 2;
2112 n_words = ((word >> 16) & 0xff);
2113 }
2114 else if (!(word & 0x80000000))
2115 {
2116 bfd_vma pers;
2117 struct obj_section *pers_sec;
2118 int gnu_personality = 0;
2119
2120 /* Custom personality routine. */
2121 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2122 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2123
2124 /* Check whether we've got one of the variants of the
2125 GNU personality routines. */
2126 pers_sec = arm_obj_section_from_vma (objfile, pers);
2127 if (pers_sec)
2128 {
2129 static const char *personality[] =
2130 {
2131 "__gcc_personality_v0",
2132 "__gxx_personality_v0",
2133 "__gcj_personality_v0",
2134 "__gnu_objc_personality_v0",
2135 NULL
2136 };
2137
2138 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2139 int k;
2140
2141 for (k = 0; personality[k]; k++)
2142 if (lookup_minimal_symbol_by_pc_name
2143 (pc, personality[k], objfile))
2144 {
2145 gnu_personality = 1;
2146 break;
2147 }
2148 }
2149
2150 /* If so, the next word contains a word count in the high
2151 byte, followed by the same unwind instructions as the
2152 pre-defined forms. */
2153 if (gnu_personality
2154 && addr + 4 <= extab_vma + extab_size)
2155 {
2156 word = bfd_h_get_32 (objfile->obfd,
2157 extab_data + addr - extab_vma);
2158 addr += 4;
2159 n_bytes = 3;
2160 n_words = ((word >> 24) & 0xff);
2161 }
2162 }
2163 }
2164 }
2165
2166 /* Sanity check address. */
2167 if (n_words)
2168 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2169 n_words = n_bytes = 0;
2170
2171 /* The unwind instructions reside in WORD (only the N_BYTES least
2172 significant bytes are valid), followed by N_WORDS words in the
2173 extab section starting at ADDR. */
2174 if (n_bytes || n_words)
2175 {
224c3ddb
SM
2176 gdb_byte *p = entry
2177 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2178 n_bytes + n_words * 4 + 1);
0e9e9abd
UW
2179
2180 while (n_bytes--)
2181 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2182
2183 while (n_words--)
2184 {
2185 word = bfd_h_get_32 (objfile->obfd,
2186 extab_data + addr - extab_vma);
2187 addr += 4;
2188
2189 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2190 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2191 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2192 *p++ = (gdb_byte) (word & 0xff);
2193 }
2194
2195 /* Implied "Finish" to terminate the list. */
2196 *p++ = 0xb0;
2197 }
2198
 2199 /* Push the entry onto the vector. Entries are guaranteed to
 2200 always appear in order of increasing addresses. */
2201 new_exidx_entry.addr = idx;
2202 new_exidx_entry.entry = entry;
2203 VEC_safe_push (arm_exidx_entry_s,
2204 data->section_maps[sec->the_bfd_section->index],
2205 &new_exidx_entry);
2206 }
2207
2208 do_cleanups (cleanups);
2209}
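/* A minimal illustrative sketch (not part of this file): the fields
   decoded above are "prel31" values, i.e. 31-bit signed offsets
   relative to the address of the field itself.  The sign-extension
   used by arm_exidx_new_objfile works like this:

   static bfd_vma
   example_prel31_to_addr (bfd_vma value, bfd_vma where)
   {
     bfd_vma offset = ((value & 0x7fffffff) ^ 0x40000000) - 0x40000000;
     return where + offset;
   }

   For example, a stored value of 0x00000100 decodes to +0x100, while
   0x7ffffff8 decodes to -8, and the result is added to the address of
   the word it was read from.  */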
2210
2211/* Search for the exception table entry covering MEMADDR. If one is found,
2212 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2213 set *START to the start of the region covered by this entry. */
2214
2215static gdb_byte *
2216arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2217{
2218 struct obj_section *sec;
2219
2220 sec = find_pc_section (memaddr);
2221 if (sec != NULL)
2222 {
2223 struct arm_exidx_data *data;
2224 VEC(arm_exidx_entry_s) *map;
2225 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2226 unsigned int idx;
2227
9a3c8263
SM
2228 data = ((struct arm_exidx_data *)
2229 objfile_data (sec->objfile, arm_exidx_data_key));
0e9e9abd
UW
2230 if (data != NULL)
2231 {
2232 map = data->section_maps[sec->the_bfd_section->index];
2233 if (!VEC_empty (arm_exidx_entry_s, map))
2234 {
2235 struct arm_exidx_entry *map_sym;
2236
2237 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2238 arm_compare_exidx_entries);
2239
2240 /* VEC_lower_bound finds the earliest ordered insertion
2241 point. If the following symbol starts at this exact
2242 address, we use that; otherwise, the preceding
2243 exception table entry covers this address. */
2244 if (idx < VEC_length (arm_exidx_entry_s, map))
2245 {
2246 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2247 if (map_sym->addr == map_key.addr)
2248 {
2249 if (start)
2250 *start = map_sym->addr + obj_section_addr (sec);
2251 return map_sym->entry;
2252 }
2253 }
2254
2255 if (idx > 0)
2256 {
2257 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2258 if (start)
2259 *start = map_sym->addr + obj_section_addr (sec);
2260 return map_sym->entry;
2261 }
2262 }
2263 }
2264 }
2265
2266 return NULL;
2267}
2268
2269/* Given the current frame THIS_FRAME, and its associated frame unwinding
2270 instruction list from the ARM exception table entry ENTRY, allocate and
2271 return a prologue cache structure describing how to unwind this frame.
2272
2273 Return NULL if the unwinding instruction list contains a "spare",
2274 "reserved" or "refuse to unwind" instruction as defined in section
2275 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2276 for the ARM Architecture" document. */
2277
2278static struct arm_prologue_cache *
2279arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2280{
2281 CORE_ADDR vsp = 0;
2282 int vsp_valid = 0;
2283
2284 struct arm_prologue_cache *cache;
2285 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2286 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2287
2288 for (;;)
2289 {
2290 gdb_byte insn;
2291
2292 /* Whenever we reload SP, we actually have to retrieve its
2293 actual value in the current frame. */
2294 if (!vsp_valid)
2295 {
2296 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2297 {
2298 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2299 vsp = get_frame_register_unsigned (this_frame, reg);
2300 }
2301 else
2302 {
2303 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2304 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2305 }
2306
2307 vsp_valid = 1;
2308 }
2309
2310 /* Decode next unwind instruction. */
2311 insn = *entry++;
2312
2313 if ((insn & 0xc0) == 0)
2314 {
2315 int offset = insn & 0x3f;
2316 vsp += (offset << 2) + 4;
2317 }
2318 else if ((insn & 0xc0) == 0x40)
2319 {
2320 int offset = insn & 0x3f;
2321 vsp -= (offset << 2) + 4;
2322 }
2323 else if ((insn & 0xf0) == 0x80)
2324 {
2325 int mask = ((insn & 0xf) << 8) | *entry++;
2326 int i;
2327
2328 /* The special case of an all-zero mask identifies
2329 "Refuse to unwind". We return NULL to fall back
2330 to the prologue analyzer. */
2331 if (mask == 0)
2332 return NULL;
2333
2334 /* Pop registers r4..r15 under mask. */
2335 for (i = 0; i < 12; i++)
2336 if (mask & (1 << i))
2337 {
2338 cache->saved_regs[4 + i].addr = vsp;
2339 vsp += 4;
2340 }
2341
2342 /* Special-case popping SP -- we need to reload vsp. */
2343 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2344 vsp_valid = 0;
2345 }
2346 else if ((insn & 0xf0) == 0x90)
2347 {
2348 int reg = insn & 0xf;
2349
2350 /* Reserved cases. */
2351 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2352 return NULL;
2353
2354 /* Set SP from another register and mark VSP for reload. */
2355 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2356 vsp_valid = 0;
2357 }
2358 else if ((insn & 0xf0) == 0xa0)
2359 {
2360 int count = insn & 0x7;
2361 int pop_lr = (insn & 0x8) != 0;
2362 int i;
2363
2364 /* Pop r4..r[4+count]. */
2365 for (i = 0; i <= count; i++)
2366 {
2367 cache->saved_regs[4 + i].addr = vsp;
2368 vsp += 4;
2369 }
2370
2371 /* If indicated by flag, pop LR as well. */
2372 if (pop_lr)
2373 {
2374 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2375 vsp += 4;
2376 }
2377 }
2378 else if (insn == 0xb0)
2379 {
2380 /* We could only have updated PC by popping into it; if so, it
 2381 will show up as an address. Otherwise, copy LR into PC. */
2382 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2383 cache->saved_regs[ARM_PC_REGNUM]
2384 = cache->saved_regs[ARM_LR_REGNUM];
2385
2386 /* We're done. */
2387 break;
2388 }
2389 else if (insn == 0xb1)
2390 {
2391 int mask = *entry++;
2392 int i;
2393
 2394 /* An all-zero mask and a mask >= 16 are both "spare". */
2395 if (mask == 0 || mask >= 16)
2396 return NULL;
2397
2398 /* Pop r0..r3 under mask. */
2399 for (i = 0; i < 4; i++)
2400 if (mask & (1 << i))
2401 {
2402 cache->saved_regs[i].addr = vsp;
2403 vsp += 4;
2404 }
2405 }
2406 else if (insn == 0xb2)
2407 {
2408 ULONGEST offset = 0;
2409 unsigned shift = 0;
2410
2411 do
2412 {
2413 offset |= (*entry & 0x7f) << shift;
2414 shift += 7;
2415 }
2416 while (*entry++ & 0x80);
2417
2418 vsp += 0x204 + (offset << 2);
2419 }
2420 else if (insn == 0xb3)
2421 {
2422 int start = *entry >> 4;
2423 int count = (*entry++) & 0xf;
2424 int i;
2425
2426 /* Only registers D0..D15 are valid here. */
2427 if (start + count >= 16)
2428 return NULL;
2429
2430 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2431 for (i = 0; i <= count; i++)
2432 {
2433 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2434 vsp += 8;
2435 }
2436
2437 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2438 vsp += 4;
2439 }
2440 else if ((insn & 0xf8) == 0xb8)
2441 {
2442 int count = insn & 0x7;
2443 int i;
2444
2445 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2446 for (i = 0; i <= count; i++)
2447 {
2448 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2449 vsp += 8;
2450 }
2451
2452 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2453 vsp += 4;
2454 }
2455 else if (insn == 0xc6)
2456 {
2457 int start = *entry >> 4;
2458 int count = (*entry++) & 0xf;
2459 int i;
2460
2461 /* Only registers WR0..WR15 are valid. */
2462 if (start + count >= 16)
2463 return NULL;
2464
2465 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2466 for (i = 0; i <= count; i++)
2467 {
2468 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2469 vsp += 8;
2470 }
2471 }
2472 else if (insn == 0xc7)
2473 {
2474 int mask = *entry++;
2475 int i;
2476
 2477 /* An all-zero mask and a mask >= 16 are both "spare". */
2478 if (mask == 0 || mask >= 16)
2479 return NULL;
2480
2481 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2482 for (i = 0; i < 4; i++)
2483 if (mask & (1 << i))
2484 {
2485 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2486 vsp += 4;
2487 }
2488 }
2489 else if ((insn & 0xf8) == 0xc0)
2490 {
2491 int count = insn & 0x7;
2492 int i;
2493
2494 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2495 for (i = 0; i <= count; i++)
2496 {
2497 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2498 vsp += 8;
2499 }
2500 }
2501 else if (insn == 0xc8)
2502 {
2503 int start = *entry >> 4;
2504 int count = (*entry++) & 0xf;
2505 int i;
2506
2507 /* Only registers D0..D31 are valid. */
2508 if (start + count >= 16)
2509 return NULL;
2510
2511 /* Pop VFP double-precision registers
2512 D[16+start]..D[16+start+count]. */
2513 for (i = 0; i <= count; i++)
2514 {
2515 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2516 vsp += 8;
2517 }
2518 }
2519 else if (insn == 0xc9)
2520 {
2521 int start = *entry >> 4;
2522 int count = (*entry++) & 0xf;
2523 int i;
2524
2525 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2526 for (i = 0; i <= count; i++)
2527 {
2528 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2529 vsp += 8;
2530 }
2531 }
2532 else if ((insn & 0xf8) == 0xd0)
2533 {
2534 int count = insn & 0x7;
2535 int i;
2536
2537 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2538 for (i = 0; i <= count; i++)
2539 {
2540 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2541 vsp += 8;
2542 }
2543 }
2544 else
2545 {
2546 /* Everything else is "spare". */
2547 return NULL;
2548 }
2549 }
2550
2551 /* If we restore SP from a register, assume this was the frame register.
2552 Otherwise just fall back to SP as frame register. */
2553 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2554 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2555 else
2556 cache->framereg = ARM_SP_REGNUM;
2557
2558 /* Determine offset to previous frame. */
2559 cache->framesize
2560 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2561
2562 /* We already got the previous SP. */
2563 cache->prev_sp = vsp;
2564
2565 return cache;
2566}
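/* Worked example (illustrative, not from the original source): for a
   function whose prologue is "push {r4, r5, r6, lr}; sub sp, sp, #8",
   a typical unwind entry is the byte sequence 0x01 0xaa 0xb0:
   0x01 adds 8 to the virtual SP, 0xaa pops r4-r6 and LR, and 0xb0
   ("Finish") copies LR into PC.  Starting from this frame's SP, the
   decoder above would record r4, r5, r6 and LR at SP+8, SP+12, SP+16
   and SP+20, take the previous PC from the LR slot, and set PREV_SP
   to SP+24.  */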
2567
2568/* Unwinding via ARM exception table entries. Note that the sniffer
2569 already computes a filled-in prologue cache, which is then used
2570 with the same arm_prologue_this_id and arm_prologue_prev_register
2571 routines also used for prologue-parsing based unwinding. */
2572
2573static int
2574arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2575 struct frame_info *this_frame,
2576 void **this_prologue_cache)
2577{
2578 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2579 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2580 CORE_ADDR addr_in_block, exidx_region, func_start;
2581 struct arm_prologue_cache *cache;
2582 gdb_byte *entry;
2583
2584 /* See if we have an ARM exception table entry covering this address. */
2585 addr_in_block = get_frame_address_in_block (this_frame);
2586 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2587 if (!entry)
2588 return 0;
2589
2590 /* The ARM exception table does not describe unwind information
2591 for arbitrary PC values, but is guaranteed to be correct only
2592 at call sites. We have to decide here whether we want to use
2593 ARM exception table information for this frame, or fall back
2594 to using prologue parsing. (Note that if we have DWARF CFI,
2595 this sniffer isn't even called -- CFI is always preferred.)
2596
2597 Before we make this decision, however, we check whether we
2598 actually have *symbol* information for the current frame.
2599 If not, prologue parsing would not work anyway, so we might
2600 as well use the exception table and hope for the best. */
2601 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2602 {
2603 int exc_valid = 0;
2604
2605 /* If the next frame is "normal", we are at a call site in this
2606 frame, so exception information is guaranteed to be valid. */
2607 if (get_next_frame (this_frame)
2608 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2609 exc_valid = 1;
2610
2611 /* We also assume exception information is valid if we're currently
2612 blocked in a system call. The system library is supposed to
d9311bfa
AT
2613 ensure this, so that e.g. pthread cancellation works. */
2614 if (arm_frame_is_thumb (this_frame))
0e9e9abd 2615 {
d9311bfa 2616 LONGEST insn;
416dc9c6 2617
d9311bfa
AT
2618 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2619 byte_order_for_code, &insn)
2620 && (insn & 0xff00) == 0xdf00 /* svc */)
2621 exc_valid = 1;
0e9e9abd 2622 }
d9311bfa
AT
2623 else
2624 {
2625 LONGEST insn;
416dc9c6 2626
d9311bfa
AT
2627 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2628 byte_order_for_code, &insn)
2629 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2630 exc_valid = 1;
2631 }
2632
0e9e9abd
UW
2633 /* Bail out if we don't know that exception information is valid. */
2634 if (!exc_valid)
2635 return 0;
2636
2637 /* The ARM exception index does not mark the *end* of the region
2638 covered by the entry, and some functions will not have any entry.
2639 To correctly recognize the end of the covered region, the linker
2640 should have inserted dummy records with a CANTUNWIND marker.
2641
2642 Unfortunately, current versions of GNU ld do not reliably do
2643 this, and thus we may have found an incorrect entry above.
2644 As a (temporary) sanity check, we only use the entry if it
2645 lies *within* the bounds of the function. Note that this check
2646 might reject perfectly valid entries that just happen to cover
2647 multiple functions; therefore this check ought to be removed
2648 once the linker is fixed. */
2649 if (func_start > exidx_region)
2650 return 0;
2651 }
2652
2653 /* Decode the list of unwinding instructions into a prologue cache.
2654 Note that this may fail due to e.g. a "refuse to unwind" code. */
2655 cache = arm_exidx_fill_cache (this_frame, entry);
2656 if (!cache)
2657 return 0;
2658
2659 *this_prologue_cache = cache;
2660 return 1;
2661}
2662
2663struct frame_unwind arm_exidx_unwind = {
2664 NORMAL_FRAME,
8fbca658 2665 default_frame_unwind_stop_reason,
0e9e9abd
UW
2666 arm_prologue_this_id,
2667 arm_prologue_prev_register,
2668 NULL,
2669 arm_exidx_unwind_sniffer
2670};
2671
779aa56f
YQ
2672static struct arm_prologue_cache *
2673arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2674{
2675 struct arm_prologue_cache *cache;
2676 CORE_ADDR sp;
2677 int reg;
2678
2679 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2680 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2681
2682 /* Still rely on the offset calculated from prologue. */
2683 arm_scan_prologue (this_frame, cache);
2684
2685 /* Since we are in epilogue, the SP has been restored. */
2686 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2687
2688 /* Calculate actual addresses of saved registers using offsets
2689 determined by arm_scan_prologue. */
2690 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2691 if (trad_frame_addr_p (cache->saved_regs, reg))
2692 cache->saved_regs[reg].addr += cache->prev_sp;
2693
2694 return cache;
2695}
2696
2697/* Implementation of function hook 'this_id' in
 2698 'struct frame_unwind' for epilogue unwinder. */
2699
2700static void
2701arm_epilogue_frame_this_id (struct frame_info *this_frame,
2702 void **this_cache,
2703 struct frame_id *this_id)
2704{
2705 struct arm_prologue_cache *cache;
2706 CORE_ADDR pc, func;
2707
2708 if (*this_cache == NULL)
2709 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2710 cache = (struct arm_prologue_cache *) *this_cache;
2711
2712 /* Use function start address as part of the frame ID. If we cannot
2713 identify the start address (due to missing symbol information),
2714 fall back to just using the current PC. */
2715 pc = get_frame_pc (this_frame);
2716 func = get_frame_func (this_frame);
 2717 if (func == 0)
2718 func = pc;
2719
 2720 (*this_id) = frame_id_build (cache->prev_sp, func);
2721}
2722
2723/* Implementation of function hook 'prev_register' in
 2724 'struct frame_unwind' for epilogue unwinder. */
2725
2726static struct value *
2727arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2728 void **this_cache, int regnum)
2729{
2730 struct arm_prologue_cache *cache;
2731
2732 if (*this_cache == NULL)
2733 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2734 cache = (struct arm_prologue_cache *) *this_cache;
2735
2736 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2737}
2738
2739static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2740 CORE_ADDR pc);
2741static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2742 CORE_ADDR pc);
2743
2744/* Implementation of function hook 'sniffer' in
 2745 'struct frame_unwind' for epilogue unwinder. */
2746
2747static int
2748arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2749 struct frame_info *this_frame,
2750 void **this_prologue_cache)
2751{
2752 if (frame_relative_level (this_frame) == 0)
2753 {
2754 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2755 CORE_ADDR pc = get_frame_pc (this_frame);
2756
2757 if (arm_frame_is_thumb (this_frame))
2758 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2759 else
2760 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2761 }
2762 else
2763 return 0;
2764}
2765
2766/* Frame unwinder from epilogue. */
2767
2768static const struct frame_unwind arm_epilogue_frame_unwind =
2769{
2770 NORMAL_FRAME,
2771 default_frame_unwind_stop_reason,
2772 arm_epilogue_frame_this_id,
2773 arm_epilogue_frame_prev_register,
2774 NULL,
2775 arm_epilogue_frame_sniffer,
2776};
2777
80d8d390
YQ
2778/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2779 trampoline, return the target PC. Otherwise return 0.
2780
2781 void call0a (char c, short s, int i, long l) {}
2782
2783 int main (void)
2784 {
2785 (*pointer_to_call0a) (c, s, i, l);
2786 }
2787
2788 Instead of calling a stub library function _call_via_xx (xx is
2789 the register name), GCC may inline the trampoline in the object
2790 file as below (register r2 has the address of call0a).
2791
2792 .global main
2793 .type main, %function
2794 ...
2795 bl .L1
2796 ...
2797 .size main, .-main
2798
2799 .L1:
2800 bx r2
2801
2802 The trampoline 'bx r2' doesn't belong to main. */
2803
2804static CORE_ADDR
2805arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2806{
2807 /* The heuristics of recognizing such trampoline is that FRAME is
2808 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2809 if (arm_frame_is_thumb (frame))
2810 {
2811 gdb_byte buf[2];
2812
2813 if (target_read_memory (pc, buf, 2) == 0)
2814 {
2815 struct gdbarch *gdbarch = get_frame_arch (frame);
2816 enum bfd_endian byte_order_for_code
2817 = gdbarch_byte_order_for_code (gdbarch);
2818 uint16_t insn
2819 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2820
2821 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2822 {
2823 CORE_ADDR dest
2824 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2825
2826 /* Clear the LSB so that gdb core sets step-resume
2827 breakpoint at the right address. */
2828 return UNMAKE_THUMB_ADDR (dest);
2829 }
2830 }
2831 }
2832
2833 return 0;
2834}
2835
909cf6ea 2836static struct arm_prologue_cache *
a262aec2 2837arm_make_stub_cache (struct frame_info *this_frame)
909cf6ea 2838{
909cf6ea 2839 struct arm_prologue_cache *cache;
909cf6ea 2840
35d5d4ee 2841 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
a262aec2 2842 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
909cf6ea 2843
a262aec2 2844 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
909cf6ea
DJ
2845
2846 return cache;
2847}
2848
2849/* Our frame ID for a stub frame is the current SP and LR. */
2850
2851static void
a262aec2 2852arm_stub_this_id (struct frame_info *this_frame,
909cf6ea
DJ
2853 void **this_cache,
2854 struct frame_id *this_id)
2855{
2856 struct arm_prologue_cache *cache;
2857
2858 if (*this_cache == NULL)
a262aec2 2859 *this_cache = arm_make_stub_cache (this_frame);
9a3c8263 2860 cache = (struct arm_prologue_cache *) *this_cache;
909cf6ea 2861
a262aec2 2862 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
909cf6ea
DJ
2863}
2864
a262aec2
DJ
2865static int
2866arm_stub_unwind_sniffer (const struct frame_unwind *self,
2867 struct frame_info *this_frame,
2868 void **this_prologue_cache)
909cf6ea 2869{
93d42b30 2870 CORE_ADDR addr_in_block;
948f8e3d 2871 gdb_byte dummy[4];
18d18ac8
YQ
2872 CORE_ADDR pc, start_addr;
2873 const char *name;
909cf6ea 2874
a262aec2 2875 addr_in_block = get_frame_address_in_block (this_frame);
18d18ac8 2876 pc = get_frame_pc (this_frame);
3e5d3a5a 2877 if (in_plt_section (addr_in_block)
fc36e839
DE
 2878 /* We also use the stub unwinder if the target memory is unreadable
2879 to avoid having the prologue unwinder trying to read it. */
18d18ac8
YQ
2880 || target_read_memory (pc, dummy, 4) != 0)
2881 return 1;
2882
2883 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2884 && arm_skip_bx_reg (this_frame, pc) != 0)
a262aec2 2885 return 1;
909cf6ea 2886
a262aec2 2887 return 0;
909cf6ea
DJ
2888}
2889
a262aec2
DJ
2890struct frame_unwind arm_stub_unwind = {
2891 NORMAL_FRAME,
8fbca658 2892 default_frame_unwind_stop_reason,
a262aec2
DJ
2893 arm_stub_this_id,
2894 arm_prologue_prev_register,
2895 NULL,
2896 arm_stub_unwind_sniffer
2897};
2898
2ae28aa9
YQ
2899/* Put here the code to store, into CACHE->saved_regs, the addresses
2900 of the saved registers of frame described by THIS_FRAME. CACHE is
2901 returned. */
2902
2903static struct arm_prologue_cache *
2904arm_m_exception_cache (struct frame_info *this_frame)
2905{
2906 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2907 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2908 struct arm_prologue_cache *cache;
2909 CORE_ADDR unwound_sp;
2910 LONGEST xpsr;
2911
2912 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2913 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2914
2915 unwound_sp = get_frame_register_unsigned (this_frame,
2916 ARM_SP_REGNUM);
2917
2918 /* The hardware saves eight 32-bit words, comprising xPSR,
2919 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2920 "B1.5.6 Exception entry behavior" in
2921 "ARMv7-M Architecture Reference Manual". */
2922 cache->saved_regs[0].addr = unwound_sp;
2923 cache->saved_regs[1].addr = unwound_sp + 4;
2924 cache->saved_regs[2].addr = unwound_sp + 8;
2925 cache->saved_regs[3].addr = unwound_sp + 12;
2926 cache->saved_regs[12].addr = unwound_sp + 16;
2927 cache->saved_regs[14].addr = unwound_sp + 20;
2928 cache->saved_regs[15].addr = unwound_sp + 24;
2929 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2930
2931 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2932 aligner between the top of the 32-byte stack frame and the
2933 previous context's stack pointer. */
2934 cache->prev_sp = unwound_sp + 32;
2935 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2936 && (xpsr & (1 << 9)) != 0)
2937 cache->prev_sp += 4;
2938
2939 return cache;
2940}
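/* Numeric example (illustrative, with a hypothetical Cortex-M SRAM
   address): if SP inside the handler is 0x2000ffc0, the hardware-saved
   R0-R3, R12, LR, ReturnAddress and xPSR occupy 0x2000ffc0..0x2000ffdc,
   and PREV_SP is 0x2000ffe0, or 0x2000ffe4 if bit 9 of the saved xPSR
   indicates that a 4-byte aligner was inserted on exception entry.  */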
2941
2942/* Implementation of function hook 'this_id' in
 2943 'struct frame_unwind'. */
2944
2945static void
2946arm_m_exception_this_id (struct frame_info *this_frame,
2947 void **this_cache,
2948 struct frame_id *this_id)
2949{
2950 struct arm_prologue_cache *cache;
2951
2952 if (*this_cache == NULL)
2953 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2954 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2955
2956 /* Our frame ID for a stub frame is the current SP and LR. */
2957 *this_id = frame_id_build (cache->prev_sp,
2958 get_frame_pc (this_frame));
2959}
2960
2961/* Implementation of function hook 'prev_register' in
2962 'struct frame_uwnind'. */
2963
2964static struct value *
2965arm_m_exception_prev_register (struct frame_info *this_frame,
2966 void **this_cache,
2967 int prev_regnum)
2968{
2ae28aa9
YQ
2969 struct arm_prologue_cache *cache;
2970
2971 if (*this_cache == NULL)
2972 *this_cache = arm_m_exception_cache (this_frame);
9a3c8263 2973 cache = (struct arm_prologue_cache *) *this_cache;
2ae28aa9
YQ
2974
2975 /* The value was already reconstructed into PREV_SP. */
2976 if (prev_regnum == ARM_SP_REGNUM)
2977 return frame_unwind_got_constant (this_frame, prev_regnum,
2978 cache->prev_sp);
2979
2980 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2981 prev_regnum);
2982}
2983
2984/* Implementation of function hook 'sniffer' in
2985 'struct frame_uwnind'. */
2986
2987static int
2988arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2989 struct frame_info *this_frame,
2990 void **this_prologue_cache)
2991{
2992 CORE_ADDR this_pc = get_frame_pc (this_frame);
2993
2994 /* No need to check is_m; this sniffer is only registered for
2995 M-profile architectures. */
2996
2997 /* Exception frames return to one of these magic PCs. Other values
2998 are not defined as of v7-M. See details in "B1.5.8 Exception
2999 return behavior" in "ARMv7-M Architecture Reference Manual". */
3000 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3001 || this_pc == 0xfffffffd)
3002 return 1;
3003
3004 return 0;
3005}
3006
3007/* Frame unwinder for M-profile exceptions. */
3008
3009struct frame_unwind arm_m_exception_unwind =
3010{
3011 SIGTRAMP_FRAME,
3012 default_frame_unwind_stop_reason,
3013 arm_m_exception_this_id,
3014 arm_m_exception_prev_register,
3015 NULL,
3016 arm_m_exception_unwind_sniffer
3017};
3018
24de872b 3019static CORE_ADDR
a262aec2 3020arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
24de872b
DJ
3021{
3022 struct arm_prologue_cache *cache;
3023
eb5492fa 3024 if (*this_cache == NULL)
a262aec2 3025 *this_cache = arm_make_prologue_cache (this_frame);
9a3c8263 3026 cache = (struct arm_prologue_cache *) *this_cache;
eb5492fa 3027
4be43953 3028 return cache->prev_sp - cache->framesize;
24de872b
DJ
3029}
3030
eb5492fa
DJ
3031struct frame_base arm_normal_base = {
3032 &arm_prologue_unwind,
3033 arm_normal_frame_base,
3034 arm_normal_frame_base,
3035 arm_normal_frame_base
3036};
3037
a262aec2 3038/* Assuming THIS_FRAME is a dummy, return the frame ID of that
eb5492fa
DJ
3039 dummy frame. The frame ID's base needs to match the TOS value
3040 saved by save_dummy_frame_tos() and returned from
3041 arm_push_dummy_call, and the PC needs to match the dummy frame's
3042 breakpoint. */
c906108c 3043
eb5492fa 3044static struct frame_id
a262aec2 3045arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
c906108c 3046{
0963b4bd
MS
3047 return frame_id_build (get_frame_register_unsigned (this_frame,
3048 ARM_SP_REGNUM),
a262aec2 3049 get_frame_pc (this_frame));
eb5492fa 3050}
c3b4394c 3051
eb5492fa
DJ
3052/* Given THIS_FRAME, find the previous frame's resume PC (which will
3053 be used to construct the previous frame's ID, after looking up the
3054 containing function). */
c3b4394c 3055
eb5492fa
DJ
3056static CORE_ADDR
3057arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3058{
3059 CORE_ADDR pc;
3060 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
24568a2c 3061 return arm_addr_bits_remove (gdbarch, pc);
eb5492fa
DJ
3062}
3063
3064static CORE_ADDR
3065arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3066{
3067 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
c906108c
SS
3068}
3069
b39cc962
DJ
3070static struct value *
3071arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3072 int regnum)
3073{
24568a2c 3074 struct gdbarch * gdbarch = get_frame_arch (this_frame);
b39cc962 3075 CORE_ADDR lr, cpsr;
9779414d 3076 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
b39cc962
DJ
3077
3078 switch (regnum)
3079 {
3080 case ARM_PC_REGNUM:
3081 /* The PC is normally copied from the return column, which
3082 describes saves of LR. However, that version may have an
3083 extra bit set to indicate Thumb state. The bit is not
3084 part of the PC. */
3085 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3086 return frame_unwind_got_constant (this_frame, regnum,
24568a2c 3087 arm_addr_bits_remove (gdbarch, lr));
b39cc962
DJ
3088
3089 case ARM_PS_REGNUM:
3090 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
ca38c58e 3091 cpsr = get_frame_register_unsigned (this_frame, regnum);
b39cc962
DJ
3092 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3093 if (IS_THUMB_ADDR (lr))
9779414d 3094 cpsr |= t_bit;
b39cc962 3095 else
9779414d 3096 cpsr &= ~t_bit;
ca38c58e 3097 return frame_unwind_got_constant (this_frame, regnum, cpsr);
b39cc962
DJ
3098
3099 default:
3100 internal_error (__FILE__, __LINE__,
3101 _("Unexpected register %d"), regnum);
3102 }
3103}
3104
3105static void
3106arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3107 struct dwarf2_frame_state_reg *reg,
3108 struct frame_info *this_frame)
3109{
3110 switch (regnum)
3111 {
3112 case ARM_PC_REGNUM:
3113 case ARM_PS_REGNUM:
3114 reg->how = DWARF2_FRAME_REG_FN;
3115 reg->loc.fn = arm_dwarf2_prev_register;
3116 break;
3117 case ARM_SP_REGNUM:
3118 reg->how = DWARF2_FRAME_REG_CFA;
3119 break;
3120 }
3121}
3122
c9cf6e20 3123/* Implement the stack_frame_destroyed_p gdbarch method. */
4024ca99
UW
3124
3125static int
c9cf6e20 3126thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3127{
3128 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3129 unsigned int insn, insn2;
3130 int found_return = 0, found_stack_adjust = 0;
3131 CORE_ADDR func_start, func_end;
3132 CORE_ADDR scan_pc;
3133 gdb_byte buf[4];
3134
3135 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3136 return 0;
3137
3138 /* The epilogue is a sequence of instructions along the following lines:
3139
3140 - add stack frame size to SP or FP
3141 - [if frame pointer used] restore SP from FP
3142 - restore registers from SP [may include PC]
3143 - a return-type instruction [if PC wasn't already restored]
3144
3145 In a first pass, we scan forward from the current PC and verify the
3146 instructions we find as compatible with this sequence, ending in a
3147 return instruction.
3148
3149 However, this is not sufficient to distinguish indirect function calls
3150 within a function from indirect tail calls in the epilogue in some cases.
3151 Therefore, if we didn't already find any SP-changing instruction during
3152 forward scan, we add a backward scanning heuristic to ensure we actually
3153 are in the epilogue. */
3154
3155 scan_pc = pc;
3156 while (scan_pc < func_end && !found_return)
3157 {
3158 if (target_read_memory (scan_pc, buf, 2))
3159 break;
3160
3161 scan_pc += 2;
3162 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3163
3164 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3165 found_return = 1;
3166 else if (insn == 0x46f7) /* mov pc, lr */
3167 found_return = 1;
540314bd 3168 else if (thumb_instruction_restores_sp (insn))
4024ca99 3169 {
b7576e5c 3170 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4024ca99
UW
3171 found_return = 1;
3172 }
db24da6d 3173 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4024ca99
UW
3174 {
3175 if (target_read_memory (scan_pc, buf, 2))
3176 break;
3177
3178 scan_pc += 2;
3179 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3180
3181 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3182 {
4024ca99
UW
3183 if (insn2 & 0x8000) /* <registers> include PC. */
3184 found_return = 1;
3185 }
3186 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3187 && (insn2 & 0x0fff) == 0x0b04)
3188 {
4024ca99
UW
3189 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3190 found_return = 1;
3191 }
3192 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3193 && (insn2 & 0x0e00) == 0x0a00)
6b65d1b6 3194 ;
4024ca99
UW
3195 else
3196 break;
3197 }
3198 else
3199 break;
3200 }
3201
3202 if (!found_return)
3203 return 0;
3204
3205 /* Since any instruction in the epilogue sequence, with the possible
3206 exception of return itself, updates the stack pointer, we need to
3207 scan backwards for at most one instruction. Try either a 16-bit or
3208 a 32-bit instruction. This is just a heuristic, so we do not worry
0963b4bd 3209 too much about false positives. */
4024ca99 3210
6b65d1b6
YQ
3211 if (pc - 4 < func_start)
3212 return 0;
3213 if (target_read_memory (pc - 4, buf, 4))
3214 return 0;
4024ca99 3215
6b65d1b6
YQ
3216 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3217 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3218
3219 if (thumb_instruction_restores_sp (insn2))
3220 found_stack_adjust = 1;
3221 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3222 found_stack_adjust = 1;
3223 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3224 && (insn2 & 0x0fff) == 0x0b04)
3225 found_stack_adjust = 1;
3226 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3227 && (insn2 & 0x0e00) == 0x0a00)
3228 found_stack_adjust = 1;
4024ca99
UW
3229
3230 return found_stack_adjust;
3231}
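/* Illustrative example (not from the original source): for a Thumb
   epilogue ending in "add sp, #16" (0xb004) followed by
   "pop {r4, r5, pc}" (0xbd30), calling this function with PC at the
   pop finds the return in the forward scan (0xbd30 matches the
   pop-including-PC test) and the SP adjustment in the backward scan
   (0xb004 restores SP), so the frame is reported as destroyed.  */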
3232
4024ca99 3233static int
c58b006a 3234arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4024ca99
UW
3235{
3236 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3237 unsigned int insn;
f303bc3e 3238 int found_return;
4024ca99
UW
3239 CORE_ADDR func_start, func_end;
3240
4024ca99
UW
3241 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3242 return 0;
3243
3244 /* We are in the epilogue if the previous instruction was a stack
3245 adjustment and the next instruction is a possible return (bx, mov
3246 pc, or pop). We could have to scan backwards to find the stack
3247 adjustment, or forwards to find the return, but this is a decent
3248 approximation. First scan forwards. */
3249
3250 found_return = 0;
3251 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3252 if (bits (insn, 28, 31) != INST_NV)
3253 {
3254 if ((insn & 0x0ffffff0) == 0x012fff10)
3255 /* BX. */
3256 found_return = 1;
3257 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3258 /* MOV PC. */
3259 found_return = 1;
3260 else if ((insn & 0x0fff0000) == 0x08bd0000
3261 && (insn & 0x0000c000) != 0)
3262 /* POP (LDMIA), including PC or LR. */
3263 found_return = 1;
3264 }
3265
3266 if (!found_return)
3267 return 0;
3268
3269 /* Scan backwards. This is just a heuristic, so do not worry about
3270 false positives from mode changes. */
3271
3272 if (pc < func_start + 4)
3273 return 0;
3274
3275 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
f303bc3e 3276 if (arm_instruction_restores_sp (insn))
4024ca99
UW
3277 return 1;
3278
3279 return 0;
3280}
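/* Illustrative example (not from the original source): an ARM epilogue
   such as "add sp, sp, #24" (0xe28dd018) followed by
   "ldmfd sp!, {r4, r11, pc}" (0xe8bd8810) is recognized when PC is at
   the ldmfd: the forward check sees a pop that includes PC, and the
   backward check sees that the preceding instruction restores SP.  */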
3281
c58b006a
YQ
3282/* Implement the stack_frame_destroyed_p gdbarch method. */
3283
3284static int
3285arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3286{
3287 if (arm_pc_is_thumb (gdbarch, pc))
3288 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3289 else
3290 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3291}
4024ca99 3292
2dd604e7
RE
3293/* When arguments must be pushed onto the stack, they go on in reverse
3294 order. The code below implements a FILO (stack) to do this. */
3295
3296struct stack_item
3297{
3298 int len;
3299 struct stack_item *prev;
7c543f7b 3300 gdb_byte *data;
2dd604e7
RE
3301};
3302
3303static struct stack_item *
df3b6708 3304push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
2dd604e7
RE
3305{
3306 struct stack_item *si;
8d749320 3307 si = XNEW (struct stack_item);
7c543f7b 3308 si->data = (gdb_byte *) xmalloc (len);
2dd604e7
RE
3309 si->len = len;
3310 si->prev = prev;
3311 memcpy (si->data, contents, len);
3312 return si;
3313}
3314
3315static struct stack_item *
3316pop_stack_item (struct stack_item *si)
3317{
3318 struct stack_item *dead = si;
3319 si = si->prev;
3320 xfree (dead->data);
3321 xfree (dead);
3322 return si;
3323}
3324
2af48f68
PB
3325
3326/* Return the alignment (in bytes) of the given type. */
3327
3328static int
3329arm_type_align (struct type *t)
3330{
3331 int n;
3332 int align;
3333 int falign;
3334
3335 t = check_typedef (t);
3336 switch (TYPE_CODE (t))
3337 {
3338 default:
3339 /* Should never happen. */
3340 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3341 return 4;
3342
3343 case TYPE_CODE_PTR:
3344 case TYPE_CODE_ENUM:
3345 case TYPE_CODE_INT:
3346 case TYPE_CODE_FLT:
3347 case TYPE_CODE_SET:
3348 case TYPE_CODE_RANGE:
2af48f68
PB
3349 case TYPE_CODE_REF:
3350 case TYPE_CODE_CHAR:
3351 case TYPE_CODE_BOOL:
3352 return TYPE_LENGTH (t);
3353
3354 case TYPE_CODE_ARRAY:
c4312b19
YQ
3355 if (TYPE_VECTOR (t))
3356 {
3357 /* Use the natural alignment for vector types (the same for
3358 scalar type), but the maximum alignment is 64-bit. */
3359 if (TYPE_LENGTH (t) > 8)
3360 return 8;
3361 else
3362 return TYPE_LENGTH (t);
3363 }
3364 else
3365 return arm_type_align (TYPE_TARGET_TYPE (t));
2af48f68 3366 case TYPE_CODE_COMPLEX:
2af48f68
PB
3367 return arm_type_align (TYPE_TARGET_TYPE (t));
3368
3369 case TYPE_CODE_STRUCT:
3370 case TYPE_CODE_UNION:
3371 align = 1;
3372 for (n = 0; n < TYPE_NFIELDS (t); n++)
3373 {
3374 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3375 if (falign > align)
3376 align = falign;
3377 }
3378 return align;
3379 }
3380}
3381
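/* Illustrative examples (editor's addition), using hypothetical types:
   for struct { char c; double d; } the field loop above yields
   max (1, 8) == 8; a non-vector array of float uses the element
   alignment 4; and a 16-byte vector type is capped at 8 by the
   TYPE_VECTOR branch.  */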
90445bd3
DJ
3382/* Possible base types for a candidate for passing and returning in
3383 VFP registers. */
3384
3385enum arm_vfp_cprc_base_type
3386{
3387 VFP_CPRC_UNKNOWN,
3388 VFP_CPRC_SINGLE,
3389 VFP_CPRC_DOUBLE,
3390 VFP_CPRC_VEC64,
3391 VFP_CPRC_VEC128
3392};
3393
3394/* The length of one element of base type B. */
3395
3396static unsigned
3397arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3398{
3399 switch (b)
3400 {
3401 case VFP_CPRC_SINGLE:
3402 return 4;
3403 case VFP_CPRC_DOUBLE:
3404 return 8;
3405 case VFP_CPRC_VEC64:
3406 return 8;
3407 case VFP_CPRC_VEC128:
3408 return 16;
3409 default:
3410 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3411 (int) b);
3412 }
3413}
3414
3415/* The character ('s', 'd' or 'q') for the type of VFP register used
3416 for passing base type B. */
3417
3418static int
3419arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3420{
3421 switch (b)
3422 {
3423 case VFP_CPRC_SINGLE:
3424 return 's';
3425 case VFP_CPRC_DOUBLE:
3426 return 'd';
3427 case VFP_CPRC_VEC64:
3428 return 'd';
3429 case VFP_CPRC_VEC128:
3430 return 'q';
3431 default:
3432 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3433 (int) b);
3434 }
3435}
3436
3437/* Determine whether T may be part of a candidate for passing and
3438 returning in VFP registers, ignoring the limit on the total number
3439 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3440 classification of the first valid component found; if it is not
3441 VFP_CPRC_UNKNOWN, all components must have the same classification
3442 as *BASE_TYPE. If it is found that T contains a type not permitted
3443 for passing and returning in VFP registers, a type differently
3444 classified from *BASE_TYPE, or two types differently classified
3445 from each other, return -1, otherwise return the total number of
3446 base-type elements found (possibly 0 in an empty structure or
817e0957
YQ
3447 array). Vector types are not currently supported, matching the
3448 generic AAPCS support. */
90445bd3
DJ
3449
3450static int
3451arm_vfp_cprc_sub_candidate (struct type *t,
3452 enum arm_vfp_cprc_base_type *base_type)
3453{
3454 t = check_typedef (t);
3455 switch (TYPE_CODE (t))
3456 {
3457 case TYPE_CODE_FLT:
3458 switch (TYPE_LENGTH (t))
3459 {
3460 case 4:
3461 if (*base_type == VFP_CPRC_UNKNOWN)
3462 *base_type = VFP_CPRC_SINGLE;
3463 else if (*base_type != VFP_CPRC_SINGLE)
3464 return -1;
3465 return 1;
3466
3467 case 8:
3468 if (*base_type == VFP_CPRC_UNKNOWN)
3469 *base_type = VFP_CPRC_DOUBLE;
3470 else if (*base_type != VFP_CPRC_DOUBLE)
3471 return -1;
3472 return 1;
3473
3474 default:
3475 return -1;
3476 }
3477 break;
3478
817e0957
YQ
3479 case TYPE_CODE_COMPLEX:
3480 /* Arguments of complex T where T is one of the types float or
3481 double get treated as if they are implemented as:
3482
3483 struct complexT
3484 {
3485 T real;
3486 T imag;
5f52445b
YQ
3487 };
3488
3489 */
817e0957
YQ
3490 switch (TYPE_LENGTH (t))
3491 {
3492 case 8:
3493 if (*base_type == VFP_CPRC_UNKNOWN)
3494 *base_type = VFP_CPRC_SINGLE;
3495 else if (*base_type != VFP_CPRC_SINGLE)
3496 return -1;
3497 return 2;
3498
3499 case 16:
3500 if (*base_type == VFP_CPRC_UNKNOWN)
3501 *base_type = VFP_CPRC_DOUBLE;
3502 else if (*base_type != VFP_CPRC_DOUBLE)
3503 return -1;
3504 return 2;
3505
3506 default:
3507 return -1;
3508 }
3509 break;
3510
90445bd3
DJ
3511 case TYPE_CODE_ARRAY:
3512 {
c4312b19 3513 if (TYPE_VECTOR (t))
90445bd3 3514 {
c4312b19
YQ
 3515	    /* A 64-bit or 128-bit containerized vector type is a VFP
 3516	       CPRC.  */
3517 switch (TYPE_LENGTH (t))
3518 {
3519 case 8:
3520 if (*base_type == VFP_CPRC_UNKNOWN)
3521 *base_type = VFP_CPRC_VEC64;
3522 return 1;
3523 case 16:
3524 if (*base_type == VFP_CPRC_UNKNOWN)
3525 *base_type = VFP_CPRC_VEC128;
3526 return 1;
3527 default:
3528 return -1;
3529 }
3530 }
3531 else
3532 {
3533 int count;
3534 unsigned unitlen;
3535
3536 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3537 base_type);
3538 if (count == -1)
3539 return -1;
3540 if (TYPE_LENGTH (t) == 0)
3541 {
3542 gdb_assert (count == 0);
3543 return 0;
3544 }
3545 else if (count == 0)
3546 return -1;
3547 unitlen = arm_vfp_cprc_unit_length (*base_type);
3548 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3549 return TYPE_LENGTH (t) / unitlen;
90445bd3 3550 }
90445bd3
DJ
3551 }
3552 break;
3553
3554 case TYPE_CODE_STRUCT:
3555 {
3556 int count = 0;
3557 unsigned unitlen;
3558 int i;
3559 for (i = 0; i < TYPE_NFIELDS (t); i++)
3560 {
3561 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3562 base_type);
3563 if (sub_count == -1)
3564 return -1;
3565 count += sub_count;
3566 }
3567 if (TYPE_LENGTH (t) == 0)
3568 {
3569 gdb_assert (count == 0);
3570 return 0;
3571 }
3572 else if (count == 0)
3573 return -1;
3574 unitlen = arm_vfp_cprc_unit_length (*base_type);
3575 if (TYPE_LENGTH (t) != unitlen * count)
3576 return -1;
3577 return count;
3578 }
3579
3580 case TYPE_CODE_UNION:
3581 {
3582 int count = 0;
3583 unsigned unitlen;
3584 int i;
3585 for (i = 0; i < TYPE_NFIELDS (t); i++)
3586 {
3587 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3588 base_type);
3589 if (sub_count == -1)
3590 return -1;
3591 count = (count > sub_count ? count : sub_count);
3592 }
3593 if (TYPE_LENGTH (t) == 0)
3594 {
3595 gdb_assert (count == 0);
3596 return 0;
3597 }
3598 else if (count == 0)
3599 return -1;
3600 unitlen = arm_vfp_cprc_unit_length (*base_type);
3601 if (TYPE_LENGTH (t) != unitlen * count)
3602 return -1;
3603 return count;
3604 }
3605
3606 default:
3607 break;
3608 }
3609
3610 return -1;
3611}
3612
3613/* Determine whether T is a VFP co-processor register candidate (CPRC)
3614 if passed to or returned from a non-variadic function with the VFP
3615 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3616 *BASE_TYPE to the base type for T and *COUNT to the number of
3617 elements of that base type before returning. */
3618
3619static int
3620arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3621 int *count)
3622{
3623 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3624 int c = arm_vfp_cprc_sub_candidate (t, &b);
3625 if (c <= 0 || c > 4)
3626 return 0;
3627 *base_type = b;
3628 *count = c;
3629 return 1;
3630}
3631
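/* Illustrative sketch (editor's addition): how the classification above
   behaves for two cases.  The type variables here are hypothetical.  */
#if 0
  enum arm_vfp_cprc_base_type base = VFP_CPRC_UNKNOWN;
  int count;

  /* struct { float x, y, z; }: base becomes VFP_CPRC_SINGLE and count 3,
     so this is a CPRC and would be passed in s0-s2.  */
  arm_vfp_call_candidate (three_float_struct_type, &base, &count); /* 1 */

  /* struct { double d[5]; }: five elements exceed the limit of four,
     so this is not a CPRC and falls back to the base ABI.  */
  arm_vfp_call_candidate (five_double_struct_type, &base, &count); /* 0 */
#endif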
3632/* Return 1 if the VFP ABI should be used for passing arguments to and
3633 returning values from a function of type FUNC_TYPE, 0
3634 otherwise. */
3635
3636static int
3637arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3638{
3639 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3640 /* Variadic functions always use the base ABI. Assume that functions
3641 without debug info are not variadic. */
3642 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3643 return 0;
3644 /* The VFP ABI is only supported as a variant of AAPCS. */
3645 if (tdep->arm_abi != ARM_ABI_AAPCS)
3646 return 0;
3647 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3648}
3649
3650/* We currently only support passing parameters in integer registers, which
3651 conforms with GCC's default model, and VFP argument passing following
3652 the VFP variant of AAPCS. Several other variants exist and
2dd604e7
RE
3653 we should probably support some of them based on the selected ABI. */
3654
3655static CORE_ADDR
7d9b040b 3656arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
6a65450a
AC
3657 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3658 struct value **args, CORE_ADDR sp, int struct_return,
3659 CORE_ADDR struct_addr)
2dd604e7 3660{
e17a4113 3661 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2dd604e7
RE
3662 int argnum;
3663 int argreg;
3664 int nstack;
3665 struct stack_item *si = NULL;
90445bd3
DJ
3666 int use_vfp_abi;
3667 struct type *ftype;
3668 unsigned vfp_regs_free = (1 << 16) - 1;
3669
3670 /* Determine the type of this function and whether the VFP ABI
3671 applies. */
3672 ftype = check_typedef (value_type (function));
3673 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3674 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3675 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2dd604e7 3676
6a65450a
AC
3677 /* Set the return address. For the ARM, the return breakpoint is
3678 always at BP_ADDR. */
9779414d 3679 if (arm_pc_is_thumb (gdbarch, bp_addr))
9dca5578 3680 bp_addr |= 1;
6a65450a 3681 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2dd604e7
RE
3682
3683 /* Walk through the list of args and determine how large a temporary
3684 stack is required. Need to take care here as structs may be
7a9dd1b2 3685 passed on the stack, and we have to push them. */
2dd604e7
RE
3686 nstack = 0;
3687
3688 argreg = ARM_A1_REGNUM;
3689 nstack = 0;
3690
2dd604e7
RE
3691 /* The struct_return pointer occupies the first parameter
3692 passing register. */
3693 if (struct_return)
3694 {
3695 if (arm_debug)
5af949e3 3696 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2af46ca0 3697 gdbarch_register_name (gdbarch, argreg),
5af949e3 3698 paddress (gdbarch, struct_addr));
2dd604e7
RE
3699 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3700 argreg++;
3701 }
3702
3703 for (argnum = 0; argnum < nargs; argnum++)
3704 {
3705 int len;
3706 struct type *arg_type;
3707 struct type *target_type;
3708 enum type_code typecode;
8c6363cf 3709 const bfd_byte *val;
2af48f68 3710 int align;
90445bd3
DJ
3711 enum arm_vfp_cprc_base_type vfp_base_type;
3712 int vfp_base_count;
3713 int may_use_core_reg = 1;
2dd604e7 3714
df407dfe 3715 arg_type = check_typedef (value_type (args[argnum]));
2dd604e7
RE
3716 len = TYPE_LENGTH (arg_type);
3717 target_type = TYPE_TARGET_TYPE (arg_type);
3718 typecode = TYPE_CODE (arg_type);
8c6363cf 3719 val = value_contents (args[argnum]);
2dd604e7 3720
2af48f68
PB
3721 align = arm_type_align (arg_type);
3722 /* Round alignment up to a whole number of words. */
3723 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3724 /* Different ABIs have different maximum alignments. */
3725 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3726 {
3727 /* The APCS ABI only requires word alignment. */
3728 align = INT_REGISTER_SIZE;
3729 }
3730 else
3731 {
3732 /* The AAPCS requires at most doubleword alignment. */
3733 if (align > INT_REGISTER_SIZE * 2)
3734 align = INT_REGISTER_SIZE * 2;
3735 }
3736
90445bd3
DJ
3737 if (use_vfp_abi
3738 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3739 &vfp_base_count))
3740 {
3741 int regno;
3742 int unit_length;
3743 int shift;
3744 unsigned mask;
3745
3746 /* Because this is a CPRC it cannot go in a core register or
3747 cause a core register to be skipped for alignment.
3748 Either it goes in VFP registers and the rest of this loop
3749 iteration is skipped for this argument, or it goes on the
3750 stack (and the stack alignment code is correct for this
3751 case). */
3752 may_use_core_reg = 0;
3753
3754 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3755 shift = unit_length / 4;
3756 mask = (1 << (shift * vfp_base_count)) - 1;
3757 for (regno = 0; regno < 16; regno += shift)
3758 if (((vfp_regs_free >> regno) & mask) == mask)
3759 break;
3760
3761 if (regno < 16)
3762 {
3763 int reg_char;
3764 int reg_scaled;
3765 int i;
3766
3767 vfp_regs_free &= ~(mask << regno);
3768 reg_scaled = regno / shift;
3769 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3770 for (i = 0; i < vfp_base_count; i++)
3771 {
3772 char name_buf[4];
3773 int regnum;
58d6951d
DJ
3774 if (reg_char == 'q')
3775 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
90445bd3 3776 val + i * unit_length);
58d6951d
DJ
3777 else
3778 {
8c042590
PM
3779 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3780 reg_char, reg_scaled + i);
58d6951d
DJ
3781 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3782 strlen (name_buf));
3783 regcache_cooked_write (regcache, regnum,
3784 val + i * unit_length);
3785 }
90445bd3
DJ
3786 }
3787 continue;
3788 }
3789 else
3790 {
3791 /* This CPRC could not go in VFP registers, so all VFP
3792 registers are now marked as used. */
3793 vfp_regs_free = 0;
3794 }
3795 }
3796
2af48f68
PB
 3797	      /* Push stack padding for doubleword alignment.  */
3798 if (nstack & (align - 1))
3799 {
3800 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3801 nstack += INT_REGISTER_SIZE;
3802 }
3803
3804 /* Doubleword aligned quantities must go in even register pairs. */
90445bd3
DJ
3805 if (may_use_core_reg
3806 && argreg <= ARM_LAST_ARG_REGNUM
2af48f68
PB
3807 && align > INT_REGISTER_SIZE
3808 && argreg & 1)
3809 argreg++;
3810
2dd604e7
RE
3811 /* If the argument is a pointer to a function, and it is a
3812 Thumb function, create a LOCAL copy of the value and set
3813 the THUMB bit in it. */
3814 if (TYPE_CODE_PTR == typecode
3815 && target_type != NULL
f96b8fa0 3816 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2dd604e7 3817 {
e17a4113 3818 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
9779414d 3819 if (arm_pc_is_thumb (gdbarch, regval))
2dd604e7 3820 {
224c3ddb 3821 bfd_byte *copy = (bfd_byte *) alloca (len);
8c6363cf 3822 store_unsigned_integer (copy, len, byte_order,
e17a4113 3823 MAKE_THUMB_ADDR (regval));
8c6363cf 3824 val = copy;
2dd604e7
RE
3825 }
3826 }
3827
3828 /* Copy the argument to general registers or the stack in
3829 register-sized pieces. Large arguments are split between
3830 registers and stack. */
3831 while (len > 0)
3832 {
f0c9063c 3833 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
ef9bd0b8
YQ
3834 CORE_ADDR regval
3835 = extract_unsigned_integer (val, partial_len, byte_order);
2dd604e7 3836
90445bd3 3837 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2dd604e7
RE
3838 {
3839 /* The argument is being passed in a general purpose
3840 register. */
e17a4113 3841 if (byte_order == BFD_ENDIAN_BIG)
8bf8793c 3842 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2dd604e7
RE
3843 if (arm_debug)
3844 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
c9f4d572
UW
3845 argnum,
3846 gdbarch_register_name
2af46ca0 3847 (gdbarch, argreg),
f0c9063c 3848 phex (regval, INT_REGISTER_SIZE));
2dd604e7
RE
3849 regcache_cooked_write_unsigned (regcache, argreg, regval);
3850 argreg++;
3851 }
3852 else
3853 {
ef9bd0b8
YQ
3854 gdb_byte buf[INT_REGISTER_SIZE];
3855
3856 memset (buf, 0, sizeof (buf));
3857 store_unsigned_integer (buf, partial_len, byte_order, regval);
3858
2dd604e7
RE
3859 /* Push the arguments onto the stack. */
3860 if (arm_debug)
3861 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3862 argnum, nstack);
ef9bd0b8 3863 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
f0c9063c 3864 nstack += INT_REGISTER_SIZE;
2dd604e7
RE
3865 }
3866
3867 len -= partial_len;
3868 val += partial_len;
3869 }
3870 }
3871 /* If we have an odd number of words to push, then decrement the stack
3872 by one word now, so first stack argument will be dword aligned. */
3873 if (nstack & 4)
3874 sp -= 4;
3875
3876 while (si)
3877 {
3878 sp -= si->len;
3879 write_memory (sp, si->data, si->len);
3880 si = pop_stack_item (si);
3881 }
3882
 3883	  /* Finally, update the SP register.  */
3884 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3885
3886 return sp;
3887}
3888
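/* Illustrative example (editor's addition): for a hypothetical call
   f (int a, double d, int b) under the base AAPCS (no VFP argument
   passing), the code above produces:

       a -> r0
       d -> r2:r3  (r1 is skipped so the 8-byte-aligned value starts in
                    an even register)
       b -> the stack, at the final SP

   With the VFP variant of the AAPCS, d would instead go in d0, leaving
   r1 free for b.  */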
f53f0d0b
PB
3889
3890/* Always align the frame to an 8-byte boundary. This is required on
3891 some platforms and harmless on the rest. */
3892
3893static CORE_ADDR
3894arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3895{
3896 /* Align the stack to eight bytes. */
3897 return sp & ~ (CORE_ADDR) 7;
3898}
3899
c906108c 3900static void
12b27276 3901print_fpu_flags (struct ui_file *file, int flags)
c906108c 3902{
c5aa993b 3903 if (flags & (1 << 0))
12b27276 3904 fputs_filtered ("IVO ", file);
c5aa993b 3905 if (flags & (1 << 1))
12b27276 3906 fputs_filtered ("DVZ ", file);
c5aa993b 3907 if (flags & (1 << 2))
12b27276 3908 fputs_filtered ("OFL ", file);
c5aa993b 3909 if (flags & (1 << 3))
12b27276 3910 fputs_filtered ("UFL ", file);
c5aa993b 3911 if (flags & (1 << 4))
12b27276
WN
3912 fputs_filtered ("INX ", file);
3913 fputc_filtered ('\n', file);
c906108c
SS
3914}
3915
5e74b15c
RE
3916/* Print interesting information about the floating point processor
3917 (if present) or emulator. */
34e8f22d 3918static void
d855c300 3919arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
23e3a7ac 3920 struct frame_info *frame, const char *args)
c906108c 3921{
9c9acae0 3922 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
c5aa993b
JM
3923 int type;
3924
3925 type = (status >> 24) & 127;
edefbb7c 3926 if (status & (1 << 31))
12b27276 3927 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
edefbb7c 3928 else
12b27276 3929 fprintf_filtered (file, _("Software FPU type %d\n"), type);
edefbb7c 3930 /* i18n: [floating point unit] mask */
12b27276
WN
3931 fputs_filtered (_("mask: "), file);
3932 print_fpu_flags (file, status >> 16);
edefbb7c 3933 /* i18n: [floating point unit] flags */
12b27276
WN
3934 fputs_filtered (_("flags: "), file);
3935 print_fpu_flags (file, status);
c906108c
SS
3936}
3937
27067745
UW
3938/* Construct the ARM extended floating point type. */
3939static struct type *
3940arm_ext_type (struct gdbarch *gdbarch)
3941{
3942 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3943
3944 if (!tdep->arm_ext_type)
3945 tdep->arm_ext_type
e9bb382b 3946 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
27067745
UW
3947 floatformats_arm_ext);
3948
3949 return tdep->arm_ext_type;
3950}
3951
58d6951d
DJ
3952static struct type *
3953arm_neon_double_type (struct gdbarch *gdbarch)
3954{
3955 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3956
3957 if (tdep->neon_double_type == NULL)
3958 {
3959 struct type *t, *elem;
3960
3961 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3962 TYPE_CODE_UNION);
3963 elem = builtin_type (gdbarch)->builtin_uint8;
3964 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3965 elem = builtin_type (gdbarch)->builtin_uint16;
3966 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3967 elem = builtin_type (gdbarch)->builtin_uint32;
3968 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3969 elem = builtin_type (gdbarch)->builtin_uint64;
3970 append_composite_type_field (t, "u64", elem);
3971 elem = builtin_type (gdbarch)->builtin_float;
3972 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3973 elem = builtin_type (gdbarch)->builtin_double;
3974 append_composite_type_field (t, "f64", elem);
3975
3976 TYPE_VECTOR (t) = 1;
3977 TYPE_NAME (t) = "neon_d";
3978 tdep->neon_double_type = t;
3979 }
3980
3981 return tdep->neon_double_type;
3982}
3983
3984/* FIXME: The vector types are not correctly ordered on big-endian
3985 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3986 bits of d0 - regardless of what unit size is being held in d0. So
3987 the offset of the first uint8 in d0 is 7, but the offset of the
3988 first float is 4. This code works as-is for little-endian
3989 targets. */
3990
3991static struct type *
3992arm_neon_quad_type (struct gdbarch *gdbarch)
3993{
3994 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3995
3996 if (tdep->neon_quad_type == NULL)
3997 {
3998 struct type *t, *elem;
3999
4000 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4001 TYPE_CODE_UNION);
4002 elem = builtin_type (gdbarch)->builtin_uint8;
4003 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4004 elem = builtin_type (gdbarch)->builtin_uint16;
4005 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4006 elem = builtin_type (gdbarch)->builtin_uint32;
4007 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4008 elem = builtin_type (gdbarch)->builtin_uint64;
4009 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4010 elem = builtin_type (gdbarch)->builtin_float;
4011 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4012 elem = builtin_type (gdbarch)->builtin_double;
4013 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4014
4015 TYPE_VECTOR (t) = 1;
4016 TYPE_NAME (t) = "neon_q";
4017 tdep->neon_quad_type = t;
4018 }
4019
4020 return tdep->neon_quad_type;
4021}
4022
34e8f22d
RE
4023/* Return the GDB type object for the "standard" data type of data in
4024 register N. */
4025
4026static struct type *
7a5ea0d4 4027arm_register_type (struct gdbarch *gdbarch, int regnum)
032758dc 4028{
58d6951d
DJ
4029 int num_regs = gdbarch_num_regs (gdbarch);
4030
4031 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4032 && regnum >= num_regs && regnum < num_regs + 32)
4033 return builtin_type (gdbarch)->builtin_float;
4034
4035 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4036 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4037 return arm_neon_quad_type (gdbarch);
4038
4039 /* If the target description has register information, we are only
4040 in this function so that we can override the types of
4041 double-precision registers for NEON. */
4042 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4043 {
4044 struct type *t = tdesc_register_type (gdbarch, regnum);
4045
4046 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4047 && TYPE_CODE (t) == TYPE_CODE_FLT
4048 && gdbarch_tdep (gdbarch)->have_neon)
4049 return arm_neon_double_type (gdbarch);
4050 else
4051 return t;
4052 }
4053
34e8f22d 4054 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
58d6951d
DJ
4055 {
4056 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4057 return builtin_type (gdbarch)->builtin_void;
4058
4059 return arm_ext_type (gdbarch);
4060 }
e4c16157 4061 else if (regnum == ARM_SP_REGNUM)
0dfff4cb 4062 return builtin_type (gdbarch)->builtin_data_ptr;
e4c16157 4063 else if (regnum == ARM_PC_REGNUM)
0dfff4cb 4064 return builtin_type (gdbarch)->builtin_func_ptr;
ff6f572f
DJ
4065 else if (regnum >= ARRAY_SIZE (arm_register_names))
4066 /* These registers are only supported on targets which supply
4067 an XML description. */
df4df182 4068 return builtin_type (gdbarch)->builtin_int0;
032758dc 4069 else
df4df182 4070 return builtin_type (gdbarch)->builtin_uint32;
032758dc
AC
4071}
4072
ff6f572f
DJ
4073/* Map a DWARF register REGNUM onto the appropriate GDB register
4074 number. */
4075
4076static int
d3f73121 4077arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
ff6f572f
DJ
4078{
4079 /* Core integer regs. */
4080 if (reg >= 0 && reg <= 15)
4081 return reg;
4082
4083 /* Legacy FPA encoding. These were once used in a way which
4084 overlapped with VFP register numbering, so their use is
4085 discouraged, but GDB doesn't support the ARM toolchain
4086 which used them for VFP. */
4087 if (reg >= 16 && reg <= 23)
4088 return ARM_F0_REGNUM + reg - 16;
4089
4090 /* New assignments for the FPA registers. */
4091 if (reg >= 96 && reg <= 103)
4092 return ARM_F0_REGNUM + reg - 96;
4093
4094 /* WMMX register assignments. */
4095 if (reg >= 104 && reg <= 111)
4096 return ARM_WCGR0_REGNUM + reg - 104;
4097
4098 if (reg >= 112 && reg <= 127)
4099 return ARM_WR0_REGNUM + reg - 112;
4100
4101 if (reg >= 192 && reg <= 199)
4102 return ARM_WC0_REGNUM + reg - 192;
4103
58d6951d
DJ
4104 /* VFP v2 registers. A double precision value is actually
4105 in d1 rather than s2, but the ABI only defines numbering
4106 for the single precision registers. This will "just work"
4107 in GDB for little endian targets (we'll read eight bytes,
4108 starting in s0 and then progressing to s1), but will be
4109 reversed on big endian targets with VFP. This won't
4110 be a problem for the new Neon quad registers; you're supposed
4111 to use DW_OP_piece for those. */
4112 if (reg >= 64 && reg <= 95)
4113 {
4114 char name_buf[4];
4115
8c042590 4116 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
58d6951d
DJ
4117 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4118 strlen (name_buf));
4119 }
4120
4121 /* VFP v3 / Neon registers. This range is also used for VFP v2
4122 registers, except that it now describes d0 instead of s0. */
4123 if (reg >= 256 && reg <= 287)
4124 {
4125 char name_buf[4];
4126
8c042590 4127 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
58d6951d
DJ
4128 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4129 strlen (name_buf));
4130 }
4131
ff6f572f
DJ
4132 return -1;
4133}
4134
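/* Illustrative examples (editor's addition) of the mapping above:

     arm_dwarf_reg_to_regnum (gdbarch, 13)  -> 13 (sp)
     arm_dwarf_reg_to_regnum (gdbarch, 66)  -> the GDB number of "s2"
     arm_dwarf_reg_to_regnum (gdbarch, 257) -> the GDB number of "d1"
     arm_dwarf_reg_to_regnum (gdbarch, 300) -> -1 (no known mapping)  */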
26216b98
AC
4135/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4136static int
e7faf938 4137arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
26216b98
AC
4138{
4139 int reg = regnum;
e7faf938 4140 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
26216b98 4141
ff6f572f
DJ
4142 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4143 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4144
4145 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4146 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4147
4148 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4149 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4150
26216b98
AC
4151 if (reg < NUM_GREGS)
4152 return SIM_ARM_R0_REGNUM + reg;
4153 reg -= NUM_GREGS;
4154
4155 if (reg < NUM_FREGS)
4156 return SIM_ARM_FP0_REGNUM + reg;
4157 reg -= NUM_FREGS;
4158
4159 if (reg < NUM_SREGS)
4160 return SIM_ARM_FPS_REGNUM + reg;
4161 reg -= NUM_SREGS;
4162
edefbb7c 4163 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
26216b98 4164}
34e8f22d 4165
a37b3cc0
AC
4166/* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4167 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
 4168   It is thought that this is the floating-point register format on
4169 little-endian systems. */
c906108c 4170
ed9a39eb 4171static void
b508a996 4172convert_from_extended (const struct floatformat *fmt, const void *ptr,
be8626e0 4173 void *dbl, int endianess)
c906108c 4174{
a37b3cc0 4175 DOUBLEST d;
be8626e0
MD
4176
4177 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4178 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4179 else
4180 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4181 ptr, &d);
b508a996 4182 floatformat_from_doublest (fmt, &d, dbl);
c906108c
SS
4183}
4184
34e8f22d 4185static void
be8626e0
MD
4186convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4187 int endianess)
c906108c 4188{
a37b3cc0 4189 DOUBLEST d;
be8626e0 4190
b508a996 4191 floatformat_to_doublest (fmt, ptr, &d);
be8626e0 4192 if (endianess == BFD_ENDIAN_BIG)
a37b3cc0
AC
4193 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4194 else
4195 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4196 &d, dbl);
c906108c 4197}
ed9a39eb 4198
d9311bfa
AT
4199/* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4200 of the appropriate mode (as encoded in the PC value), even if this
4201 differs from what would be expected according to the symbol tables. */
4202
4203void
4204arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4205 struct address_space *aspace,
4206 CORE_ADDR pc)
c906108c 4207{
d9311bfa
AT
4208 struct cleanup *old_chain
4209 = make_cleanup_restore_integer (&arm_override_mode);
c5aa993b 4210
d9311bfa
AT
4211 arm_override_mode = IS_THUMB_ADDR (pc);
4212 pc = gdbarch_addr_bits_remove (gdbarch, pc);
c5aa993b 4213
d9311bfa 4214 insert_single_step_breakpoint (gdbarch, aspace, pc);
c906108c 4215
d9311bfa
AT
4216 do_cleanups (old_chain);
4217}
c5aa993b 4218
d9311bfa
AT
4219/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4220 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4221 NULL if an error occurs. BUF is freed. */
c906108c 4222
d9311bfa
AT
4223static gdb_byte *
4224extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4225 int old_len, int new_len)
4226{
4227 gdb_byte *new_buf;
4228 int bytes_to_read = new_len - old_len;
c906108c 4229
d9311bfa
AT
4230 new_buf = (gdb_byte *) xmalloc (new_len);
4231 memcpy (new_buf + bytes_to_read, buf, old_len);
4232 xfree (buf);
4233 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4234 {
4235 xfree (new_buf);
4236 return NULL;
c906108c 4237 }
d9311bfa 4238 return new_buf;
c906108c
SS
4239}
4240
d9311bfa
AT
4241/* An IT block is at most the 2-byte IT instruction followed by
4242 four 4-byte instructions. The furthest back we must search to
4243 find an IT block that affects the current instruction is thus
4244 2 + 3 * 4 == 14 bytes. */
4245#define MAX_IT_BLOCK_PREFIX 14
177321bd 4246
d9311bfa
AT
4247/* Use a quick scan if there are more than this many bytes of
4248 code. */
4249#define IT_SCAN_THRESHOLD 32
177321bd 4250
d9311bfa
AT
4251/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4252 A breakpoint in an IT block may not be hit, depending on the
4253 condition flags. */
ad527d2e 4254static CORE_ADDR
d9311bfa 4255arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
c906108c 4256{
d9311bfa
AT
4257 gdb_byte *buf;
4258 char map_type;
4259 CORE_ADDR boundary, func_start;
4260 int buf_len;
4261 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4262 int i, any, last_it, last_it_count;
177321bd 4263
d9311bfa
AT
4264 /* If we are using BKPT breakpoints, none of this is necessary. */
4265 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4266 return bpaddr;
177321bd 4267
d9311bfa
AT
4268 /* ARM mode does not have this problem. */
4269 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4270 return bpaddr;
177321bd 4271
d9311bfa
AT
4272 /* We are setting a breakpoint in Thumb code that could potentially
4273 contain an IT block. The first step is to find how much Thumb
4274 code there is; we do not need to read outside of known Thumb
4275 sequences. */
4276 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4277 if (map_type == 0)
4278 /* Thumb-2 code must have mapping symbols to have a chance. */
4279 return bpaddr;
9dca5578 4280
d9311bfa 4281 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
177321bd 4282
d9311bfa
AT
4283 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4284 && func_start > boundary)
4285 boundary = func_start;
9dca5578 4286
d9311bfa
AT
4287 /* Search for a candidate IT instruction. We have to do some fancy
4288 footwork to distinguish a real IT instruction from the second
4289 half of a 32-bit instruction, but there is no need for that if
4290 there's no candidate. */
4291 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4292 if (buf_len == 0)
4293 /* No room for an IT instruction. */
4294 return bpaddr;
c906108c 4295
d9311bfa
AT
4296 buf = (gdb_byte *) xmalloc (buf_len);
4297 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4298 return bpaddr;
4299 any = 0;
4300 for (i = 0; i < buf_len; i += 2)
c906108c 4301 {
d9311bfa
AT
4302 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4303 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
25b41d01 4304 {
d9311bfa
AT
4305 any = 1;
4306 break;
25b41d01 4307 }
c906108c 4308 }
d9311bfa
AT
4309
4310 if (any == 0)
c906108c 4311 {
d9311bfa
AT
4312 xfree (buf);
4313 return bpaddr;
f9d67f43
DJ
4314 }
4315
4316 /* OK, the code bytes before this instruction contain at least one
4317 halfword which resembles an IT instruction. We know that it's
4318 Thumb code, but there are still two possibilities. Either the
4319 halfword really is an IT instruction, or it is the second half of
4320 a 32-bit Thumb instruction. The only way we can tell is to
4321 scan forwards from a known instruction boundary. */
4322 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4323 {
4324 int definite;
4325
4326 /* There's a lot of code before this instruction. Start with an
4327 optimistic search; it's easy to recognize halfwords that can
4328 not be the start of a 32-bit instruction, and use that to
4329 lock on to the instruction boundaries. */
4330 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4331 if (buf == NULL)
4332 return bpaddr;
4333 buf_len = IT_SCAN_THRESHOLD;
4334
4335 definite = 0;
4336 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4337 {
4338 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4339 if (thumb_insn_size (inst1) == 2)
4340 {
4341 definite = 1;
4342 break;
4343 }
4344 }
4345
4346 /* At this point, if DEFINITE, BUF[I] is the first place we
4347 are sure that we know the instruction boundaries, and it is far
4348 enough from BPADDR that we could not miss an IT instruction
4349 affecting BPADDR. If ! DEFINITE, give up - start from a
4350 known boundary. */
4351 if (! definite)
4352 {
0963b4bd
MS
4353 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4354 bpaddr - boundary);
f9d67f43
DJ
4355 if (buf == NULL)
4356 return bpaddr;
4357 buf_len = bpaddr - boundary;
4358 i = 0;
4359 }
4360 }
4361 else
4362 {
4363 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4364 if (buf == NULL)
4365 return bpaddr;
4366 buf_len = bpaddr - boundary;
4367 i = 0;
4368 }
4369
4370 /* Scan forwards. Find the last IT instruction before BPADDR. */
4371 last_it = -1;
4372 last_it_count = 0;
4373 while (i < buf_len)
4374 {
4375 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4376 last_it_count--;
4377 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4378 {
4379 last_it = i;
4380 if (inst1 & 0x0001)
4381 last_it_count = 4;
4382 else if (inst1 & 0x0002)
4383 last_it_count = 3;
4384 else if (inst1 & 0x0004)
4385 last_it_count = 2;
4386 else
4387 last_it_count = 1;
4388 }
4389 i += thumb_insn_size (inst1);
4390 }
4391
4392 xfree (buf);
4393
4394 if (last_it == -1)
4395 /* There wasn't really an IT instruction after all. */
4396 return bpaddr;
4397
4398 if (last_it_count < 1)
4399 /* It was too far away. */
4400 return bpaddr;
4401
4402 /* This really is a trouble spot. Move the breakpoint to the IT
4403 instruction. */
4404 return bpaddr - buf_len + last_it;
4405}
4406
cca44b1b 4407/* ARM displaced stepping support.
c906108c 4408
cca44b1b 4409 Generally ARM displaced stepping works as follows:
c906108c 4410
cca44b1b 4411 1. When an instruction is to be single-stepped, it is first decoded by
2ba163c8
SM
4412 arm_process_displaced_insn. Depending on the type of instruction, it is
4413 then copied to a scratch location, possibly in a modified form. The
4414 copy_* set of functions performs such modification, as necessary. A
4415 breakpoint is placed after the modified instruction in the scratch space
4416 to return control to GDB. Note in particular that instructions which
4417 modify the PC will no longer do so after modification.
c5aa993b 4418
cca44b1b
JB
4419 2. The instruction is single-stepped, by setting the PC to the scratch
4420 location address, and resuming. Control returns to GDB when the
4421 breakpoint is hit.
c5aa993b 4422
cca44b1b
JB
4423 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4424 function used for the current instruction. This function's job is to
4425 put the CPU/memory state back to what it would have been if the
4426 instruction had been executed unmodified in its original location. */
c5aa993b 4427
cca44b1b
JB
4428/* NOP instruction (mov r0, r0). */
4429#define ARM_NOP 0xe1a00000
34518530 4430#define THUMB_NOP 0x4600
cca44b1b
JB
4431
4432/* Helper for register reads for displaced stepping. In particular, this
4433 returns the PC as it would be seen by the instruction at its original
4434 location. */
4435
4436ULONGEST
36073a92
YQ
4437displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4438 int regno)
cca44b1b
JB
4439{
4440 ULONGEST ret;
36073a92 4441 CORE_ADDR from = dsc->insn_addr;
cca44b1b 4442
bf9f652a 4443 if (regno == ARM_PC_REGNUM)
cca44b1b 4444 {
4db71c0b
YQ
4445 /* Compute pipeline offset:
4446 - When executing an ARM instruction, PC reads as the address of the
4447 current instruction plus 8.
4448 - When executing a Thumb instruction, PC reads as the address of the
4449 current instruction plus 4. */
4450
36073a92 4451 if (!dsc->is_thumb)
4db71c0b
YQ
4452 from += 8;
4453 else
4454 from += 4;
4455
cca44b1b
JB
4456 if (debug_displaced)
4457 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4db71c0b
YQ
4458 (unsigned long) from);
4459 return (ULONGEST) from;
cca44b1b 4460 }
c906108c 4461 else
cca44b1b
JB
4462 {
4463 regcache_cooked_read_unsigned (regs, regno, &ret);
4464 if (debug_displaced)
4465 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4466 regno, (unsigned long) ret);
4467 return ret;
4468 }
c906108c
SS
4469}
4470
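/* Illustrative example (editor's addition): if the instruction being
   stepped originally lived at 0x8000 (dsc->insn_addr), then
   displaced_read_reg (regs, dsc, ARM_PC_REGNUM) returns 0x8008 in ARM
   mode and 0x8004 in Thumb mode - the PC value the instruction would
   have observed at its original location.  Other registers are read
   straight from the register cache.  */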
cca44b1b
JB
4471static int
4472displaced_in_arm_mode (struct regcache *regs)
4473{
4474 ULONGEST ps;
9779414d 4475 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
66e810cd 4476
cca44b1b 4477 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
66e810cd 4478
9779414d 4479 return (ps & t_bit) == 0;
cca44b1b 4480}
66e810cd 4481
cca44b1b 4482/* Write to the PC as from a branch instruction. */
c906108c 4483
cca44b1b 4484static void
36073a92
YQ
4485branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4486 ULONGEST val)
c906108c 4487{
36073a92 4488 if (!dsc->is_thumb)
cca44b1b
JB
4489 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4490 architecture versions < 6. */
0963b4bd
MS
4491 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4492 val & ~(ULONGEST) 0x3);
cca44b1b 4493 else
0963b4bd
MS
4494 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4495 val & ~(ULONGEST) 0x1);
cca44b1b 4496}
66e810cd 4497
cca44b1b
JB
4498/* Write to the PC as from a branch-exchange instruction. */
4499
4500static void
4501bx_write_pc (struct regcache *regs, ULONGEST val)
4502{
4503 ULONGEST ps;
9779414d 4504 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
cca44b1b
JB
4505
4506 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4507
4508 if ((val & 1) == 1)
c906108c 4509 {
9779414d 4510 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
cca44b1b
JB
4511 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4512 }
4513 else if ((val & 2) == 0)
4514 {
9779414d 4515 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4516 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
c906108c
SS
4517 }
4518 else
4519 {
cca44b1b
JB
4520 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4521 mode, align dest to 4 bytes). */
4522 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
9779414d 4523 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
cca44b1b 4524 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
c906108c
SS
4525 }
4526}
ed9a39eb 4527
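/* Illustrative examples (editor's addition): bx_write_pc (regs, 0x8001)
   sets the Thumb bit in CPSR and writes PC = 0x8000;
   bx_write_pc (regs, 0x8004) clears the Thumb bit and writes
   PC = 0x8004; and a value such as 0x8002 (bit 1 set, bit 0 clear) is
   unpredictable and takes the warning path above, forcing ARM mode and
   a 4-byte-aligned PC.  */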
cca44b1b 4528/* Write to the PC as if from a load instruction. */
ed9a39eb 4529
34e8f22d 4530static void
36073a92
YQ
4531load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4532 ULONGEST val)
ed9a39eb 4533{
cca44b1b
JB
4534 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4535 bx_write_pc (regs, val);
4536 else
36073a92 4537 branch_write_pc (regs, dsc, val);
cca44b1b 4538}
be8626e0 4539
cca44b1b
JB
4540/* Write to the PC as if from an ALU instruction. */
4541
4542static void
36073a92
YQ
4543alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4544 ULONGEST val)
cca44b1b 4545{
36073a92 4546 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
cca44b1b
JB
4547 bx_write_pc (regs, val);
4548 else
36073a92 4549 branch_write_pc (regs, dsc, val);
cca44b1b
JB
4550}
4551
4552/* Helper for writing to registers for displaced stepping. Writing to the PC
 4553   has varying effects depending on the instruction which does the write:
4554 this is controlled by the WRITE_PC argument. */
4555
4556void
4557displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4558 int regno, ULONGEST val, enum pc_write_style write_pc)
4559{
bf9f652a 4560 if (regno == ARM_PC_REGNUM)
08216dd7 4561 {
cca44b1b
JB
4562 if (debug_displaced)
4563 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4564 (unsigned long) val);
4565 switch (write_pc)
08216dd7 4566 {
cca44b1b 4567 case BRANCH_WRITE_PC:
36073a92 4568 branch_write_pc (regs, dsc, val);
08216dd7
RE
4569 break;
4570
cca44b1b
JB
4571 case BX_WRITE_PC:
4572 bx_write_pc (regs, val);
4573 break;
4574
4575 case LOAD_WRITE_PC:
36073a92 4576 load_write_pc (regs, dsc, val);
cca44b1b
JB
4577 break;
4578
4579 case ALU_WRITE_PC:
36073a92 4580 alu_write_pc (regs, dsc, val);
cca44b1b
JB
4581 break;
4582
4583 case CANNOT_WRITE_PC:
4584 warning (_("Instruction wrote to PC in an unexpected way when "
4585 "single-stepping"));
08216dd7
RE
4586 break;
4587
4588 default:
97b9747c
JB
4589 internal_error (__FILE__, __LINE__,
4590 _("Invalid argument to displaced_write_reg"));
08216dd7 4591 }
b508a996 4592
cca44b1b 4593 dsc->wrote_to_pc = 1;
b508a996 4594 }
ed9a39eb 4595 else
b508a996 4596 {
cca44b1b
JB
4597 if (debug_displaced)
4598 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4599 regno, (unsigned long) val);
4600 regcache_cooked_write_unsigned (regs, regno, val);
b508a996 4601 }
34e8f22d
RE
4602}
4603
cca44b1b
JB
4604/* This function is used to concisely determine if an instruction INSN
4605 references PC. Register fields of interest in INSN should have the
0963b4bd
MS
4606 corresponding fields of BITMASK set to 0b1111. The function
 4606   corresponding fields of BITMASK set to 0b1111.  The function
 4607   returns 1 if any of these fields in INSN reference the PC
4608 (also 0b1111, r15), else it returns 0. */
67255d04
RE
4609
4610static int
cca44b1b 4611insn_references_pc (uint32_t insn, uint32_t bitmask)
67255d04 4612{
cca44b1b 4613 uint32_t lowbit = 1;
67255d04 4614
cca44b1b
JB
4615 while (bitmask != 0)
4616 {
4617 uint32_t mask;
44e1a9eb 4618
cca44b1b
JB
4619 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4620 ;
67255d04 4621
cca44b1b
JB
4622 if (!lowbit)
4623 break;
67255d04 4624
cca44b1b 4625 mask = lowbit * 0xf;
67255d04 4626
cca44b1b
JB
4627 if ((insn & mask) == mask)
4628 return 1;
4629
4630 bitmask &= ~mask;
67255d04
RE
4631 }
4632
cca44b1b
JB
4633 return 0;
4634}
2af48f68 4635
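/* Illustrative example (editor's addition): for the ARM instruction
   0xe59ff018 (ldr pc, [pc, #24]) with BITMASK 0x000ff000 (covering the
   Rn and Rd fields), insn_references_pc returns 1 because the Rd field
   is 0xf; for 0xe5910000 (ldr r0, [r1]) with the same mask it
   returns 0.  */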
cca44b1b
JB
4636/* The simplest copy function. Many instructions have the same effect no
4637 matter what address they are executed at: in those cases, use this. */
67255d04 4638
cca44b1b 4639static int
7ff120b4
YQ
4640arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4641 const char *iname, struct displaced_step_closure *dsc)
cca44b1b
JB
4642{
4643 if (debug_displaced)
4644 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4645 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4646 iname);
67255d04 4647
cca44b1b 4648 dsc->modinsn[0] = insn;
67255d04 4649
cca44b1b
JB
4650 return 0;
4651}
4652
34518530
YQ
4653static int
4654thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4655 uint16_t insn2, const char *iname,
4656 struct displaced_step_closure *dsc)
4657{
4658 if (debug_displaced)
4659 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4660 "opcode/class '%s' unmodified\n", insn1, insn2,
4661 iname);
4662
4663 dsc->modinsn[0] = insn1;
4664 dsc->modinsn[1] = insn2;
4665 dsc->numinsns = 2;
4666
4667 return 0;
4668}
4669
 4670/* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without
 4671   any modification.  */
4672static int
615234c1 4673thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
4674 const char *iname,
4675 struct displaced_step_closure *dsc)
4676{
4677 if (debug_displaced)
4678 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4679 "opcode/class '%s' unmodified\n", insn,
4680 iname);
4681
4682 dsc->modinsn[0] = insn;
4683
4684 return 0;
4685}
4686
cca44b1b
JB
4687/* Preload instructions with immediate offset. */
4688
4689static void
6e39997a 4690cleanup_preload (struct gdbarch *gdbarch,
cca44b1b
JB
4691 struct regcache *regs, struct displaced_step_closure *dsc)
4692{
4693 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4694 if (!dsc->u.preload.immed)
4695 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4696}
4697
7ff120b4
YQ
4698static void
4699install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4700 struct displaced_step_closure *dsc, unsigned int rn)
cca44b1b 4701{
cca44b1b 4702 ULONGEST rn_val;
cca44b1b
JB
4703 /* Preload instructions:
4704
4705 {pli/pld} [rn, #+/-imm]
4706 ->
4707 {pli/pld} [r0, #+/-imm]. */
4708
36073a92
YQ
4709 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4710 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 4711 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
cca44b1b
JB
4712 dsc->u.preload.immed = 1;
4713
cca44b1b 4714 dsc->cleanup = &cleanup_preload;
cca44b1b
JB
4715}
4716
cca44b1b 4717static int
7ff120b4 4718arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
cca44b1b
JB
4719 struct displaced_step_closure *dsc)
4720{
4721 unsigned int rn = bits (insn, 16, 19);
cca44b1b 4722
7ff120b4
YQ
4723 if (!insn_references_pc (insn, 0x000f0000ul))
4724 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
cca44b1b
JB
4725
4726 if (debug_displaced)
4727 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4728 (unsigned long) insn);
4729
7ff120b4
YQ
4730 dsc->modinsn[0] = insn & 0xfff0ffff;
4731
4732 install_preload (gdbarch, regs, dsc, rn);
4733
4734 return 0;
4735}
4736
34518530
YQ
4737static int
4738thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4739 struct regcache *regs, struct displaced_step_closure *dsc)
4740{
4741 unsigned int rn = bits (insn1, 0, 3);
4742 unsigned int u_bit = bit (insn1, 7);
4743 int imm12 = bits (insn2, 0, 11);
4744 ULONGEST pc_val;
4745
4746 if (rn != ARM_PC_REGNUM)
4747 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4748
 4749  /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3,
 4750     and in PLD (literal) Encoding T1.  */
4751 if (debug_displaced)
4752 fprintf_unfiltered (gdb_stdlog,
4753 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4754 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4755 imm12);
4756
4757 if (!u_bit)
4758 imm12 = -1 * imm12;
4759
4760 /* Rewrite instruction {pli/pld} PC imm12 into:
4761 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4762
4763 {pli/pld} [r0, r1]
4764
4765 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4766
4767 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4768 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4769
4770 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4771
4772 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4773 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4774 dsc->u.preload.immed = 0;
4775
4776 /* {pli/pld} [r0, r1] */
4777 dsc->modinsn[0] = insn1 & 0xfff0;
4778 dsc->modinsn[1] = 0xf001;
4779 dsc->numinsns = 2;
4780
4781 dsc->cleanup = &cleanup_preload;
4782 return 0;
4783}
4784
7ff120b4
YQ
4785/* Preload instructions with register offset. */
4786
4787static void
4788install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4789 struct displaced_step_closure *dsc, unsigned int rn,
4790 unsigned int rm)
4791{
4792 ULONGEST rn_val, rm_val;
4793
cca44b1b
JB
4794 /* Preload register-offset instructions:
4795
4796 {pli/pld} [rn, rm {, shift}]
4797 ->
4798 {pli/pld} [r0, r1 {, shift}]. */
4799
36073a92
YQ
4800 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4801 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4802 rn_val = displaced_read_reg (regs, dsc, rn);
4803 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
4804 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4805 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
cca44b1b
JB
4806 dsc->u.preload.immed = 0;
4807
cca44b1b 4808 dsc->cleanup = &cleanup_preload;
7ff120b4
YQ
4809}
4810
4811static int
4812arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4813 struct regcache *regs,
4814 struct displaced_step_closure *dsc)
4815{
4816 unsigned int rn = bits (insn, 16, 19);
4817 unsigned int rm = bits (insn, 0, 3);
4818
4819
4820 if (!insn_references_pc (insn, 0x000f000ful))
4821 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4822
4823 if (debug_displaced)
4824 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4825 (unsigned long) insn);
4826
4827 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
cca44b1b 4828
7ff120b4 4829 install_preload_reg (gdbarch, regs, dsc, rn, rm);
cca44b1b
JB
4830 return 0;
4831}
4832
4833/* Copy/cleanup coprocessor load and store instructions. */
4834
4835static void
6e39997a 4836cleanup_copro_load_store (struct gdbarch *gdbarch,
cca44b1b
JB
4837 struct regcache *regs,
4838 struct displaced_step_closure *dsc)
4839{
36073a92 4840 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
4841
4842 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4843
4844 if (dsc->u.ldst.writeback)
4845 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4846}
4847
7ff120b4
YQ
4848static void
4849install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4850 struct displaced_step_closure *dsc,
4851 int writeback, unsigned int rn)
cca44b1b 4852{
cca44b1b 4853 ULONGEST rn_val;
cca44b1b 4854
cca44b1b
JB
4855 /* Coprocessor load/store instructions:
4856
4857 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4858 ->
4859 {stc/stc2} [r0, #+/-imm].
4860
4861 ldc/ldc2 are handled identically. */
4862
36073a92
YQ
4863 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4864 rn_val = displaced_read_reg (regs, dsc, rn);
2b16b2e3
YQ
4865 /* PC should be 4-byte aligned. */
4866 rn_val = rn_val & 0xfffffffc;
cca44b1b
JB
4867 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4868
7ff120b4 4869 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
4870 dsc->u.ldst.rn = rn;
4871
7ff120b4
YQ
4872 dsc->cleanup = &cleanup_copro_load_store;
4873}
4874
4875static int
4876arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4877 struct regcache *regs,
4878 struct displaced_step_closure *dsc)
4879{
4880 unsigned int rn = bits (insn, 16, 19);
4881
4882 if (!insn_references_pc (insn, 0x000f0000ul))
4883 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4884
4885 if (debug_displaced)
4886 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4887 "load/store insn %.8lx\n", (unsigned long) insn);
4888
cca44b1b
JB
4889 dsc->modinsn[0] = insn & 0xfff0ffff;
4890
7ff120b4 4891 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
cca44b1b
JB
4892
4893 return 0;
4894}
4895
34518530
YQ
4896static int
4897thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4898 uint16_t insn2, struct regcache *regs,
4899 struct displaced_step_closure *dsc)
4900{
4901 unsigned int rn = bits (insn1, 0, 3);
4902
4903 if (rn != ARM_PC_REGNUM)
4904 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4905 "copro load/store", dsc);
4906
4907 if (debug_displaced)
4908 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4909 "load/store insn %.4x%.4x\n", insn1, insn2);
4910
4911 dsc->modinsn[0] = insn1 & 0xfff0;
4912 dsc->modinsn[1] = insn2;
4913 dsc->numinsns = 2;
4914
 4915  /* This function is called to copy the LDC/LDC2/VLDR instructions, which
 4916     do not support writeback, so pass 0.  */
4917 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4918
4919 return 0;
4920}
4921
cca44b1b
JB
4922/* Clean up branch instructions (actually perform the branch, by setting
4923 PC). */
4924
4925static void
6e39997a 4926cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
4927 struct displaced_step_closure *dsc)
4928{
36073a92 4929 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
4930 int branch_taken = condition_true (dsc->u.branch.cond, status);
4931 enum pc_write_style write_pc = dsc->u.branch.exchange
4932 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4933
4934 if (!branch_taken)
4935 return;
4936
4937 if (dsc->u.branch.link)
4938 {
8c8dba6d
YQ
 4939      /* The value of LR should be the address of the next insn after the
 4940	 current one.  In order not to confuse logic handling a later `bx lr'
 4941	 insn, if the current insn mode is Thumb, bit 0 of the LR value should be set to 1.  */
4942 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4943
4944 if (dsc->is_thumb)
4945 next_insn_addr |= 0x1;
4946
4947 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4948 CANNOT_WRITE_PC);
cca44b1b
JB
4949 }
4950
bf9f652a 4951 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
cca44b1b
JB
4952}
4953
4954/* Copy B/BL/BLX instructions with immediate destinations. */
4955
7ff120b4
YQ
4956static void
4957install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4958 struct displaced_step_closure *dsc,
4959 unsigned int cond, int exchange, int link, long offset)
4960{
4961 /* Implement "BL<cond> <label>" as:
4962
4963 Preparation: cond <- instruction condition
4964 Insn: mov r0, r0 (nop)
4965 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4966
4967 B<cond> similar, but don't set r14 in cleanup. */
4968
4969 dsc->u.branch.cond = cond;
4970 dsc->u.branch.link = link;
4971 dsc->u.branch.exchange = exchange;
4972
2b16b2e3
YQ
4973 dsc->u.branch.dest = dsc->insn_addr;
4974 if (link && exchange)
4975 /* For BLX, offset is computed from the Align (PC, 4). */
4976 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4977
7ff120b4 4978 if (dsc->is_thumb)
2b16b2e3 4979 dsc->u.branch.dest += 4 + offset;
7ff120b4 4980 else
2b16b2e3 4981 dsc->u.branch.dest += 8 + offset;
7ff120b4
YQ
4982
4983 dsc->cleanup = &cleanup_branch;
4984}
cca44b1b 4985static int
7ff120b4
YQ
4986arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4987 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
4988{
4989 unsigned int cond = bits (insn, 28, 31);
4990 int exchange = (cond == 0xf);
4991 int link = exchange || bit (insn, 24);
cca44b1b
JB
4992 long offset;
4993
4994 if (debug_displaced)
4995 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4996 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4997 (unsigned long) insn);
cca44b1b
JB
4998 if (exchange)
4999 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5000 then arrange the switch into Thumb mode. */
5001 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5002 else
5003 offset = bits (insn, 0, 23) << 2;
5004
5005 if (bit (offset, 25))
5006 offset = offset | ~0x3ffffff;
5007
cca44b1b
JB
5008 dsc->modinsn[0] = ARM_NOP;
5009
7ff120b4 5010 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
cca44b1b
JB
5011 return 0;
5012}
5013
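/* Illustrative example (editor's addition): for the ARM instruction
   0xeb000010 (bl <pc + 0x48>) at address 0x8000, the copy routine above
   extracts offset 0x40 and substitutes a NOP in the scratch space;
   cleanup_branch later writes LR = 0x8004 and, if the condition holds,
   PC = 0x8000 + 8 + 0x40 = 0x8048.  */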
34518530
YQ
5014static int
5015thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5016 uint16_t insn2, struct regcache *regs,
5017 struct displaced_step_closure *dsc)
5018{
5019 int link = bit (insn2, 14);
5020 int exchange = link && !bit (insn2, 12);
5021 int cond = INST_AL;
5022 long offset = 0;
5023 int j1 = bit (insn2, 13);
5024 int j2 = bit (insn2, 11);
5025 int s = sbits (insn1, 10, 10);
5026 int i1 = !(j1 ^ bit (insn1, 10));
5027 int i2 = !(j2 ^ bit (insn1, 10));
5028
5029 if (!link && !exchange) /* B */
5030 {
5031 offset = (bits (insn2, 0, 10) << 1);
5032 if (bit (insn2, 12)) /* Encoding T4 */
5033 {
5034 offset |= (bits (insn1, 0, 9) << 12)
5035 | (i2 << 22)
5036 | (i1 << 23)
5037 | (s << 24);
5038 cond = INST_AL;
5039 }
5040 else /* Encoding T3 */
5041 {
5042 offset |= (bits (insn1, 0, 5) << 12)
5043 | (j1 << 18)
5044 | (j2 << 19)
5045 | (s << 20);
5046 cond = bits (insn1, 6, 9);
5047 }
5048 }
5049 else
5050 {
5051 offset = (bits (insn1, 0, 9) << 12);
5052 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5053 offset |= exchange ?
5054 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5055 }
5056
5057 if (debug_displaced)
5058 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5059 "%.4x %.4x with offset %.8lx\n",
5060 link ? (exchange) ? "blx" : "bl" : "b",
5061 insn1, insn2, offset);
5062
5063 dsc->modinsn[0] = THUMB_NOP;
5064
5065 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5066 return 0;
5067}
5068
5069/* Copy B Thumb instructions. */
5070static int
615234c1 5071thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
34518530
YQ
5072 struct displaced_step_closure *dsc)
5073{
5074 unsigned int cond = 0;
5075 int offset = 0;
5076 unsigned short bit_12_15 = bits (insn, 12, 15);
5077 CORE_ADDR from = dsc->insn_addr;
5078
5079 if (bit_12_15 == 0xd)
5080 {
5081 /* offset = SignExtend (imm8:0, 32) */
5082 offset = sbits ((insn << 1), 0, 8);
5083 cond = bits (insn, 8, 11);
5084 }
5085 else if (bit_12_15 == 0xe) /* Encoding T2 */
5086 {
5087 offset = sbits ((insn << 1), 0, 11);
5088 cond = INST_AL;
5089 }
5090
5091 if (debug_displaced)
5092 fprintf_unfiltered (gdb_stdlog,
5093 "displaced: copying b immediate insn %.4x "
5094 "with offset %d\n", insn, offset);
5095
5096 dsc->u.branch.cond = cond;
5097 dsc->u.branch.link = 0;
5098 dsc->u.branch.exchange = 0;
5099 dsc->u.branch.dest = from + 4 + offset;
5100
5101 dsc->modinsn[0] = THUMB_NOP;
5102
5103 dsc->cleanup = &cleanup_branch;
5104
5105 return 0;
5106}
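
/* A minimal illustrative sketch (hypothetical helper, not used by GDB): the
   sign extension performed by the sbits () calls above.  The encoded
   immediate is halfword-granular, so it is first shifted left by one;
   encoding T1 then sign-extends from bit 8, encoding T2 from bit 11.  */

static int
example_thumb_b_offset (unsigned short insn, int encoding_t2)
{
  int sign_bit = encoding_t2 ? 11 : 8;
  int value = (insn << 1) & ((1 << (sign_bit + 1)) - 1);

  if (value & (1 << sign_bit))
    value -= 1 << (sign_bit + 1);       /* Sign-extend.  */

  return value;
}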
5107
cca44b1b
JB
5108/* Copy BX/BLX with register-specified destinations. */
5109
7ff120b4
YQ
5110static void
5111install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5112 struct displaced_step_closure *dsc, int link,
5113 unsigned int cond, unsigned int rm)
cca44b1b 5114{
cca44b1b
JB
5115 /* Implement "{BX,BLX}<cond> <reg>" as:
5116
5117 Preparation: cond <- instruction condition
5118 Insn: mov r0, r0 (nop)
5119 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5120
5121 Don't set r14 in cleanup for BX. */
5122
36073a92 5123 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5124
5125 dsc->u.branch.cond = cond;
5126 dsc->u.branch.link = link;
cca44b1b 5127
7ff120b4 5128 dsc->u.branch.exchange = 1;
cca44b1b
JB
5129
5130 dsc->cleanup = &cleanup_branch;
7ff120b4 5131}
cca44b1b 5132
7ff120b4
YQ
5133static int
5134arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5135 struct regcache *regs, struct displaced_step_closure *dsc)
5136{
5137 unsigned int cond = bits (insn, 28, 31);
5138 /* BX: x12xxx1x
5139 BLX: x12xxx3x. */
5140 int link = bit (insn, 5);
5141 unsigned int rm = bits (insn, 0, 3);
5142
5143 if (debug_displaced)
5144 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5145 (unsigned long) insn);
5146
5147 dsc->modinsn[0] = ARM_NOP;
5148
5149 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
cca44b1b
JB
5150 return 0;
5151}
5152
34518530
YQ
5153static int
5154thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5155 struct regcache *regs,
5156 struct displaced_step_closure *dsc)
5157{
5158 int link = bit (insn, 7);
5159 unsigned int rm = bits (insn, 3, 6);
5160
5161 if (debug_displaced)
5162 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5163 (unsigned short) insn);
5164
5165 dsc->modinsn[0] = THUMB_NOP;
5166
5167 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5168
5169 return 0;
5170}
5171
5172
0963b4bd 5173/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
cca44b1b
JB
5174
5175static void
6e39997a 5176cleanup_alu_imm (struct gdbarch *gdbarch,
cca44b1b
JB
5177 struct regcache *regs, struct displaced_step_closure *dsc)
5178{
36073a92 5179 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5180 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5181 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5182 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5183}
5184
5185static int
7ff120b4
YQ
5186arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5187 struct displaced_step_closure *dsc)
cca44b1b
JB
5188{
5189 unsigned int rn = bits (insn, 16, 19);
5190 unsigned int rd = bits (insn, 12, 15);
5191 unsigned int op = bits (insn, 21, 24);
5192 int is_mov = (op == 0xd);
5193 ULONGEST rd_val, rn_val;
cca44b1b
JB
5194
5195 if (!insn_references_pc (insn, 0x000ff000ul))
7ff120b4 5196 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
cca44b1b
JB
5197
5198 if (debug_displaced)
5199 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5200 "%.8lx\n", is_mov ? "move" : "ALU",
5201 (unsigned long) insn);
5202
5203 /* Instruction is of form:
5204
5205 <op><cond> rd, [rn,] #imm
5206
5207 Rewrite as:
5208
5209 Preparation: tmp1, tmp2 <- r0, r1;
5210 r0, r1 <- rd, rn
5211 Insn: <op><cond> r0, r1, #imm
5212 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5213 */
5214
36073a92
YQ
5215 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5216 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5217 rn_val = displaced_read_reg (regs, dsc, rn);
5218 rd_val = displaced_read_reg (regs, dsc, rd);
cca44b1b
JB
5219 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5220 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5221 dsc->rd = rd;
5222
5223 if (is_mov)
5224 dsc->modinsn[0] = insn & 0xfff00fff;
5225 else
5226 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5227
5228 dsc->cleanup = &cleanup_alu_imm;
5229
5230 return 0;
5231}
5232
34518530
YQ
5233static int
5234thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5235 uint16_t insn2, struct regcache *regs,
5236 struct displaced_step_closure *dsc)
5237{
5238 unsigned int op = bits (insn1, 5, 8);
5239 unsigned int rn, rm, rd;
5240 ULONGEST rd_val, rn_val;
5241
5242 rn = bits (insn1, 0, 3); /* Rn */
5243 rm = bits (insn2, 0, 3); /* Rm */
5244 rd = bits (insn2, 8, 11); /* Rd */
5245
5246 /* This routine is only called for the MOV instruction. */
5247 gdb_assert (op == 0x2 && rn == 0xf);
5248
5249 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5250 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5251
5252 if (debug_displaced)
5253 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5254 "ALU", insn1, insn2);
5255
5256 /* Instruction is of form:
5257
5258 <op><cond> rd, [rn,] #imm
5259
5260 Rewrite as:
5261
5262 Preparation: tmp1, tmp2 <- r0, r1;
5263 r0, r1 <- rd, rn
5264 Insn: <op><cond> r0, r1, #imm
5265 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5266 */
5267
5268 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5269 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5270 rn_val = displaced_read_reg (regs, dsc, rn);
5271 rd_val = displaced_read_reg (regs, dsc, rd);
5272 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5273 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5274 dsc->rd = rd;
5275
5276 dsc->modinsn[0] = insn1;
5277 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5278 dsc->numinsns = 2;
5279
5280 dsc->cleanup = &cleanup_alu_imm;
5281
5282 return 0;
5283}
5284
cca44b1b
JB
5285/* Copy/cleanup arithmetic/logic insns with register RHS. */
5286
5287static void
6e39997a 5288cleanup_alu_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5289 struct regcache *regs, struct displaced_step_closure *dsc)
5290{
5291 ULONGEST rd_val;
5292 int i;
5293
36073a92 5294 rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5295
5296 for (i = 0; i < 3; i++)
5297 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5298
5299 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5300}
5301
7ff120b4
YQ
5302static void
5303install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5304 struct displaced_step_closure *dsc,
5305 unsigned int rd, unsigned int rn, unsigned int rm)
cca44b1b 5306{
cca44b1b 5307 ULONGEST rd_val, rn_val, rm_val;
cca44b1b 5308
cca44b1b
JB
5309 /* Instruction is of form:
5310
5311 <op><cond> rd, [rn,] rm [, <shift>]
5312
5313 Rewrite as:
5314
5315 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5316 r0, r1, r2 <- rd, rn, rm
ef713951 5317 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
cca44b1b
JB
5318 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5319 */
5320
36073a92
YQ
5321 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5322 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5323 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5324 rd_val = displaced_read_reg (regs, dsc, rd);
5325 rn_val = displaced_read_reg (regs, dsc, rn);
5326 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5327 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5328 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5329 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5330 dsc->rd = rd;
5331
7ff120b4
YQ
5332 dsc->cleanup = &cleanup_alu_reg;
5333}
5334
5335static int
5336arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5337 struct displaced_step_closure *dsc)
5338{
5339 unsigned int op = bits (insn, 21, 24);
5340 int is_mov = (op == 0xd);
5341
5342 if (!insn_references_pc (insn, 0x000ff00ful))
5343 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5344
5345 if (debug_displaced)
5346 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5347 is_mov ? "move" : "ALU", (unsigned long) insn);
5348
cca44b1b
JB
5349 if (is_mov)
5350 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5351 else
5352 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5353
7ff120b4
YQ
5354 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5355 bits (insn, 0, 3));
cca44b1b
JB
5356 return 0;
5357}
5358
34518530
YQ
5359static int
5360thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5361 struct regcache *regs,
5362 struct displaced_step_closure *dsc)
5363{
ef713951 5364 unsigned rm, rd;
34518530 5365
ef713951
YQ
5366 rm = bits (insn, 3, 6);
5367 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
34518530 5368
ef713951 5369 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
34518530
YQ
5370 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5371
5372 if (debug_displaced)
ef713951
YQ
5373 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5374 (unsigned short) insn);
34518530 5375
ef713951 5376 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
34518530 5377
ef713951 5378 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
34518530
YQ
5379
5380 return 0;
5381}
5382
cca44b1b
JB
5383/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5384
5385static void
6e39997a 5386cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
cca44b1b
JB
5387 struct regcache *regs,
5388 struct displaced_step_closure *dsc)
5389{
36073a92 5390 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
cca44b1b
JB
5391 int i;
5392
5393 for (i = 0; i < 4; i++)
5394 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5395
5396 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5397}
5398
7ff120b4
YQ
5399static void
5400install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5401 struct displaced_step_closure *dsc,
5402 unsigned int rd, unsigned int rn, unsigned int rm,
5403 unsigned rs)
cca44b1b 5404{
7ff120b4 5405 int i;
cca44b1b 5406 ULONGEST rd_val, rn_val, rm_val, rs_val;
cca44b1b 5407
cca44b1b
JB
5408 /* Instruction is of form:
5409
5410 <op><cond> rd, [rn,] rm, <shift> rs
5411
5412 Rewrite as:
5413
5414 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5415 r0, r1, r2, r3 <- rd, rn, rm, rs
5416 Insn: <op><cond> r0, r1, r2, <shift> r3
5417 Cleanup: tmp5 <- r0
5418 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5419 rd <- tmp5
5420 */
5421
5422 for (i = 0; i < 4; i++)
36073a92 5423 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b 5424
36073a92
YQ
5425 rd_val = displaced_read_reg (regs, dsc, rd);
5426 rn_val = displaced_read_reg (regs, dsc, rn);
5427 rm_val = displaced_read_reg (regs, dsc, rm);
5428 rs_val = displaced_read_reg (regs, dsc, rs);
cca44b1b
JB
5429 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5430 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5431 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5432 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5433 dsc->rd = rd;
7ff120b4
YQ
5434 dsc->cleanup = &cleanup_alu_shifted_reg;
5435}
5436
5437static int
5438arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5439 struct regcache *regs,
5440 struct displaced_step_closure *dsc)
5441{
5442 unsigned int op = bits (insn, 21, 24);
5443 int is_mov = (op == 0xd);
5444 unsigned int rd, rn, rm, rs;
5445
5446 if (!insn_references_pc (insn, 0x000fff0ful))
5447 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5448
5449 if (debug_displaced)
5450 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5451 "%.8lx\n", is_mov ? "move" : "ALU",
5452 (unsigned long) insn);
5453
5454 rn = bits (insn, 16, 19);
5455 rm = bits (insn, 0, 3);
5456 rs = bits (insn, 8, 11);
5457 rd = bits (insn, 12, 15);
cca44b1b
JB
5458
5459 if (is_mov)
5460 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5461 else
5462 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5463
7ff120b4 5464 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
cca44b1b
JB
5465
5466 return 0;
5467}
5468
5469/* Clean up load instructions. */
5470
5471static void
6e39997a 5472cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5473 struct displaced_step_closure *dsc)
5474{
5475 ULONGEST rt_val, rt_val2 = 0, rn_val;
cca44b1b 5476
36073a92 5477 rt_val = displaced_read_reg (regs, dsc, 0);
cca44b1b 5478 if (dsc->u.ldst.xfersize == 8)
36073a92
YQ
5479 rt_val2 = displaced_read_reg (regs, dsc, 1);
5480 rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5481
5482 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5483 if (dsc->u.ldst.xfersize > 4)
5484 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5485 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5486 if (!dsc->u.ldst.immed)
5487 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5488
5489 /* Handle register writeback. */
5490 if (dsc->u.ldst.writeback)
5491 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5492 /* Put result in right place. */
5493 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5494 if (dsc->u.ldst.xfersize == 8)
5495 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5496}
5497
5498/* Clean up store instructions. */
5499
5500static void
6e39997a 5501cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
cca44b1b
JB
5502 struct displaced_step_closure *dsc)
5503{
36073a92 5504 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
cca44b1b
JB
5505
5506 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5507 if (dsc->u.ldst.xfersize > 4)
5508 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5509 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5510 if (!dsc->u.ldst.immed)
5511 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5512 if (!dsc->u.ldst.restore_r4)
5513 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5514
5515 /* Writeback. */
5516 if (dsc->u.ldst.writeback)
5517 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5518}
5519
5520/* Copy "extra" load/store instructions. These are halfword/doubleword
5521 transfers, which have a different encoding to byte/word transfers. */
5522
5523static int
550dc4e2 5524arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
7ff120b4 5525 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
5526{
5527 unsigned int op1 = bits (insn, 20, 24);
5528 unsigned int op2 = bits (insn, 5, 6);
5529 unsigned int rt = bits (insn, 12, 15);
5530 unsigned int rn = bits (insn, 16, 19);
5531 unsigned int rm = bits (insn, 0, 3);
5532 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5533 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5534 int immed = (op1 & 0x4) != 0;
5535 int opcode;
5536 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
cca44b1b
JB
5537
5538 if (!insn_references_pc (insn, 0x000ff00ful))
7ff120b4 5539 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
cca44b1b
JB
5540
5541 if (debug_displaced)
5542 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
550dc4e2 5543 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
cca44b1b
JB
5544 (unsigned long) insn);
5545
5546 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5547
5548 if (opcode < 0)
5549 internal_error (__FILE__, __LINE__,
5550 _("copy_extra_ld_st: instruction decode error"));
5551
36073a92
YQ
5552 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5553 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5554 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5555 if (!immed)
36073a92 5556 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5557
36073a92 5558 rt_val = displaced_read_reg (regs, dsc, rt);
cca44b1b 5559 if (bytesize[opcode] == 8)
36073a92
YQ
5560 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5561 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5562 if (!immed)
36073a92 5563 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5564
5565 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5566 if (bytesize[opcode] == 8)
5567 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5568 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5569 if (!immed)
5570 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5571
5572 dsc->rd = rt;
5573 dsc->u.ldst.xfersize = bytesize[opcode];
5574 dsc->u.ldst.rn = rn;
5575 dsc->u.ldst.immed = immed;
5576 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5577 dsc->u.ldst.restore_r4 = 0;
5578
5579 if (immed)
5580 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5581 ->
5582 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5583 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5584 else
5585 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5586 ->
5587 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5588 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5589
5590 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5591
5592 return 0;
5593}
5594
0f6f04ba 5595/* Copy byte/halfword/word loads and stores. */
cca44b1b 5596
7ff120b4 5597static void
0f6f04ba
YQ
5598install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5599 struct displaced_step_closure *dsc, int load,
5600 int immed, int writeback, int size, int usermode,
5601 int rt, int rm, int rn)
cca44b1b 5602{
cca44b1b 5603 ULONGEST rt_val, rn_val, rm_val = 0;
cca44b1b 5604
36073a92
YQ
5605 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5606 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
cca44b1b 5607 if (!immed)
36073a92 5608 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
cca44b1b 5609 if (!load)
36073a92 5610 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
cca44b1b 5611
36073a92
YQ
5612 rt_val = displaced_read_reg (regs, dsc, rt);
5613 rn_val = displaced_read_reg (regs, dsc, rn);
cca44b1b 5614 if (!immed)
36073a92 5615 rm_val = displaced_read_reg (regs, dsc, rm);
cca44b1b
JB
5616
5617 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5618 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5619 if (!immed)
5620 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
cca44b1b 5621 dsc->rd = rt;
0f6f04ba 5622 dsc->u.ldst.xfersize = size;
cca44b1b
JB
5623 dsc->u.ldst.rn = rn;
5624 dsc->u.ldst.immed = immed;
7ff120b4 5625 dsc->u.ldst.writeback = writeback;
cca44b1b
JB
5626
5627 /* To write PC we can do:
5628
494e194e
YQ
5629 Before this sequence of instructions:
5630 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5631 r2 is the Rn value obtained from displaced_read_reg.
5632
5633 Insn1: push {pc} Write address of STR instruction + offset on stack
5634 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5635 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5636 = addr(Insn1) + offset - addr(Insn3) - 8
5637 = offset - 16
5638 Insn4: add r4, r4, #8 r4 = offset - 8
5639 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5640 = from + offset
5641 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
cca44b1b
JB
5642
5643 Otherwise we don't know what value to write for PC, since the offset is
494e194e
YQ
5644 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5645 of this can be found in Section "Saving from r15" in
5646 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
cca44b1b 5647
7ff120b4
YQ
5648 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5649}
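
/* A minimal illustrative sketch (hypothetical helper, not used by GDB): the
   value left in r0 by the six-instruction sequence described in the comment
   above, assuming PC reads as the address of the current instruction plus 8
   in ARM state.  stored_pc_offset stands for the architecture-dependent
   offset (8 or 12) that a str of PC uses on the running core.  */

static unsigned int
example_stored_pc_value (unsigned int from, unsigned int stored_pc_offset)
{
  unsigned int r0 = from + 8;                   /* PC as read by Insn1.  */
  unsigned int r4 = stored_pc_offset - 16;      /* After Insn3 (sub).  */

  r4 += 8;                                      /* Insn4: add r4, r4, #8.  */
  return r0 + r4;                               /* Insn5: from + offset.  */
}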
5650
34518530
YQ
5651
5652static int
5653thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5654 uint16_t insn2, struct regcache *regs,
5655 struct displaced_step_closure *dsc, int size)
5656{
5657 unsigned int u_bit = bit (insn1, 7);
5658 unsigned int rt = bits (insn2, 12, 15);
5659 int imm12 = bits (insn2, 0, 11);
5660 ULONGEST pc_val;
5661
5662 if (debug_displaced)
5663 fprintf_unfiltered (gdb_stdlog,
5664 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5665 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5666 imm12);
5667
5668 if (!u_bit)
5669 imm12 = -1 * imm12;
5670
5671 /* Rewrite instruction LDR Rt imm12 into:
5672
5673 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5674
5675 LDR R0, [R2, R3]
5676
5677 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5678
5679
5680 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5681 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5682 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5683
5684 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5685
5686 pc_val = pc_val & 0xfffffffc;
5687
5688 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5689 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5690
5691 dsc->rd = rt;
5692
5693 dsc->u.ldst.xfersize = size;
5694 dsc->u.ldst.immed = 0;
5695 dsc->u.ldst.writeback = 0;
5696 dsc->u.ldst.restore_r4 = 0;
5697
5698 /* LDR R0, [R2, R3] */
5699 dsc->modinsn[0] = 0xf852;
5700 dsc->modinsn[1] = 0x3;
5701 dsc->numinsns = 2;
5702
5703 dsc->cleanup = &cleanup_load;
5704
5705 return 0;
5706}
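
/* A minimal illustrative sketch (hypothetical helper, not used by GDB): the
   effective address the rewritten "ldr r0, [r2, r3]" above ends up loading
   from, given the literal-load semantics emulated by the copy routine: the
   base is Align (PC, 4), and imm12 is added or subtracted per the U bit.  */

static unsigned int
example_ldr_literal_address (unsigned int pc_val, unsigned int imm12,
                             int u_bit)
{
  unsigned int base = pc_val & ~3u;     /* Align (PC, 4).  */

  return u_bit ? base + imm12 : base - imm12;
}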
5707
5708static int
5709thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5710 uint16_t insn2, struct regcache *regs,
5711 struct displaced_step_closure *dsc,
5712 int writeback, int immed)
5713{
5714 unsigned int rt = bits (insn2, 12, 15);
5715 unsigned int rn = bits (insn1, 0, 3);
5716 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5717 /* In LDR (register), there is also a register Rm, which is not allowed to
5718 be PC, so we don't have to check it. */
5719
5720 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5721 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5722 dsc);
5723
5724 if (debug_displaced)
5725 fprintf_unfiltered (gdb_stdlog,
5726 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5727 rt, rn, insn1, insn2);
5728
5729 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5730 0, rt, rm, rn);
5731
5732 dsc->u.ldst.restore_r4 = 0;
5733
5734 if (immed)
5735 /* ldr[b]<cond> rt, [rn, #imm], etc.
5736 ->
5737 ldr[b]<cond> r0, [r2, #imm]. */
5738 {
5739 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5740 dsc->modinsn[1] = insn2 & 0x0fff;
5741 }
5742 else
5743 /* ldr[b]<cond> rt, [rn, rm], etc.
5744 ->
5745 ldr[b]<cond> r0, [r2, r3]. */
5746 {
5747 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5748 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5749 }
5750
5751 dsc->numinsns = 2;
5752
5753 return 0;
5754}
5755
5756
7ff120b4
YQ
5757static int
5758arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5759 struct regcache *regs,
5760 struct displaced_step_closure *dsc,
0f6f04ba 5761 int load, int size, int usermode)
7ff120b4
YQ
5762{
5763 int immed = !bit (insn, 25);
5764 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5765 unsigned int rt = bits (insn, 12, 15);
5766 unsigned int rn = bits (insn, 16, 19);
5767 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5768
5769 if (!insn_references_pc (insn, 0x000ff00ful))
5770 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5771
5772 if (debug_displaced)
5773 fprintf_unfiltered (gdb_stdlog,
5774 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
0f6f04ba
YQ
5775 load ? (size == 1 ? "ldrb" : "ldr")
5776 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
7ff120b4
YQ
5777 rt, rn,
5778 (unsigned long) insn);
5779
0f6f04ba
YQ
5780 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5781 usermode, rt, rm, rn);
7ff120b4 5782
bf9f652a 5783 if (load || rt != ARM_PC_REGNUM)
cca44b1b
JB
5784 {
5785 dsc->u.ldst.restore_r4 = 0;
5786
5787 if (immed)
5788 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5789 ->
5790 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5791 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5792 else
5793 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5794 ->
5795 {ldr,str}[b]<cond> r0, [r2, r3]. */
5796 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5797 }
5798 else
5799 {
5800 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5801 dsc->u.ldst.restore_r4 = 1;
494e194e
YQ
5802 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5803 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
cca44b1b
JB
5804 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5805 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5806 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5807
5808 /* As above. */
5809 if (immed)
5810 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5811 else
5812 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5813
cca44b1b
JB
5814 dsc->numinsns = 6;
5815 }
5816
5817 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5818
5819 return 0;
5820}
5821
5822/* Cleanup LDM instructions with fully-populated register list. This is an
5823 unfortunate corner case: it's impossible to implement correctly by modifying
5824 the instruction. The issue is as follows: we have an instruction,
5825
5826 ldm rN, {r0-r15}
5827
5828 which we must rewrite to avoid loading PC. A possible solution would be to
5829 do the load in two halves, something like (with suitable cleanup
5830 afterwards):
5831
5832 mov r8, rN
5833 ldm[id][ab] r8!, {r0-r7}
5834 str r7, <temp>
5835 ldm[id][ab] r8, {r7-r14}
5836 <bkpt>
5837
5838 but at present there's no suitable place for <temp>, since the scratch space
5839 is overwritten before the cleanup routine is called. For now, we simply
5840 emulate the instruction. */
5841
5842static void
5843cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5844 struct displaced_step_closure *dsc)
5845{
cca44b1b
JB
5846 int inc = dsc->u.block.increment;
5847 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5848 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5849 uint32_t regmask = dsc->u.block.regmask;
5850 int regno = inc ? 0 : 15;
5851 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5852 int exception_return = dsc->u.block.load && dsc->u.block.user
5853 && (regmask & 0x8000) != 0;
36073a92 5854 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5855 int do_transfer = condition_true (dsc->u.block.cond, status);
5856 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5857
5858 if (!do_transfer)
5859 return;
5860
5861 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5862 sensible we can do here. Complain loudly. */
5863 if (exception_return)
5864 error (_("Cannot single-step exception return"));
5865
5866 /* We don't handle any stores here for now. */
5867 gdb_assert (dsc->u.block.load != 0);
5868
5869 if (debug_displaced)
5870 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5871 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5872 dsc->u.block.increment ? "inc" : "dec",
5873 dsc->u.block.before ? "before" : "after");
5874
5875 while (regmask)
5876 {
5877 uint32_t memword;
5878
5879 if (inc)
bf9f652a 5880 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
cca44b1b
JB
5881 regno++;
5882 else
5883 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5884 regno--;
5885
5886 xfer_addr += bump_before;
5887
5888 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5889 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5890
5891 xfer_addr += bump_after;
5892
5893 regmask &= ~(1 << regno);
5894 }
5895
5896 if (dsc->u.block.writeback)
5897 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5898 CANNOT_WRITE_PC);
5899}
5900
5901/* Clean up an STM which included the PC in the register list. */
5902
5903static void
5904cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5905 struct displaced_step_closure *dsc)
5906{
36073a92 5907 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
cca44b1b
JB
5908 int store_executed = condition_true (dsc->u.block.cond, status);
5909 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5910 CORE_ADDR stm_insn_addr;
5911 uint32_t pc_val;
5912 long offset;
5913 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5914
5915 /* If the condition check failed, there's nothing else to do. */
5916 if (!store_executed)
5917 return;
5918
5919 if (dsc->u.block.increment)
5920 {
5921 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5922
5923 if (dsc->u.block.before)
5924 pc_stored_at += 4;
5925 }
5926 else
5927 {
5928 pc_stored_at = dsc->u.block.xfer_addr;
5929
5930 if (dsc->u.block.before)
5931 pc_stored_at -= 4;
5932 }
5933
5934 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5935 stm_insn_addr = dsc->scratch_base;
5936 offset = pc_val - stm_insn_addr;
5937
5938 if (debug_displaced)
5939 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5940 "STM instruction\n", offset);
5941
5942 /* Rewrite the stored PC to the proper value for the non-displaced original
5943 instruction. */
5944 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5945 dsc->insn_addr + offset);
5946}
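
/* A minimal illustrative sketch (hypothetical helper, not used by GDB): the
   fix-up applied above.  Whatever offset from the displaced copy of the STM
   the CPU happened to store (detected by subtracting the scratch address),
   the same offset is re-applied to the original instruction address.  */

static unsigned int
example_fixed_stored_pc (unsigned int stored_pc, unsigned int scratch_base,
                         unsigned int original_insn_addr)
{
  long offset = (long) stored_pc - (long) scratch_base;

  return original_insn_addr + offset;
}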
5947
5948/* Clean up an LDM which includes the PC in the register list. We clumped all
5949 the registers in the transferred list into a contiguous range r0...rX (to
5950 avoid loading PC directly and losing control of the debugged program), so we
5951 must undo that here. */
5952
5953static void
6e39997a 5954cleanup_block_load_pc (struct gdbarch *gdbarch,
cca44b1b
JB
5955 struct regcache *regs,
5956 struct displaced_step_closure *dsc)
5957{
36073a92 5958 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
22e048c9 5959 int load_executed = condition_true (dsc->u.block.cond, status);
bf9f652a 5960 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
cca44b1b
JB
5961 unsigned int regs_loaded = bitcount (mask);
5962 unsigned int num_to_shuffle = regs_loaded, clobbered;
5963
5964 /* The method employed here will fail if the register list is fully populated
5965 (we need to avoid loading PC directly). */
5966 gdb_assert (num_to_shuffle < 16);
5967
5968 if (!load_executed)
5969 return;
5970
5971 clobbered = (1 << num_to_shuffle) - 1;
5972
5973 while (num_to_shuffle > 0)
5974 {
5975 if ((mask & (1 << write_reg)) != 0)
5976 {
5977 unsigned int read_reg = num_to_shuffle - 1;
5978
5979 if (read_reg != write_reg)
5980 {
36073a92 5981 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
cca44b1b
JB
5982 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5983 if (debug_displaced)
5984 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5985 "loaded register r%d to r%d\n"), read_reg,
5986 write_reg);
5987 }
5988 else if (debug_displaced)
5989 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5990 "r%d already in the right place\n"),
5991 write_reg);
5992
5993 clobbered &= ~(1 << write_reg);
5994
5995 num_to_shuffle--;
5996 }
5997
5998 write_reg--;
5999 }
6000
6001 /* Restore any registers we scribbled over. */
6002 for (write_reg = 0; clobbered != 0; write_reg++)
6003 {
6004 if ((clobbered & (1 << write_reg)) != 0)
6005 {
6006 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6007 CANNOT_WRITE_PC);
6008 if (debug_displaced)
6009 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6010 "clobbered register r%d\n"), write_reg);
6011 clobbered &= ~(1 << write_reg);
6012 }
6013 }
6014
6015 /* Perform register writeback manually. */
6016 if (dsc->u.block.writeback)
6017 {
6018 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6019
6020 if (dsc->u.block.increment)
6021 new_rn_val += regs_loaded * 4;
6022 else
6023 new_rn_val -= regs_loaded * 4;
6024
6025 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6026 CANNOT_WRITE_PC);
6027 }
6028}
6029
6030/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6031 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6032
6033static int
7ff120b4
YQ
6034arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6035 struct regcache *regs,
6036 struct displaced_step_closure *dsc)
cca44b1b
JB
6037{
6038 int load = bit (insn, 20);
6039 int user = bit (insn, 22);
6040 int increment = bit (insn, 23);
6041 int before = bit (insn, 24);
6042 int writeback = bit (insn, 21);
6043 int rn = bits (insn, 16, 19);
cca44b1b 6044
0963b4bd
MS
6045 /* Block transfers which don't mention PC can be run directly
6046 out-of-line. */
bf9f652a 6047 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7ff120b4 6048 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
cca44b1b 6049
bf9f652a 6050 if (rn == ARM_PC_REGNUM)
cca44b1b 6051 {
0963b4bd
MS
6052 warning (_("displaced: Unpredictable LDM or STM with "
6053 "base register r15"));
7ff120b4 6054 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
cca44b1b
JB
6055 }
6056
6057 if (debug_displaced)
6058 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6059 "%.8lx\n", (unsigned long) insn);
6060
36073a92 6061 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b
JB
6062 dsc->u.block.rn = rn;
6063
6064 dsc->u.block.load = load;
6065 dsc->u.block.user = user;
6066 dsc->u.block.increment = increment;
6067 dsc->u.block.before = before;
6068 dsc->u.block.writeback = writeback;
6069 dsc->u.block.cond = bits (insn, 28, 31);
6070
6071 dsc->u.block.regmask = insn & 0xffff;
6072
6073 if (load)
6074 {
6075 if ((insn & 0xffff) == 0xffff)
6076 {
6077 /* LDM with a fully-populated register list. This case is
6078 particularly tricky. Implement for now by fully emulating the
6079 instruction (which might not behave perfectly in all cases, but
6080 these instructions should be rare enough for that not to matter
6081 too much). */
6082 dsc->modinsn[0] = ARM_NOP;
6083
6084 dsc->cleanup = &cleanup_block_load_all;
6085 }
6086 else
6087 {
6088 /* LDM of a list of registers which includes PC. Implement by
6089 rewriting the list of registers to be transferred into a
6090 contiguous chunk r0...rX before doing the transfer, then shuffling
6091 registers into the correct places in the cleanup routine. */
6092 unsigned int regmask = insn & 0xffff;
bec2ab5a
SM
6093 unsigned int num_in_list = bitcount (regmask), new_regmask;
6094 unsigned int i;
cca44b1b
JB
6095
6096 for (i = 0; i < num_in_list; i++)
36073a92 6097 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
cca44b1b
JB
6098
6099 /* Writeback makes things complicated. We need to avoid clobbering
6100 the base register with one of the registers in our modified
6101 register list, but just using a different register can't work in
6102 all cases, e.g.:
6103
6104 ldm r14!, {r0-r13,pc}
6105
6106 which would need to be rewritten as:
6107
6108 ldm rN!, {r0-r14}
6109
6110 but that can't work, because there's no free register for N.
6111
6112 Solve this by turning off the writeback bit, and emulating
6113 writeback manually in the cleanup routine. */
6114
6115 if (writeback)
6116 insn &= ~(1 << 21);
6117
6118 new_regmask = (1 << num_in_list) - 1;
6119
6120 if (debug_displaced)
6121 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6122 "{..., pc}: original reg list %.4x, modified "
6123 "list %.4x\n"), rn, writeback ? "!" : "",
6124 (int) insn & 0xffff, new_regmask);
6125
6126 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6127
6128 dsc->cleanup = &cleanup_block_load_pc;
6129 }
6130 }
6131 else
6132 {
6133 /* STM of a list of registers which includes PC. Run the instruction
6134 as-is, but out of line: this will store the wrong value for the PC,
6135 so we must manually fix up the memory in the cleanup routine.
6136 Doing things this way has the advantage that we can auto-detect
6137 the offset of the PC write (which is architecture-dependent) in
6138 the cleanup routine. */
6139 dsc->modinsn[0] = insn;
6140
6141 dsc->cleanup = &cleanup_block_store_pc;
6142 }
6143
6144 return 0;
6145}
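
/* A minimal illustrative sketch (hypothetical helper, not used by GDB): the
   register-list rewrite used above for an LDM whose list includes PC.  A
   list of N registers is replaced by the contiguous list r0...r(N-1), and
   cleanup_block_load_pc later shuffles the loaded values into their real
   destinations.  */

static unsigned int
example_contiguous_regmask (unsigned int regmask)
{
  unsigned int count = 0;

  while (regmask != 0)
    {
      count += regmask & 1;
      regmask >>= 1;
    }

  return (1u << count) - 1;
}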
6146
34518530
YQ
6147static int
6148thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6149 struct regcache *regs,
6150 struct displaced_step_closure *dsc)
cca44b1b 6151{
34518530
YQ
6152 int rn = bits (insn1, 0, 3);
6153 int load = bit (insn1, 4);
6154 int writeback = bit (insn1, 5);
cca44b1b 6155
34518530
YQ
6156 /* Block transfers which don't mention PC can be run directly
6157 out-of-line. */
6158 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6159 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7ff120b4 6160
34518530
YQ
6161 if (rn == ARM_PC_REGNUM)
6162 {
6163 warning (_("displaced: Unpredictable LDM or STM with "
6164 "base register r15"));
6165 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6166 "unpredictable ldm/stm", dsc);
6167 }
cca44b1b
JB
6168
6169 if (debug_displaced)
34518530
YQ
6170 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6171 "%.4x%.4x\n", insn1, insn2);
cca44b1b 6172
34518530
YQ
6173 /* Clear bit 13, since it should always be zero. */
6174 dsc->u.block.regmask = (insn2 & 0xdfff);
6175 dsc->u.block.rn = rn;
cca44b1b 6176
34518530
YQ
6177 dsc->u.block.load = load;
6178 dsc->u.block.user = 0;
6179 dsc->u.block.increment = bit (insn1, 7);
6180 dsc->u.block.before = bit (insn1, 8);
6181 dsc->u.block.writeback = writeback;
6182 dsc->u.block.cond = INST_AL;
6183 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
cca44b1b 6184
34518530
YQ
6185 if (load)
6186 {
6187 if (dsc->u.block.regmask == 0xffff)
6188 {
6189 /* This branch should be unreachable. */
6190 gdb_assert (0);
6191 }
6192 else
6193 {
6194 unsigned int regmask = dsc->u.block.regmask;
bec2ab5a
SM
6195 unsigned int num_in_list = bitcount (regmask), new_regmask;
6196 unsigned int i;
34518530
YQ
6197
6198 for (i = 0; i < num_in_list; i++)
6199 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6200
6201 if (writeback)
6202 insn1 &= ~(1 << 5);
6203
6204 new_regmask = (1 << num_in_list) - 1;
6205
6206 if (debug_displaced)
6207 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6208 "{..., pc}: original reg list %.4x, modified "
6209 "list %.4x\n"), rn, writeback ? "!" : "",
6210 (int) dsc->u.block.regmask, new_regmask);
6211
6212 dsc->modinsn[0] = insn1;
6213 dsc->modinsn[1] = (new_regmask & 0xffff);
6214 dsc->numinsns = 2;
6215
6216 dsc->cleanup = &cleanup_block_load_pc;
6217 }
6218 }
6219 else
6220 {
6221 dsc->modinsn[0] = insn1;
6222 dsc->modinsn[1] = insn2;
6223 dsc->numinsns = 2;
6224 dsc->cleanup = &cleanup_block_store_pc;
6225 }
6226 return 0;
6227}
6228
d9311bfa
AT
6229/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6230 This is used to avoid a dependency on BFD's bfd_endian enum. */
6231
6232ULONGEST
6233arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6234 int byte_order)
6235{
5f2dfcfd
AT
6236 return read_memory_unsigned_integer (memaddr, len,
6237 (enum bfd_endian) byte_order);
d9311bfa
AT
6238}
6239
6240/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6241
6242CORE_ADDR
6243arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6244 CORE_ADDR val)
6245{
6246 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6247}
6248
6249/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6250
e7cf25a8 6251static CORE_ADDR
553cb527 6252arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
d9311bfa 6253{
d9311bfa
AT
6254 return 0;
6255}
6256
6257/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6258
6259int
6260arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6261{
6262 return arm_is_thumb (self->regcache);
6263}
6264
6265/* single_step() is called just before we want to resume the inferior,
6266 if we want to single-step it but there is no hardware or kernel
6267 single-step support.  We find the targets of the upcoming instructions
6268 and set breakpoints on them. */
6269
6270int
6271arm_software_single_step (struct frame_info *frame)
6272{
6273 struct regcache *regcache = get_current_regcache ();
6274 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6275 struct address_space *aspace = get_regcache_aspace (regcache);
6276 struct arm_get_next_pcs next_pcs_ctx;
6277 CORE_ADDR pc;
6278 int i;
6279 VEC (CORE_ADDR) *next_pcs = NULL;
6280 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6281
6282 arm_get_next_pcs_ctor (&next_pcs_ctx,
6283 &arm_get_next_pcs_ops,
6284 gdbarch_byte_order (gdbarch),
6285 gdbarch_byte_order_for_code (gdbarch),
1b451dda 6286 0,
d9311bfa
AT
6287 regcache);
6288
4d18591b 6289 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
d9311bfa
AT
6290
6291 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6292 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6293
6294 do_cleanups (old_chain);
6295
6296 return 1;
6297}
6298
34518530
YQ
6299/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6300 for Linux, where some SVC instructions must be treated specially. */
6301
6302static void
6303cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6304 struct displaced_step_closure *dsc)
6305{
6306 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6307
6308 if (debug_displaced)
6309 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6310 "%.8lx\n", (unsigned long) resume_addr);
6311
6312 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6313}
6314
6315
6316/* Common copy routine for the svc instruction. */
6317
6318static int
6319install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6320 struct displaced_step_closure *dsc)
6321{
6322 /* Preparation: none.
6323 Insn: unmodified svc.
6324 Cleanup: pc <- insn_addr + insn_size. */
6325
6326 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6327 instruction. */
6328 dsc->wrote_to_pc = 1;
6329
6330 /* Allow OS-specific code to override SVC handling. */
bd18283a
YQ
6331 if (dsc->u.svc.copy_svc_os)
6332 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6333 else
6334 {
6335 dsc->cleanup = &cleanup_svc;
6336 return 0;
6337 }
34518530
YQ
6338}
6339
6340static int
6341arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6342 struct regcache *regs, struct displaced_step_closure *dsc)
6343{
6344
6345 if (debug_displaced)
6346 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6347 (unsigned long) insn);
6348
6349 dsc->modinsn[0] = insn;
6350
6351 return install_svc (gdbarch, regs, dsc);
6352}
6353
6354static int
6355thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6356 struct regcache *regs, struct displaced_step_closure *dsc)
6357{
6358
6359 if (debug_displaced)
6360 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6361 insn);
bd18283a 6362
34518530
YQ
6363 dsc->modinsn[0] = insn;
6364
6365 return install_svc (gdbarch, regs, dsc);
cca44b1b
JB
6366}
6367
6368/* Copy undefined instructions. */
6369
6370static int
7ff120b4
YQ
6371arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6372 struct displaced_step_closure *dsc)
cca44b1b
JB
6373{
6374 if (debug_displaced)
0963b4bd
MS
6375 fprintf_unfiltered (gdb_stdlog,
6376 "displaced: copying undefined insn %.8lx\n",
cca44b1b
JB
6377 (unsigned long) insn);
6378
6379 dsc->modinsn[0] = insn;
6380
6381 return 0;
6382}
6383
34518530
YQ
6384static int
6385thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6386 struct displaced_step_closure *dsc)
6387{
6388
6389 if (debug_displaced)
6390 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6391 "%.4x %.4x\n", (unsigned short) insn1,
6392 (unsigned short) insn2);
6393
6394 dsc->modinsn[0] = insn1;
6395 dsc->modinsn[1] = insn2;
6396 dsc->numinsns = 2;
6397
6398 return 0;
6399}
6400
cca44b1b
JB
6401/* Copy unpredictable instructions. */
6402
6403static int
7ff120b4
YQ
6404arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6405 struct displaced_step_closure *dsc)
cca44b1b
JB
6406{
6407 if (debug_displaced)
6408 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6409 "%.8lx\n", (unsigned long) insn);
6410
6411 dsc->modinsn[0] = insn;
6412
6413 return 0;
6414}
6415
6416/* The decode_* functions are instruction decoding helpers. They mostly follow
6417 the presentation in the ARM ARM. */
6418
6419static int
7ff120b4
YQ
6420arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6421 struct regcache *regs,
6422 struct displaced_step_closure *dsc)
cca44b1b
JB
6423{
6424 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6425 unsigned int rn = bits (insn, 16, 19);
6426
6427 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7ff120b4 6428 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
cca44b1b 6429 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7ff120b4 6430 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
cca44b1b 6431 else if ((op1 & 0x60) == 0x20)
7ff120b4 6432 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
cca44b1b 6433 else if ((op1 & 0x71) == 0x40)
7ff120b4
YQ
6434 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6435 dsc);
cca44b1b 6436 else if ((op1 & 0x77) == 0x41)
7ff120b4 6437 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6438 else if ((op1 & 0x77) == 0x45)
7ff120b4 6439 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
cca44b1b
JB
6440 else if ((op1 & 0x77) == 0x51)
6441 {
6442 if (rn != 0xf)
7ff120b4 6443 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b 6444 else
7ff120b4 6445 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6446 }
6447 else if ((op1 & 0x77) == 0x55)
7ff120b4 6448 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
cca44b1b
JB
6449 else if (op1 == 0x57)
6450 switch (op2)
6451 {
7ff120b4
YQ
6452 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6453 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6454 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6455 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6456 default: return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6457 }
6458 else if ((op1 & 0x63) == 0x43)
7ff120b4 6459 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b
JB
6460 else if ((op2 & 0x1) == 0x0)
6461 switch (op1 & ~0x80)
6462 {
6463 case 0x61:
7ff120b4 6464 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
cca44b1b 6465 case 0x65:
7ff120b4 6466 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
cca44b1b
JB
6467 case 0x71: case 0x75:
6468 /* pld/pldw reg. */
7ff120b4 6469 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
cca44b1b 6470 case 0x63: case 0x67: case 0x73: case 0x77:
7ff120b4 6471 return arm_copy_unpred (gdbarch, insn, dsc);
cca44b1b 6472 default:
7ff120b4 6473 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6474 }
6475 else
7ff120b4 6476 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
cca44b1b
JB
6477}
6478
6479static int
7ff120b4
YQ
6480arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6481 struct regcache *regs,
6482 struct displaced_step_closure *dsc)
cca44b1b
JB
6483{
6484 if (bit (insn, 27) == 0)
7ff120b4 6485 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
cca44b1b
JB
6486 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6487 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6488 {
6489 case 0x0: case 0x2:
7ff120b4 6490 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
cca44b1b
JB
6491
6492 case 0x1: case 0x3:
7ff120b4 6493 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
cca44b1b
JB
6494
6495 case 0x4: case 0x5: case 0x6: case 0x7:
7ff120b4 6496 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b
JB
6497
6498 case 0x8:
6499 switch ((insn & 0xe00000) >> 21)
6500 {
6501 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6502 /* stc/stc2. */
7ff120b4 6503 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6504
6505 case 0x2:
7ff120b4 6506 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b
JB
6507
6508 default:
7ff120b4 6509 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6510 }
6511
6512 case 0x9:
6513 {
6514 int rn_f = (bits (insn, 16, 19) == 0xf);
6515 switch ((insn & 0xe00000) >> 21)
6516 {
6517 case 0x1: case 0x3:
6518 /* ldc/ldc2 imm (undefined for rn == pc). */
7ff120b4
YQ
6519 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6520 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6521
6522 case 0x2:
7ff120b4 6523 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6524
6525 case 0x4: case 0x5: case 0x6: case 0x7:
6526 /* ldc/ldc2 lit (undefined for rn != pc). */
7ff120b4
YQ
6527 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6528 : arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6529
6530 default:
7ff120b4 6531 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6532 }
6533 }
6534
6535 case 0xa:
7ff120b4 6536 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
cca44b1b
JB
6537
6538 case 0xb:
6539 if (bits (insn, 16, 19) == 0xf)
6540 /* ldc/ldc2 lit. */
7ff120b4 6541 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6542 else
7ff120b4 6543 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6544
6545 case 0xc:
6546 if (bit (insn, 4))
7ff120b4 6547 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6548 else
7ff120b4 6549 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6550
6551 case 0xd:
6552 if (bit (insn, 4))
7ff120b4 6553 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6554 else
7ff120b4 6555 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6556
6557 default:
7ff120b4 6558 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6559 }
6560}
6561
6562/* Decode miscellaneous instructions in dp/misc encoding space. */
6563
6564static int
7ff120b4
YQ
6565arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6566 struct regcache *regs,
6567 struct displaced_step_closure *dsc)
cca44b1b
JB
6568{
6569 unsigned int op2 = bits (insn, 4, 6);
6570 unsigned int op = bits (insn, 21, 22);
cca44b1b
JB
6571
6572 switch (op2)
6573 {
6574 case 0x0:
7ff120b4 6575 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
cca44b1b
JB
6576
6577 case 0x1:
6578 if (op == 0x1) /* bx. */
7ff120b4 6579 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
cca44b1b 6580 else if (op == 0x3)
7ff120b4 6581 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
cca44b1b 6582 else
7ff120b4 6583 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6584
6585 case 0x2:
6586 if (op == 0x1)
6587 /* Not really supported. */
7ff120b4 6588 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
cca44b1b 6589 else
7ff120b4 6590 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6591
6592 case 0x3:
6593 if (op == 0x1)
7ff120b4 6594 return arm_copy_bx_blx_reg (gdbarch, insn,
0963b4bd 6595 regs, dsc); /* blx register. */
cca44b1b 6596 else
7ff120b4 6597 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6598
6599 case 0x5:
7ff120b4 6600 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
cca44b1b
JB
6601
6602 case 0x7:
6603 if (op == 0x1)
7ff120b4 6604 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
cca44b1b
JB
6605 else if (op == 0x3)
6606 /* Not really supported. */
7ff120b4 6607 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
cca44b1b
JB
6608
6609 default:
7ff120b4 6610 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6611 }
6612}
6613
6614static int
7ff120b4
YQ
6615arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6616 struct regcache *regs,
6617 struct displaced_step_closure *dsc)
cca44b1b
JB
6618{
6619 if (bit (insn, 25))
6620 switch (bits (insn, 20, 24))
6621 {
6622 case 0x10:
7ff120b4 6623 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
cca44b1b
JB
6624
6625 case 0x14:
7ff120b4 6626 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
cca44b1b
JB
6627
6628 case 0x12: case 0x16:
7ff120b4 6629 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
cca44b1b
JB
6630
6631 default:
7ff120b4 6632 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
cca44b1b
JB
6633 }
6634 else
6635 {
6636 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6637
6638 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7ff120b4 6639 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
cca44b1b 6640 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7ff120b4 6641 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
cca44b1b 6642 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7ff120b4 6643 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
cca44b1b 6644 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7ff120b4 6645 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
cca44b1b 6646 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7ff120b4 6647 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
cca44b1b 6648 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7ff120b4 6649 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
cca44b1b 6650 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
550dc4e2 6651 /* 2nd arg means "unprivileged". */
7ff120b4
YQ
6652 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6653 dsc);
cca44b1b
JB
6654 }
6655
6656 /* Should be unreachable. */
6657 return 1;
6658}
6659
6660static int
7ff120b4
YQ
6661arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6662 struct regcache *regs,
6663 struct displaced_step_closure *dsc)
cca44b1b
JB
6664{
6665 int a = bit (insn, 25), b = bit (insn, 4);
6666 uint32_t op1 = bits (insn, 20, 24);
cca44b1b
JB
6667
6668 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6669 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
0f6f04ba 6670 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
cca44b1b
JB
6671 else if ((!a && (op1 & 0x17) == 0x02)
6672 || (a && (op1 & 0x17) == 0x02 && !b))
0f6f04ba 6673 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
cca44b1b
JB
6674 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6675 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
0f6f04ba 6676 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
cca44b1b
JB
6677 else if ((!a && (op1 & 0x17) == 0x03)
6678 || (a && (op1 & 0x17) == 0x03 && !b))
0f6f04ba 6679 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
cca44b1b
JB
6680 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6681 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7ff120b4 6682 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
cca44b1b
JB
6683 else if ((!a && (op1 & 0x17) == 0x06)
6684 || (a && (op1 & 0x17) == 0x06 && !b))
7ff120b4 6685 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
cca44b1b
JB
6686 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6687 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7ff120b4 6688 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
cca44b1b
JB
6689 else if ((!a && (op1 & 0x17) == 0x07)
6690 || (a && (op1 & 0x17) == 0x07 && !b))
7ff120b4 6691 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
cca44b1b
JB
6692
6693 /* Should be unreachable. */
6694 return 1;
6695}
6696
6697static int
7ff120b4
YQ
6698arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6699 struct displaced_step_closure *dsc)
cca44b1b
JB
6700{
6701 switch (bits (insn, 20, 24))
6702 {
6703 case 0x00: case 0x01: case 0x02: case 0x03:
7ff120b4 6704 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
cca44b1b
JB
6705
6706 case 0x04: case 0x05: case 0x06: case 0x07:
7ff120b4 6707 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
cca44b1b
JB
6708
6709 case 0x08: case 0x09: case 0x0a: case 0x0b:
6710 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7ff120b4 6711 return arm_copy_unmodified (gdbarch, insn,
cca44b1b
JB
6712 "decode/pack/unpack/saturate/reverse", dsc);
6713
6714 case 0x18:
6715 if (bits (insn, 5, 7) == 0) /* op2. */
6716 {
6717 if (bits (insn, 12, 15) == 0xf)
7ff120b4 6718 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
cca44b1b 6719 else
7ff120b4 6720 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
cca44b1b
JB
6721 }
6722 else
7ff120b4 6723 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6724
6725 case 0x1a: case 0x1b:
6726 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6727 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
cca44b1b 6728 else
7ff120b4 6729 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6730
6731 case 0x1c: case 0x1d:
6732 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6733 {
6734 if (bits (insn, 0, 3) == 0xf)
7ff120b4 6735 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
cca44b1b 6736 else
7ff120b4 6737 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
cca44b1b
JB
6738 }
6739 else
7ff120b4 6740 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6741
6742 case 0x1e: case 0x1f:
6743 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7ff120b4 6744 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
cca44b1b 6745 else
7ff120b4 6746 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b
JB
6747 }
6748
6749 /* Should be unreachable. */
6750 return 1;
6751}
6752
6753static int
615234c1 6754arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4
YQ
6755 struct regcache *regs,
6756 struct displaced_step_closure *dsc)
cca44b1b
JB
6757{
6758 if (bit (insn, 25))
7ff120b4 6759 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
cca44b1b 6760 else
7ff120b4 6761 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
cca44b1b
JB
6762}
6763
6764static int
7ff120b4
YQ
6765arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6766 struct regcache *regs,
6767 struct displaced_step_closure *dsc)
cca44b1b
JB
6768{
6769 unsigned int opcode = bits (insn, 20, 24);
6770
6771 switch (opcode)
6772 {
6773 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7ff120b4 6774 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
cca44b1b
JB
6775
6776 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6777 case 0x12: case 0x16:
7ff120b4 6778 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
cca44b1b
JB
6779
6780 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6781 case 0x13: case 0x17:
7ff120b4 6782 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
cca44b1b
JB
6783
6784 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6785 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6786 /* Note: no writeback for these instructions. Bit 25 will always be
6787 zero though (via caller), so the following works OK. */
7ff120b4 6788 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6789 }
6790
6791 /* Should be unreachable. */
6792 return 1;
6793}
6794
34518530
YQ
6795/* Decode shifted register instructions. */
6796
6797static int
6798thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6799 uint16_t insn2, struct regcache *regs,
6800 struct displaced_step_closure *dsc)
6801{
 6802 /* The PC is only allowed to be used in the MOV instruction. */
6803
6804 unsigned int op = bits (insn1, 5, 8);
6805 unsigned int rn = bits (insn1, 0, 3);
6806
6807 if (op == 0x2 && rn == 0xf) /* MOV */
6808 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6809 else
6810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6811 "dp (shift reg)", dsc);
6812}
6813
6814
6815/* Decode extension register load/store. Exactly the same as
6816 arm_decode_ext_reg_ld_st. */
6817
6818static int
6819thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6820 uint16_t insn2, struct regcache *regs,
6821 struct displaced_step_closure *dsc)
6822{
6823 unsigned int opcode = bits (insn1, 4, 8);
6824
6825 switch (opcode)
6826 {
6827 case 0x04: case 0x05:
6828 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6829 "vfp/neon vmov", dsc);
6830
6831 case 0x08: case 0x0c: /* 01x00 */
6832 case 0x0a: case 0x0e: /* 01x10 */
6833 case 0x12: case 0x16: /* 10x10 */
6834 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6835 "vfp/neon vstm/vpush", dsc);
6836
6837 case 0x09: case 0x0d: /* 01x01 */
6838 case 0x0b: case 0x0f: /* 01x11 */
6839 case 0x13: case 0x17: /* 10x11 */
6840 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6841 "vfp/neon vldm/vpop", dsc);
6842
6843 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6844 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6845 "vstr", dsc);
6846 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6847 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6848 }
6849
6850 /* Should be unreachable. */
6851 return 1;
6852}
6853
cca44b1b 6854static int
12545665 6855arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7ff120b4 6856 struct regcache *regs, struct displaced_step_closure *dsc)
cca44b1b
JB
6857{
6858 unsigned int op1 = bits (insn, 20, 25);
6859 int op = bit (insn, 4);
6860 unsigned int coproc = bits (insn, 8, 11);
cca44b1b
JB
6861
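  /* These fields mirror the coprocessor/supervisor-call decode table: op1 is
     insn bits 20-25, op is bit 4, and a coproc field of 101x (0xa or 0xb)
     denotes the VFP/Advanced SIMD register file, which is why the
     (coproc & 0xe) == 0xa test keeps recurring below.  */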
6862 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7ff120b4 6863 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
cca44b1b
JB
6864 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6865 && (coproc & 0xe) != 0xa)
6866 /* stc/stc2. */
7ff120b4 6867 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b
JB
6868 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6869 && (coproc & 0xe) != 0xa)
6870 /* ldc/ldc2 imm/lit. */
7ff120b4 6871 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
cca44b1b 6872 else if ((op1 & 0x3e) == 0x00)
7ff120b4 6873 return arm_copy_undef (gdbarch, insn, dsc);
cca44b1b 6874 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7ff120b4 6875 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
cca44b1b 6876 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7ff120b4 6877 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
cca44b1b 6878 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7ff120b4 6879 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
cca44b1b
JB
6880 else if ((op1 & 0x30) == 0x20 && !op)
6881 {
6882 if ((coproc & 0xe) == 0xa)
7ff120b4 6883 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
cca44b1b 6884 else
7ff120b4 6885 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
cca44b1b
JB
6886 }
6887 else if ((op1 & 0x30) == 0x20 && op)
7ff120b4 6888 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
cca44b1b 6889 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7ff120b4 6890 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
cca44b1b 6891 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7ff120b4 6892 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
cca44b1b 6893 else if ((op1 & 0x30) == 0x30)
7ff120b4 6894 return arm_copy_svc (gdbarch, insn, regs, dsc);
cca44b1b 6895 else
7ff120b4 6896 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
cca44b1b
JB
6897}
6898
34518530
YQ
6899static int
6900thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6901 uint16_t insn2, struct regcache *regs,
6902 struct displaced_step_closure *dsc)
6903{
6904 unsigned int coproc = bits (insn2, 8, 11);
34518530
YQ
6905 unsigned int bit_5_8 = bits (insn1, 5, 8);
6906 unsigned int bit_9 = bit (insn1, 9);
6907 unsigned int bit_4 = bit (insn1, 4);
34518530
YQ
6908
6909 if (bit_9 == 0)
6910 {
6911 if (bit_5_8 == 2)
6912 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6913 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6914 dsc);
6915 else if (bit_5_8 == 0) /* UNDEFINED. */
6916 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6917 else
6918 {
 6919 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6920 if ((coproc & 0xe) == 0xa)
6921 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6922 dsc);
6923 else /* coproc is not 101x. */
6924 {
6925 if (bit_4 == 0) /* STC/STC2. */
6926 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6927 "stc/stc2", dsc);
 6928 else /* LDC/LDC2 {literal, immediate}. */
6929 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6930 regs, dsc);
6931 }
6932 }
6933 }
6934 else
6935 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6936
6937 return 0;
6938}
6939
6940static void
6941install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6942 struct displaced_step_closure *dsc, int rd)
6943{
6944 /* ADR Rd, #imm
6945
6946 Rewrite as:
6947
6948 Preparation: Rd <- PC
6949 Insn: ADD Rd, #imm
6950 Cleanup: Null.
6951 */
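  /* displaced_read_reg yields the value the PC had at the original location
     (the instruction address plus the architectural pipeline offset), so the
     ADD run in the scratch space sees the same base value the original ADR
     would have used.  */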
6952
6953 /* Rd <- PC */
6954 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6955 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6956}
6957
6958static int
6959thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6960 struct displaced_step_closure *dsc,
6961 int rd, unsigned int imm)
6962{
6963
6964 /* Encoding T2: ADDS Rd, #imm */
6965 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6966
6967 install_pc_relative (gdbarch, regs, dsc, rd);
6968
6969 return 0;
6970}
6971
6972static int
6973thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6974 struct regcache *regs,
6975 struct displaced_step_closure *dsc)
6976{
6977 unsigned int rd = bits (insn, 8, 10);
6978 unsigned int imm8 = bits (insn, 0, 7);
6979
6980 if (debug_displaced)
6981 fprintf_unfiltered (gdb_stdlog,
6982 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6983 rd, imm8, insn);
6984
6985 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6986}
6987
6988static int
6989thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6990 uint16_t insn2, struct regcache *regs,
6991 struct displaced_step_closure *dsc)
6992{
6993 unsigned int rd = bits (insn2, 8, 11);
 6994 /* The immediate has the same encoding in ADR, ADD and SUB, so simply
 6995 extract the raw immediate encoding rather than computing the immediate
 6996 value. When generating the ADD or SUB instruction, we can simply OR the
 6997 immediate into the opcode. */
6998 unsigned int imm_3_8 = insn2 & 0x70ff;
6999 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
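  /* The masks above keep the raw immediate fields in place: imm_3_8 holds
     imm3 (insn2 bits 12-14) and imm8 (insn2 bits 0-7), while imm_i holds the
     i bit (insn1 bit 10), matching the comment above about reusing the raw
     encoding in the replacement instruction.  */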
7000
7001 if (debug_displaced)
7002 fprintf_unfiltered (gdb_stdlog,
7003 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7004 rd, imm_i, imm_3_8, insn1, insn2);
7005
7006 if (bit (insn1, 7)) /* Encoding T2 */
7007 {
7008 /* Encoding T3: SUB Rd, Rd, #imm */
7009 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7010 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7011 }
7012 else /* Encoding T3 */
7013 {
7014 /* Encoding T3: ADD Rd, Rd, #imm */
7015 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7016 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7017 }
7018 dsc->numinsns = 2;
7019
7020 install_pc_relative (gdbarch, regs, dsc, rd);
7021
7022 return 0;
7023}
7024
7025static int
615234c1 7026thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7027 struct regcache *regs,
7028 struct displaced_step_closure *dsc)
7029{
7030 unsigned int rt = bits (insn1, 8, 10);
7031 unsigned int pc;
7032 int imm8 = (bits (insn1, 0, 7) << 2);
34518530
YQ
7033
 7034 /* LDR Rd, [PC, #imm8]
7035
 7036 Rewrite as:
7037
7038 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7039
7040 Insn: LDR R0, [R2, R3];
7041 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
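  /* R0, R2 and R3 are picked as scratch registers so the copied instruction
     can be the fixed register-form load "ldr r0, [r2, r3]" (0x58d0), which
     never mentions the PC; the PC value and the literal offset are planted
     in r2/r3 beforehand, and cleanup_load moves the result into Rd and
     restores the clobbered registers afterwards.  */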
7042
7043 if (debug_displaced)
7044 fprintf_unfiltered (gdb_stdlog,
7045 "displaced: copying thumb ldr r%d [pc #%d]\n"
7046 , rt, imm8);
7047
7048 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7049 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7050 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7051 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7052 /* The assembler calculates the required value of the offset from the
7053 Align(PC,4) value of this instruction to the label. */
7054 pc = pc & 0xfffffffc;
7055
7056 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7057 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7058
7059 dsc->rd = rt;
7060 dsc->u.ldst.xfersize = 4;
7061 dsc->u.ldst.rn = 0;
7062 dsc->u.ldst.immed = 0;
7063 dsc->u.ldst.writeback = 0;
7064 dsc->u.ldst.restore_r4 = 0;
7065
7066 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7067
7068 dsc->cleanup = &cleanup_load;
7069
7070 return 0;
7071}
7072
7073/* Copy Thumb cbnz/cbz instruction. */
7074
7075static int
7076thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7077 struct regcache *regs,
7078 struct displaced_step_closure *dsc)
7079{
7080 int non_zero = bit (insn1, 11);
7081 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7082 CORE_ADDR from = dsc->insn_addr;
7083 int rn = bits (insn1, 0, 2);
7084 int rn_val = displaced_read_reg (regs, dsc, rn);
7085
7086 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
 7087 /* CBNZ and CBZ do not affect the condition flags. If the condition is
 7088 true, set it to INST_AL so cleanup_branch knows the branch is taken;
 7089 otherwise leave it as it is and cleanup_branch will do nothing. */
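  /* The taken-branch target is insn_addr + 4 + imm5: in Thumb state the PC
     reads as the address of the CB(N)Z instruction plus 4.  */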
7090 if (dsc->u.branch.cond)
7091 {
7092 dsc->u.branch.cond = INST_AL;
7093 dsc->u.branch.dest = from + 4 + imm5;
7094 }
7095 else
7096 dsc->u.branch.dest = from + 2;
7097
7098 dsc->u.branch.link = 0;
7099 dsc->u.branch.exchange = 0;
7100
7101 if (debug_displaced)
7102 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7103 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7104 rn, rn_val, insn1, dsc->u.branch.dest);
7105
7106 dsc->modinsn[0] = THUMB_NOP;
7107
7108 dsc->cleanup = &cleanup_branch;
7109 return 0;
7110}
7111
7112/* Copy Table Branch Byte/Halfword. */
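/* The target of TBB/TBH depends on a branch table in the inferior, so the
   table entry is read here with target_read_memory and the computed
   destination is handed to cleanup_branch, which writes it into the PC once
   the step has finished.  */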
7113static int
7114thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7115 uint16_t insn2, struct regcache *regs,
7116 struct displaced_step_closure *dsc)
7117{
7118 ULONGEST rn_val, rm_val;
7119 int is_tbh = bit (insn2, 4);
7120 CORE_ADDR halfwords = 0;
7121 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7122
7123 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7124 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7125
7126 if (is_tbh)
7127 {
7128 gdb_byte buf[2];
7129
7130 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7131 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7132 }
7133 else
7134 {
7135 gdb_byte buf[1];
7136
7137 target_read_memory (rn_val + rm_val, buf, 1);
7138 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7139 }
7140
7141 if (debug_displaced)
7142 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7143 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7144 (unsigned int) rn_val, (unsigned int) rm_val,
7145 (unsigned int) halfwords);
7146
7147 dsc->u.branch.cond = INST_AL;
7148 dsc->u.branch.link = 0;
7149 dsc->u.branch.exchange = 0;
7150 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7151
7152 dsc->cleanup = &cleanup_branch;
7153
7154 return 0;
7155}
7156
7157static void
7158cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7159 struct displaced_step_closure *dsc)
7160{
7161 /* PC <- r7 */
7162 int val = displaced_read_reg (regs, dsc, 7);
7163 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7164
7165 /* r7 <- r8 */
7166 val = displaced_read_reg (regs, dsc, 8);
7167 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7168
7169 /* r8 <- tmp[0] */
7170 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7171
7172}
7173
7174static int
615234c1 7175thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
34518530
YQ
7176 struct regcache *regs,
7177 struct displaced_step_closure *dsc)
7178{
7179 dsc->u.block.regmask = insn1 & 0x00ff;
7180
 7181 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
 7182 to:
7183
7184 (1) register list is full, that is, r0-r7 are used.
7185 Prepare: tmp[0] <- r8
7186
7187 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7188 MOV r8, r7; Move value of r7 to r8;
7189 POP {r7}; Store PC value into r7.
7190
7191 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7192
7193 (2) register list is not full, supposing there are N registers in
7194 register list (except PC, 0 <= N <= 7).
7195 Prepare: for each i, 0 - N, tmp[i] <- ri.
7196
7197 POP {r0, r1, ...., rN};
7198
7199 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7200 from tmp[] properly.
7201 */
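  /* As a rough worked example of case (2): for POP {r0, r4, pc} the register
     list minus PC has two entries, so the scratch instruction becomes
     POP {r0, r1, r2}; cleanup_block_load_pc then scatters the three popped
     words into r0, r4 and the PC, and restores the clobbered low registers
     from tmp[].  */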
7202 if (debug_displaced)
7203 fprintf_unfiltered (gdb_stdlog,
7204 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7205 dsc->u.block.regmask, insn1);
7206
7207 if (dsc->u.block.regmask == 0xff)
7208 {
7209 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7210
7211 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7212 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7213 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7214
7215 dsc->numinsns = 3;
7216 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7217 }
7218 else
7219 {
7220 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
bec2ab5a
SM
7221 unsigned int i;
7222 unsigned int new_regmask;
34518530
YQ
7223
7224 for (i = 0; i < num_in_list + 1; i++)
7225 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7226
7227 new_regmask = (1 << (num_in_list + 1)) - 1;
7228
7229 if (debug_displaced)
7230 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7231 "{..., pc}: original reg list %.4x,"
7232 " modified list %.4x\n"),
7233 (int) dsc->u.block.regmask, new_regmask);
7234
7235 dsc->u.block.regmask |= 0x8000;
7236 dsc->u.block.writeback = 0;
7237 dsc->u.block.cond = INST_AL;
7238
7239 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7240
7241 dsc->cleanup = &cleanup_block_load_pc;
7242 }
7243
7244 return 0;
7245}
7246
7247static void
7248thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7249 struct regcache *regs,
7250 struct displaced_step_closure *dsc)
7251{
7252 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7253 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7254 int err = 0;
7255
7256 /* 16-bit thumb instructions. */
7257 switch (op_bit_12_15)
7258 {
 7259 /* Shift (immediate), add, subtract, move and compare. */
7260 case 0: case 1: case 2: case 3:
7261 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7262 "shift/add/sub/mov/cmp",
7263 dsc);
7264 break;
7265 case 4:
7266 switch (op_bit_10_11)
7267 {
7268 case 0: /* Data-processing */
7269 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7270 "data-processing",
7271 dsc);
7272 break;
7273 case 1: /* Special data instructions and branch and exchange. */
7274 {
7275 unsigned short op = bits (insn1, 7, 9);
7276 if (op == 6 || op == 7) /* BX or BLX */
7277 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7278 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7279 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7280 else
7281 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7282 dsc);
7283 }
7284 break;
7285 default: /* LDR (literal) */
7286 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7287 }
7288 break;
7289 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7290 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7291 break;
7292 case 10:
7293 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7294 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7295 else /* Generate SP-relative address */
7296 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7297 break;
7298 case 11: /* Misc 16-bit instructions */
7299 {
7300 switch (bits (insn1, 8, 11))
7301 {
7302 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7303 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7304 break;
7305 case 12: case 13: /* POP */
7306 if (bit (insn1, 8)) /* PC is in register list. */
7307 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7308 else
7309 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7310 break;
7311 case 15: /* If-Then, and hints */
7312 if (bits (insn1, 0, 3))
7313 /* If-Then makes up to four following instructions conditional.
 7314 The IT instruction itself is not conditional, so handle it as an
 7315 ordinary unmodified instruction. */
7316 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7317 dsc);
7318 else
7319 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7320 break;
7321 default:
7322 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7323 }
7324 }
7325 break;
7326 case 12:
7327 if (op_bit_10_11 < 2) /* Store multiple registers */
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7329 else /* Load multiple registers */
7330 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7331 break;
7332 case 13: /* Conditional branch and supervisor call */
7333 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7334 err = thumb_copy_b (gdbarch, insn1, dsc);
7335 else
7336 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7337 break;
7338 case 14: /* Unconditional branch */
7339 err = thumb_copy_b (gdbarch, insn1, dsc);
7340 break;
7341 default:
7342 err = 1;
7343 }
7344
7345 if (err)
7346 internal_error (__FILE__, __LINE__,
7347 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7348}
7349
7350static int
7351decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7352 uint16_t insn1, uint16_t insn2,
7353 struct regcache *regs,
7354 struct displaced_step_closure *dsc)
7355{
7356 int rt = bits (insn2, 12, 15);
7357 int rn = bits (insn1, 0, 3);
7358 int op1 = bits (insn1, 7, 8);
34518530
YQ
7359
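  /* An Rt of 0xf marks the memory-hint encodings (PLD/PLI) and an Rn of 0xf
     marks the literal, PC-relative loads, which are the cases that need a
     PC-independent copy via thumb2_copy_load_literal.  */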
7360 switch (bits (insn1, 5, 6))
7361 {
7362 case 0: /* Load byte and memory hints */
7363 if (rt == 0xf) /* PLD/PLI */
7364 {
7365 if (rn == 0xf)
7366 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7367 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7368 else
7369 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7370 "pli/pld", dsc);
7371 }
7372 else
7373 {
7374 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7375 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7376 1);
7377 else
7378 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7379 "ldrb{reg, immediate}/ldrbt",
7380 dsc);
7381 }
7382
7383 break;
7384 case 1: /* Load halfword and memory hints. */
7385 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7386 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7387 "pld/unalloc memhint", dsc);
7388 else
7389 {
7390 if (rn == 0xf)
7391 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7392 2);
7393 else
7394 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7395 "ldrh/ldrht", dsc);
7396 }
7397 break;
7398 case 2: /* Load word */
7399 {
7400 int insn2_bit_8_11 = bits (insn2, 8, 11);
7401
7402 if (rn == 0xf)
7403 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7404 else if (op1 == 0x1) /* Encoding T3 */
7405 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7406 0, 1);
7407 else /* op1 == 0x0 */
7408 {
7409 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7410 /* LDR (immediate) */
7411 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7412 dsc, bit (insn2, 8), 1);
7413 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7414 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7415 "ldrt", dsc);
7416 else
7417 /* LDR (register) */
7418 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7419 dsc, 0, 0);
7420 }
7421 break;
7422 }
7423 default:
7424 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7425 break;
7426 }
7427 return 0;
7428}
7429
7430static void
7431thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7432 uint16_t insn2, struct regcache *regs,
7433 struct displaced_step_closure *dsc)
7434{
7435 int err = 0;
7436 unsigned short op = bit (insn2, 15);
7437 unsigned int op1 = bits (insn1, 11, 12);
7438
7439 switch (op1)
7440 {
7441 case 1:
7442 {
7443 switch (bits (insn1, 9, 10))
7444 {
7445 case 0:
7446 if (bit (insn1, 6))
7447 {
 7448 /* Load/store {dual, exclusive}, table branch. */
7449 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7450 && bits (insn2, 5, 7) == 0)
7451 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7452 dsc);
7453 else
 7454 /* PC is not allowed to be used in load/store {dual, exclusive}
7455 instructions. */
7456 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7457 "load/store dual/ex", dsc);
7458 }
7459 else /* load/store multiple */
7460 {
7461 switch (bits (insn1, 7, 8))
7462 {
7463 case 0: case 3: /* SRS, RFE */
7464 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7465 "srs/rfe", dsc);
7466 break;
7467 case 1: case 2: /* LDM/STM/PUSH/POP */
7468 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7469 break;
7470 }
7471 }
7472 break;
7473
7474 case 1:
7475 /* Data-processing (shift register). */
7476 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7477 dsc);
7478 break;
7479 default: /* Coprocessor instructions. */
7480 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7481 break;
7482 }
7483 break;
7484 }
7485 case 2: /* op1 = 2 */
7486 if (op) /* Branch and misc control. */
7487 {
7488 if (bit (insn2, 14) /* BLX/BL */
7489 || bit (insn2, 12) /* Unconditional branch */
7490 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7491 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7492 else
7493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7494 "misc ctrl", dsc);
7495 }
7496 else
7497 {
7498 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7499 {
7500 int op = bits (insn1, 4, 8);
7501 int rn = bits (insn1, 0, 3);
7502 if ((op == 0 || op == 0xa) && rn == 0xf)
7503 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7504 regs, dsc);
7505 else
7506 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7507 "dp/pb", dsc);
7508 }
 7509 else /* Data processing (modified immediate) */
7510 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7511 "dp/mi", dsc);
7512 }
7513 break;
7514 case 3: /* op1 = 3 */
7515 switch (bits (insn1, 9, 10))
7516 {
7517 case 0:
7518 if (bit (insn1, 4))
7519 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7520 regs, dsc);
7521 else /* NEON Load/Store and Store single data item */
7522 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7523 "neon elt/struct load/store",
7524 dsc);
7525 break;
7526 case 1: /* op1 = 3, bits (9, 10) == 1 */
7527 switch (bits (insn1, 7, 8))
7528 {
7529 case 0: case 1: /* Data processing (register) */
7530 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7531 "dp(reg)", dsc);
7532 break;
7533 case 2: /* Multiply and absolute difference */
7534 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7535 "mul/mua/diff", dsc);
7536 break;
7537 case 3: /* Long multiply and divide */
7538 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7539 "lmul/lmua", dsc);
7540 break;
7541 }
7542 break;
7543 default: /* Coprocessor instructions */
7544 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7545 break;
7546 }
7547 break;
7548 default:
7549 err = 1;
7550 }
7551
7552 if (err)
7553 internal_error (__FILE__, __LINE__,
7554 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7555
7556}
7557
b434a28f
YQ
7558static void
7559thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
12545665 7560 struct regcache *regs,
b434a28f
YQ
7561 struct displaced_step_closure *dsc)
7562{
34518530
YQ
7563 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7564 uint16_t insn1
7565 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7566
7567 if (debug_displaced)
7568 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7569 "at %.8lx\n", insn1, (unsigned long) from);
7570
7571 dsc->is_thumb = 1;
7572 dsc->insn_size = thumb_insn_size (insn1);
7573 if (thumb_insn_size (insn1) == 4)
7574 {
7575 uint16_t insn2
7576 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7577 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7578 }
7579 else
7580 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
b434a28f
YQ
7581}
7582
cca44b1b 7583void
b434a28f
YQ
7584arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7585 CORE_ADDR to, struct regcache *regs,
cca44b1b
JB
7586 struct displaced_step_closure *dsc)
7587{
7588 int err = 0;
b434a28f
YQ
7589 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7590 uint32_t insn;
cca44b1b
JB
7591
7592 /* Most displaced instructions use a 1-instruction scratch space, so set this
7593 here and override below if/when necessary. */
7594 dsc->numinsns = 1;
7595 dsc->insn_addr = from;
7596 dsc->scratch_base = to;
7597 dsc->cleanup = NULL;
7598 dsc->wrote_to_pc = 0;
7599
b434a28f 7600 if (!displaced_in_arm_mode (regs))
12545665 7601 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
b434a28f 7602
4db71c0b
YQ
7603 dsc->is_thumb = 0;
7604 dsc->insn_size = 4;
b434a28f
YQ
7605 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7606 if (debug_displaced)
7607 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7608 "at %.8lx\n", (unsigned long) insn,
7609 (unsigned long) from);
7610
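  /* Dispatch on the top-level ARM encoding: a condition field of 0xf selects
     the unconditional instruction space; otherwise the switch index below is
     made of insn bits 25-27 (shifted down to bits 1-3) with insn bit 4 as
     the least significant bit.  */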
cca44b1b 7611 if ((insn & 0xf0000000) == 0xf0000000)
7ff120b4 7612 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
cca44b1b
JB
7613 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7614 {
7615 case 0x0: case 0x1: case 0x2: case 0x3:
7ff120b4 7616 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
cca44b1b
JB
7617 break;
7618
7619 case 0x4: case 0x5: case 0x6:
7ff120b4 7620 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
cca44b1b
JB
7621 break;
7622
7623 case 0x7:
7ff120b4 7624 err = arm_decode_media (gdbarch, insn, dsc);
cca44b1b
JB
7625 break;
7626
7627 case 0x8: case 0x9: case 0xa: case 0xb:
7ff120b4 7628 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
cca44b1b
JB
7629 break;
7630
7631 case 0xc: case 0xd: case 0xe: case 0xf:
12545665 7632 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
cca44b1b
JB
7633 break;
7634 }
7635
7636 if (err)
7637 internal_error (__FILE__, __LINE__,
7638 _("arm_process_displaced_insn: Instruction decode error"));
7639}
7640
7641/* Actually set up the scratch space for a displaced instruction. */
7642
7643void
7644arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7645 CORE_ADDR to, struct displaced_step_closure *dsc)
7646{
7647 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4db71c0b 7648 unsigned int i, len, offset;
cca44b1b 7649 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4db71c0b 7650 int size = dsc->is_thumb ? 2 : 4;
948f8e3d 7651 const gdb_byte *bkp_insn;
cca44b1b 7652
4db71c0b 7653 offset = 0;
cca44b1b
JB
7654 /* Poke modified instruction(s). */
7655 for (i = 0; i < dsc->numinsns; i++)
7656 {
7657 if (debug_displaced)
4db71c0b
YQ
7658 {
7659 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7660 if (size == 4)
7661 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7662 dsc->modinsn[i]);
7663 else if (size == 2)
7664 fprintf_unfiltered (gdb_stdlog, "%.4x",
7665 (unsigned short)dsc->modinsn[i]);
7666
7667 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7668 (unsigned long) to + offset);
7669
7670 }
7671 write_memory_unsigned_integer (to + offset, size,
7672 byte_order_for_code,
cca44b1b 7673 dsc->modinsn[i]);
4db71c0b
YQ
7674 offset += size;
7675 }
7676
7677 /* Choose the correct breakpoint instruction. */
7678 if (dsc->is_thumb)
7679 {
7680 bkp_insn = tdep->thumb_breakpoint;
7681 len = tdep->thumb_breakpoint_size;
7682 }
7683 else
7684 {
7685 bkp_insn = tdep->arm_breakpoint;
7686 len = tdep->arm_breakpoint_size;
cca44b1b
JB
7687 }
7688
7689 /* Put breakpoint afterwards. */
4db71c0b 7690 write_memory (to + offset, bkp_insn, len);
cca44b1b
JB
7691
7692 if (debug_displaced)
7693 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7694 paddress (gdbarch, from), paddress (gdbarch, to));
7695}
7696
cca44b1b
JB
7697/* Entry point for cleaning things up after a displaced instruction has been
7698 single-stepped. */
7699
7700void
7701arm_displaced_step_fixup (struct gdbarch *gdbarch,
7702 struct displaced_step_closure *dsc,
7703 CORE_ADDR from, CORE_ADDR to,
7704 struct regcache *regs)
7705{
7706 if (dsc->cleanup)
7707 dsc->cleanup (gdbarch, regs, dsc);
7708
7709 if (!dsc->wrote_to_pc)
4db71c0b
YQ
7710 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7711 dsc->insn_addr + dsc->insn_size);
7712
cca44b1b
JB
7713}
7714
7715#include "bfd-in2.h"
7716#include "libcoff.h"
7717
7718static int
7719gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7720{
9a3c8263 7721 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
9779414d
DJ
7722
7723 if (arm_pc_is_thumb (gdbarch, memaddr))
cca44b1b
JB
7724 {
7725 static asymbol *asym;
7726 static combined_entry_type ce;
7727 static struct coff_symbol_struct csym;
7728 static struct bfd fake_bfd;
7729 static bfd_target fake_target;
7730
7731 if (csym.native == NULL)
7732 {
7733 /* Create a fake symbol vector containing a Thumb symbol.
7734 This is solely so that the code in print_insn_little_arm()
7735 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7736 the presence of a Thumb symbol and switch to decoding
7737 Thumb instructions. */
7738
7739 fake_target.flavour = bfd_target_coff_flavour;
7740 fake_bfd.xvec = &fake_target;
7741 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7742 csym.native = &ce;
7743 csym.symbol.the_bfd = &fake_bfd;
7744 csym.symbol.name = "fake";
7745 asym = (asymbol *) & csym;
7746 }
7747
7748 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7749 info->symbols = &asym;
7750 }
7751 else
7752 info->symbols = NULL;
7753
7754 if (info->endian == BFD_ENDIAN_BIG)
7755 return print_insn_big_arm (memaddr, info);
7756 else
7757 return print_insn_little_arm (memaddr, info);
7758}
7759
7760/* The following define instruction sequences that will cause ARM
 7761 CPUs to take an undefined instruction trap. These are used to
7762 signal a breakpoint to GDB.
7763
 7764 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7765 modes. A different instruction is required for each mode. The ARM
 7766 CPUs can also be big or little endian. Thus four different
7767 instructions are needed to support all cases.
7768
7769 Note: ARMv4 defines several new instructions that will take the
7770 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7771 not in fact add the new instructions. The new undefined
7772 instructions in ARMv4 are all instructions that had no defined
7773 behaviour in earlier chips. There is no guarantee that they will
 7774 raise an exception, but they may be treated as NOPs. In practice, it
 7775 may only be safe to rely on instructions matching:
7776
7777 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7778 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7779 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7780
0963b4bd 7781 Even this may only be true if the condition predicate is true. The
cca44b1b
JB
7782 following use a condition predicate of ALWAYS so it is always TRUE.
7783
7784 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7785 and NetBSD all use a software interrupt rather than an undefined
 7786 instruction to force a trap. This can be handled by the
 7787 ABI-specific code during establishment of the gdbarch vector. */
7788
7789#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7790#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7791#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7792#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7793
948f8e3d
PA
7794static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7795static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7796static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7797static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
cca44b1b
JB
7798
7799/* Determine the type and size of breakpoint to insert at PCPTR. Uses
7800 the program counter value to determine whether a 16-bit or 32-bit
7801 breakpoint should be used. It returns a pointer to a string of
7802 bytes that encode a breakpoint instruction, stores the length of
7803 the string to *lenptr, and adjusts the program counter (if
7804 necessary) to point to the actual memory location where the
7805 breakpoint should be inserted. */
7806
7807static const unsigned char *
7808arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7809{
7810 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
177321bd 7811 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
cca44b1b 7812
9779414d 7813 if (arm_pc_is_thumb (gdbarch, *pcptr))
cca44b1b
JB
7814 {
7815 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
177321bd
DJ
7816
7817 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7818 check whether we are replacing a 32-bit instruction. */
7819 if (tdep->thumb2_breakpoint != NULL)
7820 {
7821 gdb_byte buf[2];
7822 if (target_read_memory (*pcptr, buf, 2) == 0)
7823 {
7824 unsigned short inst1;
7825 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
db24da6d 7826 if (thumb_insn_size (inst1) == 4)
177321bd
DJ
7827 {
7828 *lenptr = tdep->thumb2_breakpoint_size;
7829 return tdep->thumb2_breakpoint;
7830 }
7831 }
7832 }
7833
cca44b1b
JB
7834 *lenptr = tdep->thumb_breakpoint_size;
7835 return tdep->thumb_breakpoint;
7836 }
7837 else
7838 {
7839 *lenptr = tdep->arm_breakpoint_size;
7840 return tdep->arm_breakpoint;
7841 }
7842}
7843
177321bd
DJ
7844static void
7845arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7846 int *kindptr)
7847{
177321bd
DJ
7848 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7849
9779414d 7850 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
177321bd
DJ
7851 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7852 that this is not confused with a 32-bit ARM breakpoint. */
7853 *kindptr = 3;
7854}
7855
cca44b1b
JB
7856/* Extract from an array REGBUF containing the (raw) register state a
7857 function return value of type TYPE, and copy that, in virtual
7858 format, into VALBUF. */
7859
7860static void
7861arm_extract_return_value (struct type *type, struct regcache *regs,
7862 gdb_byte *valbuf)
7863{
7864 struct gdbarch *gdbarch = get_regcache_arch (regs);
7865 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7866
7867 if (TYPE_CODE_FLT == TYPE_CODE (type))
7868 {
7869 switch (gdbarch_tdep (gdbarch)->fp_model)
7870 {
7871 case ARM_FLOAT_FPA:
7872 {
7873 /* The value is in register F0 in internal format. We need to
7874 extract the raw value and then convert it to the desired
7875 internal type. */
7876 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7877
7878 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7879 convert_from_extended (floatformat_from_type (type), tmpbuf,
7880 valbuf, gdbarch_byte_order (gdbarch));
7881 }
7882 break;
7883
7884 case ARM_FLOAT_SOFT_FPA:
7885 case ARM_FLOAT_SOFT_VFP:
7886 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7887 not using the VFP ABI code. */
7888 case ARM_FLOAT_VFP:
7889 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7890 if (TYPE_LENGTH (type) > 4)
7891 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7892 valbuf + INT_REGISTER_SIZE);
7893 break;
7894
7895 default:
0963b4bd
MS
7896 internal_error (__FILE__, __LINE__,
7897 _("arm_extract_return_value: "
7898 "Floating point model not supported"));
cca44b1b
JB
7899 break;
7900 }
7901 }
7902 else if (TYPE_CODE (type) == TYPE_CODE_INT
7903 || TYPE_CODE (type) == TYPE_CODE_CHAR
7904 || TYPE_CODE (type) == TYPE_CODE_BOOL
7905 || TYPE_CODE (type) == TYPE_CODE_PTR
7906 || TYPE_CODE (type) == TYPE_CODE_REF
7907 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7908 {
b021a221
MS
7909 /* If the type is a plain integer, then the access is
7910 straight-forward. Otherwise we have to play around a bit
7911 more. */
cca44b1b
JB
7912 int len = TYPE_LENGTH (type);
7913 int regno = ARM_A1_REGNUM;
7914 ULONGEST tmp;
7915
7916 while (len > 0)
7917 {
7918 /* By using store_unsigned_integer we avoid having to do
7919 anything special for small big-endian values. */
7920 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7921 store_unsigned_integer (valbuf,
7922 (len > INT_REGISTER_SIZE
7923 ? INT_REGISTER_SIZE : len),
7924 byte_order, tmp);
7925 len -= INT_REGISTER_SIZE;
7926 valbuf += INT_REGISTER_SIZE;
7927 }
7928 }
7929 else
7930 {
7931 /* For a structure or union the behaviour is as if the value had
7932 been stored to word-aligned memory and then loaded into
7933 registers with 32-bit load instruction(s). */
7934 int len = TYPE_LENGTH (type);
7935 int regno = ARM_A1_REGNUM;
7936 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7937
7938 while (len > 0)
7939 {
7940 regcache_cooked_read (regs, regno++, tmpbuf);
7941 memcpy (valbuf, tmpbuf,
7942 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7943 len -= INT_REGISTER_SIZE;
7944 valbuf += INT_REGISTER_SIZE;
7945 }
7946 }
7947}
7948
7949
7950/* Will a function return an aggregate type in memory or in a
7951 register? Return 0 if an aggregate type can be returned in a
7952 register, 1 if it must be returned in memory. */
7953
7954static int
7955arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7956{
cca44b1b
JB
7957 enum type_code code;
7958
f168693b 7959 type = check_typedef (type);
cca44b1b 7960
b13c8ab2
YQ
7961 /* Simple, non-aggregate types (ie not including vectors and
7962 complex) are always returned in a register (or registers). */
7963 code = TYPE_CODE (type);
7964 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7965 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7966 return 0;
cca44b1b 7967
c4312b19
YQ
7968 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7969 {
7970 /* Vector values should be returned using ARM registers if they
7971 are not over 16 bytes. */
7972 return (TYPE_LENGTH (type) > 16);
7973 }
7974
b13c8ab2 7975 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
cca44b1b 7976 {
b13c8ab2
YQ
7977 /* The AAPCS says all aggregates not larger than a word are returned
7978 in a register. */
7979 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7980 return 0;
7981
cca44b1b
JB
7982 return 1;
7983 }
b13c8ab2
YQ
7984 else
7985 {
7986 int nRc;
cca44b1b 7987
b13c8ab2
YQ
7988 /* All aggregate types that won't fit in a register must be returned
7989 in memory. */
7990 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7991 return 1;
cca44b1b 7992
b13c8ab2
YQ
7993 /* In the ARM ABI, "integer" like aggregate types are returned in
7994 registers. For an aggregate type to be integer like, its size
7995 must be less than or equal to INT_REGISTER_SIZE and the
7996 offset of each addressable subfield must be zero. Note that bit
7997 fields are not addressable, and all addressable subfields of
7998 unions always start at offset zero.
cca44b1b 7999
b13c8ab2
YQ
8000 This function is based on the behaviour of GCC 2.95.1.
8001 See: gcc/arm.c: arm_return_in_memory() for details.
cca44b1b 8002
b13c8ab2
YQ
8003 Note: All versions of GCC before GCC 2.95.2 do not set up the
8004 parameters correctly for a function returning the following
8005 structure: struct { float f;}; This should be returned in memory,
8006 not a register. Richard Earnshaw sent me a patch, but I do not
8007 know of any way to detect if a function like the above has been
8008 compiled with the correct calling convention. */
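 /* So, under the APCS heuristic here, e.g. struct { short s; } (all fields
    at offset zero, none floating point) is returned in r0, while
    struct { float f; } falls through to memory.  */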
8009
8010 /* Assume all other aggregate types can be returned in a register.
8011 Run a check for structures, unions and arrays. */
8012 nRc = 0;
67255d04 8013
b13c8ab2
YQ
8014 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8015 {
8016 int i;
8017 /* Need to check if this struct/union is "integer" like. For
8018 this to be true, its size must be less than or equal to
8019 INT_REGISTER_SIZE and the offset of each addressable
8020 subfield must be zero. Note that bit fields are not
8021 addressable, and unions always start at offset zero. If any
8022 of the subfields is a floating point type, the struct/union
8023 cannot be an integer type. */
8024
8025 /* For each field in the object, check:
8026 1) Is it FP? --> yes, nRc = 1;
8027 2) Is it addressable (bitpos != 0) and
8028 not packed (bitsize == 0)?
8029 --> yes, nRc = 1
8030 */
8031
8032 for (i = 0; i < TYPE_NFIELDS (type); i++)
67255d04 8033 {
b13c8ab2
YQ
8034 enum type_code field_type_code;
8035
8036 field_type_code
8037 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8038 i)));
8039
8040 /* Is it a floating point type field? */
8041 if (field_type_code == TYPE_CODE_FLT)
67255d04
RE
8042 {
8043 nRc = 1;
8044 break;
8045 }
b13c8ab2
YQ
8046
8047 /* If bitpos != 0, then we have to care about it. */
8048 if (TYPE_FIELD_BITPOS (type, i) != 0)
8049 {
8050 /* Bitfields are not addressable. If the field bitsize is
8051 zero, then the field is not packed. Hence it cannot be
8052 a bitfield or any other packed type. */
8053 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8054 {
8055 nRc = 1;
8056 break;
8057 }
8058 }
67255d04
RE
8059 }
8060 }
67255d04 8061
b13c8ab2
YQ
8062 return nRc;
8063 }
67255d04
RE
8064}
8065
34e8f22d
RE
8066/* Write into appropriate registers a function return value of type
8067 TYPE, given in virtual format. */
8068
8069static void
b508a996 8070arm_store_return_value (struct type *type, struct regcache *regs,
5238cf52 8071 const gdb_byte *valbuf)
34e8f22d 8072{
be8626e0 8073 struct gdbarch *gdbarch = get_regcache_arch (regs);
e17a4113 8074 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
be8626e0 8075
34e8f22d
RE
8076 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8077 {
e362b510 8078 gdb_byte buf[MAX_REGISTER_SIZE];
34e8f22d 8079
be8626e0 8080 switch (gdbarch_tdep (gdbarch)->fp_model)
08216dd7
RE
8081 {
8082 case ARM_FLOAT_FPA:
8083
be8626e0
MD
8084 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8085 gdbarch_byte_order (gdbarch));
b508a996 8086 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
08216dd7
RE
8087 break;
8088
fd50bc42 8089 case ARM_FLOAT_SOFT_FPA:
08216dd7 8090 case ARM_FLOAT_SOFT_VFP:
90445bd3
DJ
8091 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8092 not using the VFP ABI code. */
8093 case ARM_FLOAT_VFP:
b508a996
RE
8094 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8095 if (TYPE_LENGTH (type) > 4)
8096 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
7a5ea0d4 8097 valbuf + INT_REGISTER_SIZE);
08216dd7
RE
8098 break;
8099
8100 default:
9b20d036
MS
8101 internal_error (__FILE__, __LINE__,
8102 _("arm_store_return_value: Floating "
8103 "point model not supported"));
08216dd7
RE
8104 break;
8105 }
34e8f22d 8106 }
b508a996
RE
8107 else if (TYPE_CODE (type) == TYPE_CODE_INT
8108 || TYPE_CODE (type) == TYPE_CODE_CHAR
8109 || TYPE_CODE (type) == TYPE_CODE_BOOL
8110 || TYPE_CODE (type) == TYPE_CODE_PTR
8111 || TYPE_CODE (type) == TYPE_CODE_REF
8112 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8113 {
8114 if (TYPE_LENGTH (type) <= 4)
8115 {
8116 /* Values of one word or less are zero/sign-extended and
8117 returned in r0. */
7a5ea0d4 8118 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8119 LONGEST val = unpack_long (type, valbuf);
8120
e17a4113 8121 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
b508a996
RE
8122 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8123 }
8124 else
8125 {
8126 /* Integral values greater than one word are stored in consecutive
8127 registers starting with r0. This will always be a multiple of
 8128 the register size. */
8129 int len = TYPE_LENGTH (type);
8130 int regno = ARM_A1_REGNUM;
8131
8132 while (len > 0)
8133 {
8134 regcache_cooked_write (regs, regno++, valbuf);
7a5ea0d4
DJ
8135 len -= INT_REGISTER_SIZE;
8136 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8137 }
8138 }
8139 }
34e8f22d 8140 else
b508a996
RE
8141 {
8142 /* For a structure or union the behaviour is as if the value had
8143 been stored to word-aligned memory and then loaded into
8144 registers with 32-bit load instruction(s). */
8145 int len = TYPE_LENGTH (type);
8146 int regno = ARM_A1_REGNUM;
7a5ea0d4 8147 bfd_byte tmpbuf[INT_REGISTER_SIZE];
b508a996
RE
8148
8149 while (len > 0)
8150 {
8151 memcpy (tmpbuf, valbuf,
7a5ea0d4 8152 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
b508a996 8153 regcache_cooked_write (regs, regno++, tmpbuf);
7a5ea0d4
DJ
8154 len -= INT_REGISTER_SIZE;
8155 valbuf += INT_REGISTER_SIZE;
b508a996
RE
8156 }
8157 }
34e8f22d
RE
8158}
8159
2af48f68
PB
8160
8161/* Handle function return values. */
8162
8163static enum return_value_convention
6a3a010b 8164arm_return_value (struct gdbarch *gdbarch, struct value *function,
c055b101
CV
8165 struct type *valtype, struct regcache *regcache,
8166 gdb_byte *readbuf, const gdb_byte *writebuf)
2af48f68 8167{
7c00367c 8168 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6a3a010b 8169 struct type *func_type = function ? value_type (function) : NULL;
90445bd3
DJ
8170 enum arm_vfp_cprc_base_type vfp_base_type;
8171 int vfp_base_count;
8172
8173 if (arm_vfp_abi_for_function (gdbarch, func_type)
8174 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8175 {
8176 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8177 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8178 int i;
8179 for (i = 0; i < vfp_base_count; i++)
8180 {
58d6951d
DJ
8181 if (reg_char == 'q')
8182 {
8183 if (writebuf)
8184 arm_neon_quad_write (gdbarch, regcache, i,
8185 writebuf + i * unit_length);
8186
8187 if (readbuf)
8188 arm_neon_quad_read (gdbarch, regcache, i,
8189 readbuf + i * unit_length);
8190 }
8191 else
8192 {
8193 char name_buf[4];
8194 int regnum;
8195
8c042590 8196 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
58d6951d
DJ
8197 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8198 strlen (name_buf));
8199 if (writebuf)
8200 regcache_cooked_write (regcache, regnum,
8201 writebuf + i * unit_length);
8202 if (readbuf)
8203 regcache_cooked_read (regcache, regnum,
8204 readbuf + i * unit_length);
8205 }
90445bd3
DJ
8206 }
8207 return RETURN_VALUE_REGISTER_CONVENTION;
8208 }
7c00367c 8209
2af48f68
PB
8210 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8211 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8212 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8213 {
7c00367c
MK
8214 if (tdep->struct_return == pcc_struct_return
8215 || arm_return_in_memory (gdbarch, valtype))
2af48f68
PB
8216 return RETURN_VALUE_STRUCT_CONVENTION;
8217 }
b13c8ab2
YQ
8218 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8219 {
8220 if (arm_return_in_memory (gdbarch, valtype))
8221 return RETURN_VALUE_STRUCT_CONVENTION;
8222 }
7052e42c 8223
2af48f68
PB
8224 if (writebuf)
8225 arm_store_return_value (valtype, regcache, writebuf);
8226
8227 if (readbuf)
8228 arm_extract_return_value (valtype, regcache, readbuf);
8229
8230 return RETURN_VALUE_REGISTER_CONVENTION;
8231}
8232
8233
9df628e0 8234static int
60ade65d 8235arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9df628e0 8236{
e17a4113
UW
8237 struct gdbarch *gdbarch = get_frame_arch (frame);
8238 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8239 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9df628e0 8240 CORE_ADDR jb_addr;
e362b510 8241 gdb_byte buf[INT_REGISTER_SIZE];
9df628e0 8242
60ade65d 8243 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9df628e0
RE
8244
8245 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
7a5ea0d4 8246 INT_REGISTER_SIZE))
9df628e0
RE
8247 return 0;
8248
e17a4113 8249 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9df628e0
RE
8250 return 1;
8251}
8252
faa95490
DJ
8253/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8254 return the target PC. Otherwise return 0. */
c906108c
SS
8255
8256CORE_ADDR
52f729a7 8257arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
c906108c 8258{
2c02bd72 8259 const char *name;
faa95490 8260 int namelen;
c906108c
SS
8261 CORE_ADDR start_addr;
8262
8263 /* Find the starting address and name of the function containing the PC. */
8264 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
80d8d390
YQ
8265 {
8266 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8267 check here. */
8268 start_addr = arm_skip_bx_reg (frame, pc);
8269 if (start_addr != 0)
8270 return start_addr;
8271
8272 return 0;
8273 }
c906108c 8274
faa95490
DJ
8275 /* If PC is in a Thumb call or return stub, return the address of the
8276 target PC, which is in a register. The thunk functions are called
8277 _call_via_xx, where x is the register name. The possible names
3d8d5e79
DJ
8278 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8279 functions, named __ARM_call_via_r[0-7]. */
61012eef
GB
8280 if (startswith (name, "_call_via_")
8281 || startswith (name, "__ARM_call_via_"))
c906108c 8282 {
ed9a39eb
JM
8283 /* Use the name suffix to determine which register contains the
8284 target PC. */
c5aa993b
JM
8285 static char *table[15] =
8286 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8287 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8288 };
c906108c 8289 int regno;
faa95490 8290 int offset = strlen (name) - 2;
c906108c
SS
8291
8292 for (regno = 0; regno <= 14; regno++)
faa95490 8293 if (strcmp (&name[offset], table[regno]) == 0)
52f729a7 8294 return get_frame_register_unsigned (frame, regno);
c906108c 8295 }
ed9a39eb 8296
faa95490
DJ
8297 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8298 non-interworking calls to foo. We could decode the stubs
8299 to find the target but it's easier to use the symbol table. */
8300 namelen = strlen (name);
8301 if (name[0] == '_' && name[1] == '_'
8302 && ((namelen > 2 + strlen ("_from_thumb")
61012eef 8303 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
faa95490 8304 || (namelen > 2 + strlen ("_from_arm")
61012eef 8305 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
faa95490
DJ
8306 {
8307 char *target_name;
8308 int target_len = namelen - 2;
3b7344d5 8309 struct bound_minimal_symbol minsym;
faa95490
DJ
8310 struct objfile *objfile;
8311 struct obj_section *sec;
8312
8313 if (name[namelen - 1] == 'b')
8314 target_len -= strlen ("_from_thumb");
8315 else
8316 target_len -= strlen ("_from_arm");
8317
224c3ddb 8318 target_name = (char *) alloca (target_len + 1);
faa95490
DJ
8319 memcpy (target_name, name + 2, target_len);
8320 target_name[target_len] = '\0';
8321
8322 sec = find_pc_section (pc);
8323 objfile = (sec == NULL) ? NULL : sec->objfile;
8324 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
3b7344d5 8325 if (minsym.minsym != NULL)
77e371c0 8326 return BMSYMBOL_VALUE_ADDRESS (minsym);
faa95490
DJ
8327 else
8328 return 0;
8329 }
8330
c5aa993b 8331 return 0; /* not a stub */
c906108c
SS
8332}
8333
afd7eef0
RE
8334static void
8335set_arm_command (char *args, int from_tty)
8336{
edefbb7c
AC
8337 printf_unfiltered (_("\
8338\"set arm\" must be followed by an apporpriate subcommand.\n"));
afd7eef0
RE
8339 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8340}
8341
8342static void
8343show_arm_command (char *args, int from_tty)
8344{
26304000 8345 cmd_show_list (showarmcmdlist, from_tty, "");
afd7eef0
RE
8346}
8347
28e97307
DJ
8348static void
8349arm_update_current_architecture (void)
fd50bc42 8350{
28e97307 8351 struct gdbarch_info info;
fd50bc42 8352
28e97307 8353 /* If the current architecture is not ARM, we have nothing to do. */
f5656ead 8354 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
28e97307 8355 return;
fd50bc42 8356
28e97307
DJ
8357 /* Update the architecture. */
8358 gdbarch_info_init (&info);
fd50bc42 8359
28e97307 8360 if (!gdbarch_update_p (info))
9b20d036 8361 internal_error (__FILE__, __LINE__, _("could not update architecture"));
fd50bc42
RE
8362}
8363
8364static void
8365set_fp_model_sfunc (char *args, int from_tty,
8366 struct cmd_list_element *c)
8367{
570dc176 8368 int fp_model;
fd50bc42
RE
8369
8370 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8371 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8372 {
aead7601 8373 arm_fp_model = (enum arm_float_model) fp_model;
fd50bc42
RE
8374 break;
8375 }
8376
8377 if (fp_model == ARM_FLOAT_LAST)
edefbb7c 8378 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
fd50bc42
RE
8379 current_fp_model);
8380
28e97307 8381 arm_update_current_architecture ();
fd50bc42
RE
8382}
8383
8384static void
08546159
AC
8385show_fp_model (struct ui_file *file, int from_tty,
8386 struct cmd_list_element *c, const char *value)
fd50bc42 8387{
f5656ead 8388 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
fd50bc42 8389
28e97307 8390 if (arm_fp_model == ARM_FLOAT_AUTO
f5656ead 8391 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8392 fprintf_filtered (file, _("\
8393The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8394 fp_model_strings[tdep->fp_model]);
8395 else
8396 fprintf_filtered (file, _("\
8397The current ARM floating point model is \"%s\".\n"),
8398 fp_model_strings[arm_fp_model]);
8399}
8400
8401static void
8402arm_set_abi (char *args, int from_tty,
8403 struct cmd_list_element *c)
8404{
570dc176 8405 int arm_abi;
28e97307
DJ
8406
8407 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8408 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8409 {
aead7601 8410 arm_abi_global = (enum arm_abi_kind) arm_abi;
28e97307
DJ
8411 break;
8412 }
8413
8414 if (arm_abi == ARM_ABI_LAST)
8415 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8416 arm_abi_string);
8417
8418 arm_update_current_architecture ();
8419}
8420
8421static void
8422arm_show_abi (struct ui_file *file, int from_tty,
8423 struct cmd_list_element *c, const char *value)
8424{
f5656ead 8425 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
28e97307
DJ
8426
8427 if (arm_abi_global == ARM_ABI_AUTO
f5656ead 8428 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
28e97307
DJ
8429 fprintf_filtered (file, _("\
8430The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8431 arm_abi_strings[tdep->arm_abi]);
8432 else
8433 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8434 arm_abi_string);
fd50bc42
RE
8435}
8436
0428b8f5
DJ
8437static void
8438arm_show_fallback_mode (struct ui_file *file, int from_tty,
8439 struct cmd_list_element *c, const char *value)
8440{
0963b4bd
MS
8441 fprintf_filtered (file,
8442 _("The current execution mode assumed "
8443 "(when symbols are unavailable) is \"%s\".\n"),
0428b8f5
DJ
8444 arm_fallback_mode_string);
8445}
8446
8447static void
8448arm_show_force_mode (struct ui_file *file, int from_tty,
8449 struct cmd_list_element *c, const char *value)
8450{
0963b4bd
MS
8451 fprintf_filtered (file,
8452 _("The current execution mode assumed "
8453 "(even when symbols are available) is \"%s\".\n"),
0428b8f5
DJ
8454 arm_force_mode_string);
8455}
8456
afd7eef0
RE
8457/* If the user changes the register disassembly style used for info
8458 register and other commands, we have to also switch the style used
8459 in opcodes for disassembly output. This function is run in the "set
8460 arm disassembly" command, and does that. */
bc90b915
FN
8461
8462static void
afd7eef0 8463set_disassembly_style_sfunc (char *args, int from_tty,
bc90b915
FN
8464 struct cmd_list_element *c)
8465{
afd7eef0 8466 set_disassembly_style ();
bc90b915
FN
8467}
8468\f
966fbf70 8469/* Return the ARM register name corresponding to register I. */
a208b0cb 8470static const char *
d93859e2 8471arm_register_name (struct gdbarch *gdbarch, int i)
966fbf70 8472{
58d6951d
DJ
8473 const int num_regs = gdbarch_num_regs (gdbarch);
8474
8475 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8476 && i >= num_regs && i < num_regs + 32)
8477 {
8478 static const char *const vfp_pseudo_names[] = {
8479 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8480 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8481 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8482 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8483 };
8484
8485 return vfp_pseudo_names[i - num_regs];
8486 }
8487
8488 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8489 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8490 {
8491 static const char *const neon_pseudo_names[] = {
8492 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8493 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8494 };
8495
8496 return neon_pseudo_names[i - num_regs - 32];
8497 }
8498
ff6f572f
DJ
8499 if (i >= ARRAY_SIZE (arm_register_names))
8500 /* These registers are only supported on targets which supply
8501 an XML description. */
8502 return "";
8503
966fbf70
RE
8504 return arm_register_names[i];
8505}
8506
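/* Illustrative note (an assumption-based sketch, not original source):
   when the VFP/NEON pseudo registers are in use and
   gdbarch_num_regs (gdbarch) == N, pseudo register N above is reported
   as "s0" and pseudo register N + 32 as "q0"; for example, requesting
   i == N + 33 yields "q1".  */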
bc90b915 8507static void
afd7eef0 8508set_disassembly_style (void)
bc90b915 8509{
123dc839 8510 int current;
bc90b915 8511
123dc839
DJ
8512 /* Find the style that the user wants. */
8513 for (current = 0; current < num_disassembly_options; current++)
8514 if (disassembly_style == valid_disassembly_styles[current])
8515 break;
8516 gdb_assert (current < num_disassembly_options);
bc90b915 8517
94c30b78 8518 /* Synchronize the disassembler. */
bc90b915
FN
8519 set_arm_regname_option (current);
8520}
8521
082fc60d
RE
8522/* Test whether the coff symbol specific value corresponds to a Thumb
8523 function. */
8524
8525static int
8526coff_sym_is_thumb (int val)
8527{
f8bf5763
PM
8528 return (val == C_THUMBEXT
8529 || val == C_THUMBSTAT
8530 || val == C_THUMBEXTFUNC
8531 || val == C_THUMBSTATFUNC
8532 || val == C_THUMBLABEL);
082fc60d
RE
8533}
8534
8535/* arm_coff_make_msymbol_special()
8536 arm_elf_make_msymbol_special()
8537
8538 These functions test whether the COFF or ELF symbol corresponds to
8539 an address in thumb code, and set a "special" bit in a minimal
8540 symbol to indicate that it does. */
8541
34e8f22d 8542static void
082fc60d
RE
8543arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8544{
467d42c4
UW
8545 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
8546 == ST_BRANCH_TO_THUMB)
082fc60d
RE
8547 MSYMBOL_SET_SPECIAL (msym);
8548}
8549
34e8f22d 8550static void
082fc60d
RE
8551arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8552{
8553 if (coff_sym_is_thumb (val))
8554 MSYMBOL_SET_SPECIAL (msym);
8555}
8556
60c5725c 8557static void
c1bd65d0 8558arm_objfile_data_free (struct objfile *objfile, void *arg)
60c5725c 8559{
9a3c8263 8560 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
60c5725c
DJ
8561 unsigned int i;
8562
8563 for (i = 0; i < objfile->obfd->section_count; i++)
8564 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8565}
8566
8567static void
8568arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8569 asymbol *sym)
8570{
8571 const char *name = bfd_asymbol_name (sym);
8572 struct arm_per_objfile *data;
8573 VEC(arm_mapping_symbol_s) **map_p;
8574 struct arm_mapping_symbol new_map_sym;
8575
8576 gdb_assert (name[0] == '$');
8577 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8578 return;
8579
9a3c8263
SM
8580 data = (struct arm_per_objfile *) objfile_data (objfile,
8581 arm_objfile_data_key);
60c5725c
DJ
8582 if (data == NULL)
8583 {
8584 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8585 struct arm_per_objfile);
8586 set_objfile_data (objfile, arm_objfile_data_key, data);
8587 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8588 objfile->obfd->section_count,
8589 VEC(arm_mapping_symbol_s) *);
8590 }
8591 map_p = &data->section_maps[bfd_get_section (sym)->index];
8592
8593 new_map_sym.value = sym->value;
8594 new_map_sym.type = name[1];
8595
8596 /* Assume that most mapping symbols appear in order of increasing
8597 value. If they were randomly distributed, it would be faster to
8598 always push here and then sort at first use. */
8599 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8600 {
8601 struct arm_mapping_symbol *prev_map_sym;
8602
8603 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8604 if (prev_map_sym->value >= sym->value)
8605 {
8606 unsigned int idx;
8607 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8608 arm_compare_mapping_symbols);
8609 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8610 return;
8611 }
8612 }
8613
8614 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8615}
8616
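/* Worked example (illustrative only): if a section's mapping symbols
   arrive as $a at 0x0, $t at 0x40 and then $d at 0x20, the first two
   are appended by the VEC_safe_push above, while the out-of-order $d
   triggers the VEC_lower_bound search and is inserted between them,
   keeping the per-section vector sorted by value.  */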
756fe439 8617static void
61a1198a 8618arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
756fe439 8619{
9779414d 8620 struct gdbarch *gdbarch = get_regcache_arch (regcache);
61a1198a 8621 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
756fe439
DJ
8622
8623 /* If necessary, set the T bit. */
8624 if (arm_apcs_32)
8625 {
9779414d 8626 ULONGEST val, t_bit;
61a1198a 8627 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9779414d
DJ
8628 t_bit = arm_psr_thumb_bit (gdbarch);
8629 if (arm_pc_is_thumb (gdbarch, pc))
8630 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8631 val | t_bit);
756fe439 8632 else
61a1198a 8633 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9779414d 8634 val & ~t_bit);
756fe439
DJ
8635 }
8636}
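/* Illustrative sketch (assumes CPSR_T, the Thumb state bit, is bit 5
   on A/R profiles): writing a PC that lies in Thumb code turns, say,
   CPSR 0x60000010 into 0x60000030 above, while a PC in ARM code clears
   that bit again so execution resumes in ARM state.  */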
123dc839 8637
58d6951d
DJ
8638/* Read the contents of a NEON quad register, by reading from two
8639 double registers. This is used to implement the quad pseudo
8640 registers, and for argument passing in case the quad registers are
8641 missing; vectors are passed in quad registers when using the VFP
8642 ABI, even if a NEON unit is not present. REGNUM is the index of
8643 the quad register, in [0, 15]. */
8644
05d1431c 8645static enum register_status
58d6951d
DJ
8646arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8647 int regnum, gdb_byte *buf)
8648{
8649 char name_buf[4];
8650 gdb_byte reg_buf[8];
8651 int offset, double_regnum;
05d1431c 8652 enum register_status status;
58d6951d 8653
8c042590 8654 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8655 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8656 strlen (name_buf));
8657
8658 /* d0 is always the least significant half of q0. */
8659 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8660 offset = 8;
8661 else
8662 offset = 0;
8663
05d1431c
PA
8664 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8665 if (status != REG_VALID)
8666 return status;
58d6951d
DJ
8667 memcpy (buf + offset, reg_buf, 8);
8668
8669 offset = 8 - offset;
05d1431c
PA
8670 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8671 if (status != REG_VALID)
8672 return status;
58d6951d 8673 memcpy (buf + offset, reg_buf, 8);
05d1431c
PA
8674
8675 return REG_VALID;
58d6951d
DJ
8676}
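/* Worked example (illustrative): a read of q1 formats "d2" into
   name_buf, maps it to a raw register number, and concatenates d2 and
   d3 into the 16-byte destination.  On a little-endian target d2 fills
   bytes 0-7 and d3 bytes 8-15; on a big-endian target the two halves
   are swapped, which is what the 8/0 offset selection above
   implements.  */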
8677
05d1431c 8678static enum register_status
58d6951d
DJ
8679arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8680 int regnum, gdb_byte *buf)
8681{
8682 const int num_regs = gdbarch_num_regs (gdbarch);
8683 char name_buf[4];
8684 gdb_byte reg_buf[8];
8685 int offset, double_regnum;
8686
8687 gdb_assert (regnum >= num_regs);
8688 regnum -= num_regs;
8689
8690 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8691 /* Quad-precision register. */
05d1431c 8692 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
58d6951d
DJ
8693 else
8694 {
05d1431c
PA
8695 enum register_status status;
8696
58d6951d
DJ
8697 /* Single-precision register. */
8698 gdb_assert (regnum < 32);
8699
8700 /* s0 is always the least significant half of d0. */
8701 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8702 offset = (regnum & 1) ? 0 : 4;
8703 else
8704 offset = (regnum & 1) ? 4 : 0;
8705
8c042590 8706 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8707 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8708 strlen (name_buf));
8709
05d1431c
PA
8710 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8711 if (status == REG_VALID)
8712 memcpy (buf, reg_buf + offset, 4);
8713 return status;
58d6951d
DJ
8714 }
8715}
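/* Worked example (illustrative): reading pseudo register s5 maps to
   "d2" (regnum >> 1 == 2); on a little-endian target the offset is 4,
   so the upper word of d2 is returned, while on a big-endian target
   the offset is 0 for the same register.  */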
8716
8717/* Store the contents of BUF to a NEON quad register, by writing to
8718 two double registers. This is used to implement the quad pseudo
8719 registers, and for argument passing in case the quad registers are
8720 missing; vectors are passed in quad registers when using the VFP
8721 ABI, even if a NEON unit is not present. REGNUM is the index
8722 of the quad register, in [0, 15]. */
8723
8724static void
8725arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8726 int regnum, const gdb_byte *buf)
8727{
8728 char name_buf[4];
58d6951d
DJ
8729 int offset, double_regnum;
8730
8c042590 8731 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
58d6951d
DJ
8732 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8733 strlen (name_buf));
8734
8735 /* d0 is always the least significant half of q0. */
8736 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8737 offset = 8;
8738 else
8739 offset = 0;
8740
8741 regcache_raw_write (regcache, double_regnum, buf + offset);
8742 offset = 8 - offset;
8743 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8744}
8745
8746static void
8747arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8748 int regnum, const gdb_byte *buf)
8749{
8750 const int num_regs = gdbarch_num_regs (gdbarch);
8751 char name_buf[4];
8752 gdb_byte reg_buf[8];
8753 int offset, double_regnum;
8754
8755 gdb_assert (regnum >= num_regs);
8756 regnum -= num_regs;
8757
8758 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8759 /* Quad-precision register. */
8760 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8761 else
8762 {
8763 /* Single-precision register. */
8764 gdb_assert (regnum < 32);
8765
8766 /* s0 is always the least significant half of d0. */
8767 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8768 offset = (regnum & 1) ? 0 : 4;
8769 else
8770 offset = (regnum & 1) ? 4 : 0;
8771
8c042590 8772 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
58d6951d
DJ
8773 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8774 strlen (name_buf));
8775
8776 regcache_raw_read (regcache, double_regnum, reg_buf);
8777 memcpy (reg_buf + offset, buf, 4);
8778 regcache_raw_write (regcache, double_regnum, reg_buf);
8779 }
8780}
8781
123dc839
DJ
8782static struct value *
8783value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8784{
9a3c8263 8785 const int *reg_p = (const int *) baton;
123dc839
DJ
8786 return value_of_register (*reg_p, frame);
8787}
97e03143 8788\f
70f80edf
JT
8789static enum gdb_osabi
8790arm_elf_osabi_sniffer (bfd *abfd)
97e03143 8791{
2af48f68 8792 unsigned int elfosabi;
70f80edf 8793 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
97e03143 8794
70f80edf 8795 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
97e03143 8796
28e97307
DJ
8797 if (elfosabi == ELFOSABI_ARM)
8798 /* GNU tools use this value. Check note sections in this case,
8799 as well. */
8800 bfd_map_over_sections (abfd,
8801 generic_elf_osabi_sniff_abi_tag_sections,
8802 &osabi);
97e03143 8803
28e97307 8804 /* Anything else will be handled by the generic ELF sniffer. */
70f80edf 8805 return osabi;
97e03143
RE
8806}
8807
54483882
YQ
8808static int
8809arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8810 struct reggroup *group)
8811{
2c291032
YQ
8812 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8813 this, FPS register belongs to save_regroup, restore_reggroup, and
8814 all_reggroup, of course. */
54483882 8815 if (regnum == ARM_FPS_REGNUM)
2c291032
YQ
8816 return (group == float_reggroup
8817 || group == save_reggroup
8818 || group == restore_reggroup
8819 || group == all_reggroup);
54483882
YQ
8820 else
8821 return default_register_reggroup_p (gdbarch, regnum, group);
8822}
8823
25f8c692
JL
8824\f
8825/* For backward-compatibility we allow two 'g' packet lengths with
8826 the remote protocol depending on whether FPA registers are
8827 supplied. M-profile targets do not have FPA registers, but some
8828 stubs already exist in the wild which use a 'g' packet which
8829 supplies them albeit with dummy values. The packet format which
8830 includes FPA registers should be considered deprecated for
8831 M-profile targets. */
8832
8833static void
8834arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8835{
8836 if (gdbarch_tdep (gdbarch)->is_m)
8837 {
8838 /* If we know from the executable this is an M-profile target,
8839 cater for remote targets whose register set layout is the
8840 same as the FPA layout. */
8841 register_remote_g_packet_guess (gdbarch,
03145bf4 8842 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
25f8c692
JL
8843 (16 * INT_REGISTER_SIZE)
8844 + (8 * FP_REGISTER_SIZE)
8845 + (2 * INT_REGISTER_SIZE),
8846 tdesc_arm_with_m_fpa_layout);
8847
8848 /* The regular M-profile layout. */
8849 register_remote_g_packet_guess (gdbarch,
8850 /* r0-r12,sp,lr,pc; xpsr */
8851 (16 * INT_REGISTER_SIZE)
8852 + INT_REGISTER_SIZE,
8853 tdesc_arm_with_m);
3184d3f9
JL
8854
8855 /* M-profile plus M4F VFP. */
8856 register_remote_g_packet_guess (gdbarch,
8857 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8858 (16 * INT_REGISTER_SIZE)
8859 + (16 * VFP_REGISTER_SIZE)
8860 + (2 * INT_REGISTER_SIZE),
8861 tdesc_arm_with_m_vfp_d16);
25f8c692
JL
8862 }
8863
8864 /* Otherwise we don't have a useful guess. */
8865}
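/* Worked sizes (illustrative; assumes INT_REGISTER_SIZE == 4,
   FP_REGISTER_SIZE == 12 and VFP_REGISTER_SIZE == 8):
     FPA-style layout:        16*4 + 8*12 + 2*4 = 168 bytes
     plain M-profile:         16*4 + 4          =  68 bytes
     M-profile + VFP d0-d15:  16*4 + 16*8 + 2*4 = 200 bytes
   A remote stub whose 'g' reply has one of these lengths is matched to
   the corresponding target description registered above.  */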
8866
70f80edf 8867\f
da3c6d4a
MS
8868/* Initialize the current architecture based on INFO. If possible,
8869 re-use an architecture from ARCHES, which is a list of
8870 architectures already created during this debugging session.
97e03143 8871
da3c6d4a
MS
8872 Called e.g. at program startup, when reading a core file, and when
8873 reading a binary file. */
97e03143 8874
39bbf761
RE
8875static struct gdbarch *
8876arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8877{
97e03143 8878 struct gdbarch_tdep *tdep;
39bbf761 8879 struct gdbarch *gdbarch;
28e97307
DJ
8880 struct gdbarch_list *best_arch;
8881 enum arm_abi_kind arm_abi = arm_abi_global;
8882 enum arm_float_model fp_model = arm_fp_model;
123dc839 8883 struct tdesc_arch_data *tdesc_data = NULL;
9779414d 8884 int i, is_m = 0;
330c6ca9 8885 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
a56cc1ce 8886 int have_wmmx_registers = 0;
58d6951d 8887 int have_neon = 0;
ff6f572f 8888 int have_fpa_registers = 1;
9779414d
DJ
8889 const struct target_desc *tdesc = info.target_desc;
8890
8891 /* If we have an object to base this architecture on, try to determine
8892 its ABI. */
8893
8894 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8895 {
8896 int ei_osabi, e_flags;
8897
8898 switch (bfd_get_flavour (info.abfd))
8899 {
8900 case bfd_target_aout_flavour:
8901 /* Assume it's an old APCS-style ABI. */
8902 arm_abi = ARM_ABI_APCS;
8903 break;
8904
8905 case bfd_target_coff_flavour:
8906 /* Assume it's an old APCS-style ABI. */
8907 /* XXX WinCE? */
8908 arm_abi = ARM_ABI_APCS;
8909 break;
8910
8911 case bfd_target_elf_flavour:
8912 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8913 e_flags = elf_elfheader (info.abfd)->e_flags;
8914
8915 if (ei_osabi == ELFOSABI_ARM)
8916 {
8917 /* GNU tools used to use this value, but do not for EABI
8918 objects. There's nowhere to tag an EABI version
8919 anyway, so assume APCS. */
8920 arm_abi = ARM_ABI_APCS;
8921 }
d403db27 8922 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9779414d
DJ
8923 {
8924 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8925 int attr_arch, attr_profile;
8926
8927 switch (eabi_ver)
8928 {
8929 case EF_ARM_EABI_UNKNOWN:
8930 /* Assume GNU tools. */
8931 arm_abi = ARM_ABI_APCS;
8932 break;
8933
8934 case EF_ARM_EABI_VER4:
8935 case EF_ARM_EABI_VER5:
8936 arm_abi = ARM_ABI_AAPCS;
8937 /* EABI binaries default to VFP float ordering.
8938 They may also contain build attributes that can
8939 be used to identify if the VFP argument-passing
8940 ABI is in use. */
8941 if (fp_model == ARM_FLOAT_AUTO)
8942 {
8943#ifdef HAVE_ELF
8944 switch (bfd_elf_get_obj_attr_int (info.abfd,
8945 OBJ_ATTR_PROC,
8946 Tag_ABI_VFP_args))
8947 {
b35b0298 8948 case AEABI_VFP_args_base:
9779414d
DJ
8949 /* "The user intended FP parameter/result
8950 passing to conform to AAPCS, base
8951 variant". */
8952 fp_model = ARM_FLOAT_SOFT_VFP;
8953 break;
b35b0298 8954 case AEABI_VFP_args_vfp:
9779414d
DJ
8955 /* "The user intended FP parameter/result
8956 passing to conform to AAPCS, VFP
8957 variant". */
8958 fp_model = ARM_FLOAT_VFP;
8959 break;
b35b0298 8960 case AEABI_VFP_args_toolchain:
9779414d
DJ
8961 /* "The user intended FP parameter/result
8962 passing to conform to tool chain-specific
8963 conventions" - we don't know any such
8964 conventions, so leave it as "auto". */
8965 break;
b35b0298 8966 case AEABI_VFP_args_compatible:
5c294fee
TG
8967 /* "Code is compatible with both the base
8968 and VFP variants; the user did not permit
8969 non-variadic functions to pass FP
8970 parameters/results" - leave it as
8971 "auto". */
8972 break;
9779414d
DJ
8973 default:
8974 /* Attribute value not mentioned in the
5c294fee 8975 November 2012 ABI, so leave it as
9779414d
DJ
8976 "auto". */
8977 break;
8978 }
8979#else
8980 fp_model = ARM_FLOAT_SOFT_VFP;
8981#endif
8982 }
8983 break;
8984
8985 default:
8986 /* Leave it as "auto". */
8987 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8988 break;
8989 }
8990
8991#ifdef HAVE_ELF
8992 /* Detect M-profile programs. This only works if the
8993 executable file includes build attributes; GCC does
8994 copy them to the executable, but e.g. RealView does
8995 not. */
8996 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8997 Tag_CPU_arch);
0963b4bd
MS
8998 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8999 OBJ_ATTR_PROC,
9779414d
DJ
9000 Tag_CPU_arch_profile);
9001 /* GCC specifies the profile for v6-M; RealView only
9002 specifies the profile for architectures starting with
9003 V7 (as opposed to architectures with a tag
9004 numerically greater than TAG_CPU_ARCH_V7). */
9005 if (!tdesc_has_registers (tdesc)
9006 && (attr_arch == TAG_CPU_ARCH_V6_M
9007 || attr_arch == TAG_CPU_ARCH_V6S_M
9008 || attr_profile == 'M'))
25f8c692 9009 is_m = 1;
9779414d
DJ
9010#endif
9011 }
9012
9013 if (fp_model == ARM_FLOAT_AUTO)
9014 {
9015 int e_flags = elf_elfheader (info.abfd)->e_flags;
9016
9017 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9018 {
9019 case 0:
9020 /* Leave it as "auto". Strictly speaking this case
9021 means FPA, but almost nobody uses that now, and
9022 many toolchains fail to set the appropriate bits
9023 for the floating-point model they use. */
9024 break;
9025 case EF_ARM_SOFT_FLOAT:
9026 fp_model = ARM_FLOAT_SOFT_FPA;
9027 break;
9028 case EF_ARM_VFP_FLOAT:
9029 fp_model = ARM_FLOAT_VFP;
9030 break;
9031 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9032 fp_model = ARM_FLOAT_SOFT_VFP;
9033 break;
9034 }
9035 }
9036
9037 if (e_flags & EF_ARM_BE8)
9038 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9039
9040 break;
9041
9042 default:
9043 /* Leave it as "auto". */
9044 break;
9045 }
9046 }
123dc839
DJ
9047
9048 /* Check any target description for validity. */
9779414d 9049 if (tdesc_has_registers (tdesc))
123dc839
DJ
9050 {
9051 /* For most registers we require GDB's default names; but also allow
9052 the numeric names for sp / lr / pc, as a convenience. */
9053 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9054 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9055 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9056
9057 const struct tdesc_feature *feature;
58d6951d 9058 int valid_p;
123dc839 9059
9779414d 9060 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9061 "org.gnu.gdb.arm.core");
9062 if (feature == NULL)
9779414d
DJ
9063 {
9064 feature = tdesc_find_feature (tdesc,
9065 "org.gnu.gdb.arm.m-profile");
9066 if (feature == NULL)
9067 return NULL;
9068 else
9069 is_m = 1;
9070 }
123dc839
DJ
9071
9072 tdesc_data = tdesc_data_alloc ();
9073
9074 valid_p = 1;
9075 for (i = 0; i < ARM_SP_REGNUM; i++)
9076 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9077 arm_register_names[i]);
9078 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9079 ARM_SP_REGNUM,
9080 arm_sp_names);
9081 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9082 ARM_LR_REGNUM,
9083 arm_lr_names);
9084 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9085 ARM_PC_REGNUM,
9086 arm_pc_names);
9779414d
DJ
9087 if (is_m)
9088 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9089 ARM_PS_REGNUM, "xpsr");
9090 else
9091 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9092 ARM_PS_REGNUM, "cpsr");
123dc839
DJ
9093
9094 if (!valid_p)
9095 {
9096 tdesc_data_cleanup (tdesc_data);
9097 return NULL;
9098 }
9099
9779414d 9100 feature = tdesc_find_feature (tdesc,
123dc839
DJ
9101 "org.gnu.gdb.arm.fpa");
9102 if (feature != NULL)
9103 {
9104 valid_p = 1;
9105 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9106 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9107 arm_register_names[i]);
9108 if (!valid_p)
9109 {
9110 tdesc_data_cleanup (tdesc_data);
9111 return NULL;
9112 }
9113 }
ff6f572f
DJ
9114 else
9115 have_fpa_registers = 0;
9116
9779414d 9117 feature = tdesc_find_feature (tdesc,
ff6f572f
DJ
9118 "org.gnu.gdb.xscale.iwmmxt");
9119 if (feature != NULL)
9120 {
9121 static const char *const iwmmxt_names[] = {
9122 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9123 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9124 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9125 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9126 };
9127
9128 valid_p = 1;
9129 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9130 valid_p
9131 &= tdesc_numbered_register (feature, tdesc_data, i,
9132 iwmmxt_names[i - ARM_WR0_REGNUM]);
9133
9134 /* Check for the control registers, but do not fail if they
9135 are missing. */
9136 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9137 tdesc_numbered_register (feature, tdesc_data, i,
9138 iwmmxt_names[i - ARM_WR0_REGNUM]);
9139
9140 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9141 valid_p
9142 &= tdesc_numbered_register (feature, tdesc_data, i,
9143 iwmmxt_names[i - ARM_WR0_REGNUM]);
9144
9145 if (!valid_p)
9146 {
9147 tdesc_data_cleanup (tdesc_data);
9148 return NULL;
9149 }
a56cc1ce
YQ
9150
9151 have_wmmx_registers = 1;
ff6f572f 9152 }
58d6951d
DJ
9153
9154 /* If we have a VFP unit, check whether the single precision registers
9155 are present. If not, then we will synthesize them as pseudo
9156 registers. */
9779414d 9157 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9158 "org.gnu.gdb.arm.vfp");
9159 if (feature != NULL)
9160 {
9161 static const char *const vfp_double_names[] = {
9162 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9163 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9164 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9165 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9166 };
9167
9168 /* Require the double precision registers. There must be either
9169 16 or 32. */
9170 valid_p = 1;
9171 for (i = 0; i < 32; i++)
9172 {
9173 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9174 ARM_D0_REGNUM + i,
9175 vfp_double_names[i]);
9176 if (!valid_p)
9177 break;
9178 }
2b9e5ea6
UW
9179 if (!valid_p && i == 16)
9180 valid_p = 1;
58d6951d 9181
2b9e5ea6
UW
9182 /* Also require FPSCR. */
9183 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9184 ARM_FPSCR_REGNUM, "fpscr");
9185 if (!valid_p)
58d6951d
DJ
9186 {
9187 tdesc_data_cleanup (tdesc_data);
9188 return NULL;
9189 }
9190
9191 if (tdesc_unnumbered_register (feature, "s0") == 0)
9192 have_vfp_pseudos = 1;
9193
330c6ca9 9194 vfp_register_count = i;
58d6951d
DJ
9195
9196 /* If we have VFP, also check for NEON. The architecture allows
9197 NEON without VFP (integer vector operations only), but GDB
9198 does not support that. */
9779414d 9199 feature = tdesc_find_feature (tdesc,
58d6951d
DJ
9200 "org.gnu.gdb.arm.neon");
9201 if (feature != NULL)
9202 {
9203 /* NEON requires 32 double-precision registers. */
9204 if (i != 32)
9205 {
9206 tdesc_data_cleanup (tdesc_data);
9207 return NULL;
9208 }
9209
9210 /* If there are quad registers defined by the stub, use
9211 their type; otherwise (normally) provide them with
9212 the default type. */
9213 if (tdesc_unnumbered_register (feature, "q0") == 0)
9214 have_neon_pseudos = 1;
9215
9216 have_neon = 1;
9217 }
9218 }
123dc839 9219 }
39bbf761 9220
28e97307
DJ
9221 /* If there is already a candidate, use it. */
9222 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9223 best_arch != NULL;
9224 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9225 {
b8926edc
DJ
9226 if (arm_abi != ARM_ABI_AUTO
9227 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
28e97307
DJ
9228 continue;
9229
b8926edc
DJ
9230 if (fp_model != ARM_FLOAT_AUTO
9231 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
28e97307
DJ
9232 continue;
9233
58d6951d
DJ
9234 /* There are various other properties in tdep that we do not
9235 need to check here: those derived from a target description,
9236 since gdbarches with a different target description are
9237 automatically disqualified. */
9238
9779414d
DJ
9239 /* Do check is_m, though, since it might come from the binary. */
9240 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9241 continue;
9242
28e97307
DJ
9243 /* Found a match. */
9244 break;
9245 }
97e03143 9246
28e97307 9247 if (best_arch != NULL)
123dc839
DJ
9248 {
9249 if (tdesc_data != NULL)
9250 tdesc_data_cleanup (tdesc_data);
9251 return best_arch->gdbarch;
9252 }
28e97307 9253
8d749320 9254 tdep = XCNEW (struct gdbarch_tdep);
97e03143
RE
9255 gdbarch = gdbarch_alloc (&info, tdep);
9256
28e97307
DJ
9257 /* Record additional information about the architecture we are defining.
9258 These are gdbarch discriminators, like the OSABI. */
9259 tdep->arm_abi = arm_abi;
9260 tdep->fp_model = fp_model;
9779414d 9261 tdep->is_m = is_m;
ff6f572f 9262 tdep->have_fpa_registers = have_fpa_registers;
a56cc1ce 9263 tdep->have_wmmx_registers = have_wmmx_registers;
330c6ca9
YQ
9264 gdb_assert (vfp_register_count == 0
9265 || vfp_register_count == 16
9266 || vfp_register_count == 32);
9267 tdep->vfp_register_count = vfp_register_count;
58d6951d
DJ
9268 tdep->have_vfp_pseudos = have_vfp_pseudos;
9269 tdep->have_neon_pseudos = have_neon_pseudos;
9270 tdep->have_neon = have_neon;
08216dd7 9271
25f8c692
JL
9272 arm_register_g_packet_guesses (gdbarch);
9273
08216dd7 9274 /* Breakpoints. */
9d4fde75 9275 switch (info.byte_order_for_code)
67255d04
RE
9276 {
9277 case BFD_ENDIAN_BIG:
66e810cd
RE
9278 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9279 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9280 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9281 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9282
67255d04
RE
9283 break;
9284
9285 case BFD_ENDIAN_LITTLE:
66e810cd
RE
9286 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9287 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9288 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9289 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9290
67255d04
RE
9291 break;
9292
9293 default:
9294 internal_error (__FILE__, __LINE__,
edefbb7c 9295 _("arm_gdbarch_init: bad byte order for float format"));
67255d04
RE
9296 }
9297
d7b486e7
RE
9298 /* On ARM targets char defaults to unsigned. */
9299 set_gdbarch_char_signed (gdbarch, 0);
9300
cca44b1b
JB
9301 /* Note: for displaced stepping, this includes the breakpoint, and one word
9302 of additional scratch space. This setting isn't used for anything beside
9303 displaced stepping at present. */
9304 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9305
9df628e0 9306 /* This should be low enough for everything. */
97e03143 9307 tdep->lowest_pc = 0x20;
94c30b78 9308 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
97e03143 9309
7c00367c
MK
9310 /* The default, for both APCS and AAPCS, is to return small
9311 structures in registers. */
9312 tdep->struct_return = reg_struct_return;
9313
2dd604e7 9314 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
f53f0d0b 9315 set_gdbarch_frame_align (gdbarch, arm_frame_align);
39bbf761 9316
756fe439
DJ
9317 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9318
148754e5 9319 /* Frame handling. */
a262aec2 9320 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
eb5492fa
DJ
9321 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9322 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9323
eb5492fa 9324 frame_base_set_default (gdbarch, &arm_normal_base);
148754e5 9325
34e8f22d 9326 /* Address manipulation. */
34e8f22d
RE
9327 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9328
34e8f22d
RE
9329 /* Advance PC across function entry code. */
9330 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9331
c9cf6e20
MG
9332 /* Detect whether PC is at a point where the stack has been destroyed. */
9333 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
4024ca99 9334
190dce09
UW
9335 /* Skip trampolines. */
9336 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9337
34e8f22d
RE
9338 /* The stack grows downward. */
9339 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9340
9341 /* Breakpoint manipulation. */
9342 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
177321bd
DJ
9343 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9344 arm_remote_breakpoint_from_pc);
34e8f22d
RE
9345
9346 /* Information about registers, etc. */
34e8f22d
RE
9347 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9348 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
ff6f572f 9349 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7a5ea0d4 9350 set_gdbarch_register_type (gdbarch, arm_register_type);
54483882 9351 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
34e8f22d 9352
ff6f572f
DJ
9353 /* This "info float" is FPA-specific. Use the generic version if we
9354 do not have FPA. */
9355 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9356 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9357
26216b98 9358 /* Internal <-> external register number maps. */
ff6f572f 9359 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
26216b98
AC
9360 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9361
34e8f22d
RE
9362 set_gdbarch_register_name (gdbarch, arm_register_name);
9363
9364 /* Returning results. */
2af48f68 9365 set_gdbarch_return_value (gdbarch, arm_return_value);
34e8f22d 9366
03d48a7d
RE
9367 /* Disassembly. */
9368 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9369
34e8f22d
RE
9370 /* Minsymbol frobbing. */
9371 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9372 set_gdbarch_coff_make_msymbol_special (gdbarch,
9373 arm_coff_make_msymbol_special);
60c5725c 9374 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
34e8f22d 9375
f9d67f43
DJ
9376 /* Thumb-2 IT block support. */
9377 set_gdbarch_adjust_breakpoint_address (gdbarch,
9378 arm_adjust_breakpoint_address);
9379
0d5de010
DJ
9380 /* Virtual tables. */
9381 set_gdbarch_vbit_in_delta (gdbarch, 1);
9382
97e03143 9383 /* Hook in the ABI-specific overrides, if they have been registered. */
4be87837 9384 gdbarch_init_osabi (info, gdbarch);
97e03143 9385
b39cc962
DJ
9386 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9387
eb5492fa 9388 /* Add some default predicates. */
2ae28aa9
YQ
9389 if (is_m)
9390 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
a262aec2
DJ
9391 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9392 dwarf2_append_unwinders (gdbarch);
0e9e9abd 9393 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
779aa56f 9394 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
a262aec2 9395 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
eb5492fa 9396
97e03143
RE
9397 /* Now we have tuned the configuration, set a few final things,
9398 based on what the OS ABI has told us. */
9399
b8926edc
DJ
9400 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9401 binaries are always marked. */
9402 if (tdep->arm_abi == ARM_ABI_AUTO)
9403 tdep->arm_abi = ARM_ABI_APCS;
9404
e3039479
UW
9405 /* Watchpoints are not steppable. */
9406 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9407
b8926edc
DJ
9408 /* We used to default to FPA for generic ARM, but almost nobody
9409 uses that now, and we now provide a way for the user to force
9410 the model. So default to the most useful variant. */
9411 if (tdep->fp_model == ARM_FLOAT_AUTO)
9412 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9413
9df628e0
RE
9414 if (tdep->jb_pc >= 0)
9415 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9416
08216dd7 9417 /* Floating point sizes and format. */
8da61cc4 9418 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
b8926edc 9419 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
08216dd7 9420 {
8da61cc4
DJ
9421 set_gdbarch_double_format
9422 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9423 set_gdbarch_long_double_format
9424 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9425 }
9426 else
9427 {
9428 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9429 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
08216dd7
RE
9430 }
9431
58d6951d
DJ
9432 if (have_vfp_pseudos)
9433 {
9434 /* NOTE: These are the only pseudo registers used by
9435 the ARM target at the moment. If more are added, a
9436 little more care in numbering will be needed. */
9437
9438 int num_pseudos = 32;
9439 if (have_neon_pseudos)
9440 num_pseudos += 16;
9441 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9442 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9443 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9444 }
9445
123dc839 9446 if (tdesc_data)
58d6951d
DJ
9447 {
9448 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9449
9779414d 9450 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
58d6951d
DJ
9451
9452 /* Override tdesc_register_type to adjust the types of VFP
9453 registers for NEON. */
9454 set_gdbarch_register_type (gdbarch, arm_register_type);
9455 }
123dc839
DJ
9456
9457 /* Add standard register aliases. We add aliases even for those
9458 names which are used by the current architecture - it's simpler,
9459 and does no harm, since nothing ever lists user registers. */
9460 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9461 user_reg_add (gdbarch, arm_register_aliases[i].name,
9462 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9463
39bbf761
RE
9464 return gdbarch;
9465}
9466
97e03143 9467static void
2af46ca0 9468arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
97e03143 9469{
2af46ca0 9470 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
97e03143
RE
9471
9472 if (tdep == NULL)
9473 return;
9474
edefbb7c 9475 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
97e03143
RE
9476 (unsigned long) tdep->lowest_pc);
9477}
9478
a78f21af
AC
9479extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9480
c906108c 9481void
ed9a39eb 9482_initialize_arm_tdep (void)
c906108c 9483{
bc90b915
FN
9484 struct ui_file *stb;
9485 long length;
53904c9e
AC
9486 const char *setname;
9487 const char *setdesc;
4bd7b427 9488 const char *const *regnames;
bec2ab5a 9489 int i;
bc90b915 9490 static char *helptext;
edefbb7c
AC
9491 char regdesc[1024], *rdptr = regdesc;
9492 size_t rest = sizeof (regdesc);
085dd6e6 9493
42cf1509 9494 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
97e03143 9495
60c5725c 9496 arm_objfile_data_key
c1bd65d0 9497 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
60c5725c 9498
0e9e9abd
UW
9499 /* Add ourselves to objfile event chain. */
9500 observer_attach_new_objfile (arm_exidx_new_objfile);
9501 arm_exidx_data_key
9502 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9503
70f80edf
JT
9504 /* Register an ELF OS ABI sniffer for ARM binaries. */
9505 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9506 bfd_target_elf_flavour,
9507 arm_elf_osabi_sniffer);
9508
9779414d
DJ
9509 /* Initialize the standard target descriptions. */
9510 initialize_tdesc_arm_with_m ();
25f8c692 9511 initialize_tdesc_arm_with_m_fpa_layout ();
3184d3f9 9512 initialize_tdesc_arm_with_m_vfp_d16 ();
ef7e8358
UW
9513 initialize_tdesc_arm_with_iwmmxt ();
9514 initialize_tdesc_arm_with_vfpv2 ();
9515 initialize_tdesc_arm_with_vfpv3 ();
9516 initialize_tdesc_arm_with_neon ();
9779414d 9517
94c30b78 9518 /* Get the number of possible sets of register names defined in opcodes. */
afd7eef0
RE
9519 num_disassembly_options = get_arm_regname_num_options ();
9520
9521 /* Add root prefix command for all "set arm"/"show arm" commands. */
9522 add_prefix_cmd ("arm", no_class, set_arm_command,
edefbb7c 9523 _("Various ARM-specific commands."),
afd7eef0
RE
9524 &setarmcmdlist, "set arm ", 0, &setlist);
9525
9526 add_prefix_cmd ("arm", no_class, show_arm_command,
edefbb7c 9527 _("Various ARM-specific commands."),
afd7eef0 9528 &showarmcmdlist, "show arm ", 0, &showlist);
bc90b915 9529
94c30b78 9530 /* Sync the opcode insn printer with our register viewer. */
bc90b915 9531 parse_arm_disassembler_option ("reg-names-std");
c5aa993b 9532
eefe576e
AC
9533 /* Initialize the array that will be passed to
9534 add_setshow_enum_cmd(). */
8d749320
SM
9535 valid_disassembly_styles = XNEWVEC (const char *,
9536 num_disassembly_options + 1);
afd7eef0 9537 for (i = 0; i < num_disassembly_options; i++)
bc90b915 9538 {
bec2ab5a 9539 get_arm_regnames (i, &setname, &setdesc, &regnames);
afd7eef0 9540 valid_disassembly_styles[i] = setname;
edefbb7c
AC
9541 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9542 rdptr += length;
9543 rest -= length;
123dc839
DJ
9544 /* When we find the default names, tell the disassembler to use
9545 them. */
bc90b915
FN
9546 if (!strcmp (setname, "std"))
9547 {
afd7eef0 9548 disassembly_style = setname;
bc90b915
FN
9549 set_arm_regname_option (i);
9550 }
9551 }
94c30b78 9552 /* Mark the end of valid options. */
afd7eef0 9553 valid_disassembly_styles[num_disassembly_options] = NULL;
c906108c 9554
edefbb7c
AC
9555 /* Create the help text. */
9556 stb = mem_fileopen ();
9557 fprintf_unfiltered (stb, "%s%s%s",
9558 _("The valid values are:\n"),
9559 regdesc,
9560 _("The default is \"std\"."));
759ef836 9561 helptext = ui_file_xstrdup (stb, NULL);
bc90b915 9562 ui_file_delete (stb);
ed9a39eb 9563
edefbb7c
AC
9564 add_setshow_enum_cmd("disassembler", no_class,
9565 valid_disassembly_styles, &disassembly_style,
9566 _("Set the disassembly style."),
9567 _("Show the disassembly style."),
9568 helptext,
2c5b56ce 9569 set_disassembly_style_sfunc,
0963b4bd
MS
9570 NULL, /* FIXME: i18n: The disassembly style is
9571 \"%s\". */
7376b4c2 9572 &setarmcmdlist, &showarmcmdlist);
edefbb7c
AC
9573
9574 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9575 _("Set usage of ARM 32-bit mode."),
9576 _("Show usage of ARM 32-bit mode."),
9577 _("When off, a 26-bit PC will be used."),
2c5b56ce 9578 NULL,
0963b4bd
MS
9579 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9580 mode is %s. */
26304000 9581 &setarmcmdlist, &showarmcmdlist);
c906108c 9582
fd50bc42 9583 /* Add a command to allow the user to force the FPU model. */
edefbb7c
AC
9584 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9585 _("Set the floating point type."),
9586 _("Show the floating point type."),
9587 _("auto - Determine the FP type from the OS-ABI.\n\
9588softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9589fpa - FPA co-processor (GCC compiled).\n\
9590softvfp - Software FP with pure-endian doubles.\n\
9591vfp - VFP co-processor."),
edefbb7c 9592 set_fp_model_sfunc, show_fp_model,
7376b4c2 9593 &setarmcmdlist, &showarmcmdlist);
fd50bc42 9594
28e97307
DJ
9595 /* Add a command to allow the user to force the ABI. */
9596 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9597 _("Set the ABI."),
9598 _("Show the ABI."),
9599 NULL, arm_set_abi, arm_show_abi,
9600 &setarmcmdlist, &showarmcmdlist);
9601
0428b8f5
DJ
9602 /* Add two commands to allow the user to force the assumed
9603 execution mode. */
9604 add_setshow_enum_cmd ("fallback-mode", class_support,
9605 arm_mode_strings, &arm_fallback_mode_string,
9606 _("Set the mode assumed when symbols are unavailable."),
9607 _("Show the mode assumed when symbols are unavailable."),
9608 NULL, NULL, arm_show_fallback_mode,
9609 &setarmcmdlist, &showarmcmdlist);
9610 add_setshow_enum_cmd ("force-mode", class_support,
9611 arm_mode_strings, &arm_force_mode_string,
9612 _("Set the mode assumed even when symbols are available."),
9613 _("Show the mode assumed even when symbols are available."),
9614 NULL, NULL, arm_show_force_mode,
9615 &setarmcmdlist, &showarmcmdlist);
9616
6529d2dd 9617 /* Debugging flag. */
edefbb7c
AC
9618 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9619 _("Set ARM debugging."),
9620 _("Show ARM debugging."),
9621 _("When on, arm-specific debugging is enabled."),
2c5b56ce 9622 NULL,
7915a72c 9623 NULL, /* FIXME: i18n: "ARM debugging is %s." */
26304000 9624 &setdebuglist, &showdebuglist);
c906108c 9625}
72508ac0
PO
9626
9627/* ARM-reversible process record data structures. */
9628
9629#define ARM_INSN_SIZE_BYTES 4
9630#define THUMB_INSN_SIZE_BYTES 2
9631#define THUMB2_INSN_SIZE_BYTES 4
9632
9633
71e396f9
LM
9634/* Position of the bit within a 32-bit ARM instruction
9635 that defines whether the instruction is a load or store. */
72508ac0
PO
9636#define INSN_S_L_BIT_NUM 20
9637
9638#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9639 do \
9640 { \
9641 unsigned int reg_len = LENGTH; \
9642 if (reg_len) \
9643 { \
9644 REGS = XNEWVEC (uint32_t, reg_len); \
9645 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9646 } \
9647 } \
9648 while (0)
9649
9650#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9651 do \
9652 { \
9653 unsigned int mem_len = LENGTH; \
9654 if (mem_len) \
9655 { \
9656 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9657 memcpy(&MEMS->len, &RECORD_BUF[0], \
9658 sizeof(struct arm_mem_r) * LENGTH); \
9659 } \
9660 } \
9661 while (0)
9662
9663/* Checks whether the insn has already been recorded (boolean expression). */
9664#define INSN_RECORDED(ARM_RECORD) \
9665 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9666
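/* Usage sketch (illustrative, based on how these macros are used later
   in this file): once a decode routine has filled record_buf[] and
   record_buf_mem[] and set the two counts, the caller typically
   finishes with

     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                record_buf_mem);

   which heap-allocates the result arrays and copies the collected
   register numbers and memory descriptors into the record.  */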
9667/* ARM memory record structure. */
9668struct arm_mem_r
9669{
9670 uint32_t len; /* Record length. */
bfbbec00 9671 uint32_t addr; /* Memory address. */
72508ac0
PO
9672};
9673
9674/* ARM instruction record contains opcode of current insn
9675 and execution state (before entry to decode_insn()),
9676 contains list of to-be-modified registers and
9677 memory blocks (on return from decode_insn()). */
9678
9679typedef struct insn_decode_record_t
9680{
9681 struct gdbarch *gdbarch;
9682 struct regcache *regcache;
9683 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9684 uint32_t arm_insn; /* Should accommodate thumb. */
9685 uint32_t cond; /* Condition code. */
9686 uint32_t opcode; /* Insn opcode. */
9687 uint32_t decode; /* Insn decode bits. */
9688 uint32_t mem_rec_count; /* No of mem records. */
9689 uint32_t reg_rec_count; /* No of reg records. */
9690 uint32_t *arm_regs; /* Registers to be saved for this record. */
9691 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9692} insn_decode_record;
9693
9694
9695/* Checks ARM SBZ and SBO mandatory fields. */
9696
9697static int
9698sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9699{
9700 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9701
9702 if (!len)
9703 return 1;
9704
9705 if (!sbo)
9706 ones = ~ones;
9707
9708 while (ones)
9709 {
9710 if (!(ones & sbo))
9711 {
9712 return 0;
9713 }
9714 ones = ones >> 1;
9715 }
9716 return 1;
9717}
9718
c6ec2b30
OJ
9719enum arm_record_result
9720{
9721 ARM_RECORD_SUCCESS = 0,
9722 ARM_RECORD_FAILURE = 1
9723};
9724
72508ac0
PO
9725typedef enum
9726{
9727 ARM_RECORD_STRH=1,
9728 ARM_RECORD_STRD
9729} arm_record_strx_t;
9730
9731typedef enum
9732{
9733 ARM_RECORD=1,
9734 THUMB_RECORD,
9735 THUMB2_RECORD
9736} record_type_t;
9737
9738
9739static int
9740arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9741 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9742{
9743
9744 struct regcache *reg_cache = arm_insn_r->regcache;
9745 ULONGEST u_regval[2]= {0};
9746
9747 uint32_t reg_src1 = 0, reg_src2 = 0;
9748 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
72508ac0
PO
9749
9750 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9751 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
72508ac0
PO
9752
9753 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9754 {
9755 /* 1) Handle misc store, immediate offset. */
9756 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9757 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9758 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9759 regcache_raw_read_unsigned (reg_cache, reg_src1,
9760 &u_regval[0]);
9761 if (ARM_PC_REGNUM == reg_src1)
9762 {
9763 /* If R15 was used as Rn, the value read is the current PC+8. */
9764 u_regval[0] = u_regval[0] + 8;
9765 }
9766 offset_8 = (immed_high << 4) | immed_low;
9767 /* Calculate target store address. */
9768 if (14 == arm_insn_r->opcode)
9769 {
9770 tgt_mem_addr = u_regval[0] + offset_8;
9771 }
9772 else
9773 {
9774 tgt_mem_addr = u_regval[0] - offset_8;
9775 }
9776 if (ARM_RECORD_STRH == str_type)
9777 {
9778 record_buf_mem[0] = 2;
9779 record_buf_mem[1] = tgt_mem_addr;
9780 arm_insn_r->mem_rec_count = 1;
9781 }
9782 else if (ARM_RECORD_STRD == str_type)
9783 {
9784 record_buf_mem[0] = 4;
9785 record_buf_mem[1] = tgt_mem_addr;
9786 record_buf_mem[2] = 4;
9787 record_buf_mem[3] = tgt_mem_addr + 4;
9788 arm_insn_r->mem_rec_count = 2;
9789 }
9790 }
9791 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9792 {
9793 /* 2) Store, register offset. */
9794 /* Get Rm. */
9795 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9796 /* Get Rn. */
9797 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9798 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9799 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9800 if (15 == reg_src2)
9801 {
9802 /* If R15 was used as Rn, the value read is the current PC+8.
9803 Rn was read into u_regval[1] above. */
9803 u_regval[1] = u_regval[1] + 8;
9804 }
9805 /* Calculate target store address, Rn +/- Rm, register offset. */
9806 if (12 == arm_insn_r->opcode)
9807 {
9808 tgt_mem_addr = u_regval[0] + u_regval[1];
9809 }
9810 else
9811 {
9812 tgt_mem_addr = u_regval[1] - u_regval[0];
9813 }
9814 if (ARM_RECORD_STRH == str_type)
9815 {
9816 record_buf_mem[0] = 2;
9817 record_buf_mem[1] = tgt_mem_addr;
9818 arm_insn_r->mem_rec_count = 1;
9819 }
9820 else if (ARM_RECORD_STRD == str_type)
9821 {
9822 record_buf_mem[0] = 4;
9823 record_buf_mem[1] = tgt_mem_addr;
9824 record_buf_mem[2] = 4;
9825 record_buf_mem[3] = tgt_mem_addr + 4;
9826 arm_insn_r->mem_rec_count = 2;
9827 }
9828 }
9829 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9830 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9831 {
9832 /* 3) Store, immediate pre-indexed. */
9833 /* 5) Store, immediate post-indexed. */
9834 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9835 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9836 offset_8 = (immed_high << 4) | immed_low;
9837 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9838 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9839 /* Calculate target store address, Rn +/- Rm, register offset. */
9840 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9841 {
9842 tgt_mem_addr = u_regval[0] + offset_8;
9843 }
9844 else
9845 {
9846 tgt_mem_addr = u_regval[0] - offset_8;
9847 }
9848 if (ARM_RECORD_STRH == str_type)
9849 {
9850 record_buf_mem[0] = 2;
9851 record_buf_mem[1] = tgt_mem_addr;
9852 arm_insn_r->mem_rec_count = 1;
9853 }
9854 else if (ARM_RECORD_STRD == str_type)
9855 {
9856 record_buf_mem[0] = 4;
9857 record_buf_mem[1] = tgt_mem_addr;
9858 record_buf_mem[2] = 4;
9859 record_buf_mem[3] = tgt_mem_addr + 4;
9860 arm_insn_r->mem_rec_count = 2;
9861 }
9862 /* Record Rn also as it changes. */
9863 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9864 arm_insn_r->reg_rec_count = 1;
9865 }
9866 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9867 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9868 {
9869 /* 4) Store, register pre-indexed. */
9870 /* 6) Store, register post -indexed. */
9871 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9872 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9873 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9874 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9875 /* Calculate target store address, Rn +/- Rm, register offset. */
9876 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9877 {
9878 tgt_mem_addr = u_regval[0] + u_regval[1];
9879 }
9880 else
9881 {
9882 tgt_mem_addr = u_regval[1] - u_regval[0];
9883 }
9884 if (ARM_RECORD_STRH == str_type)
9885 {
9886 record_buf_mem[0] = 2;
9887 record_buf_mem[1] = tgt_mem_addr;
9888 arm_insn_r->mem_rec_count = 1;
9889 }
9890 else if (ARM_RECORD_STRD == str_type)
9891 {
9892 record_buf_mem[0] = 4;
9893 record_buf_mem[1] = tgt_mem_addr;
9894 record_buf_mem[2] = 4;
9895 record_buf_mem[3] = tgt_mem_addr + 4;
9896 arm_insn_r->mem_rec_count = 2;
9897 }
9898 /* Record Rn also as it changes. */
9899 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9900 arm_insn_r->reg_rec_count = 1;
9901 }
9902 return 0;
9903}
9904
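/* Worked example (illustrative encoding, not from the original
   source): for a misc store with immediate offset such as
   STRH r1, [r2, #0x14], the decoder above extracts immed_high == 1 and
   immed_low == 4, forms offset_8 == (1 << 4) | 4 == 0x14, adds it to
   the value of r2 and records a 2-byte memory range at the resulting
   address (record_buf_mem[0] == 2).  */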
9905/* Handling ARM extension space insns. */
9906
9907static int
9908arm_record_extension_space (insn_decode_record *arm_insn_r)
9909{
9910 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9911 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9912 uint32_t record_buf[8], record_buf_mem[8];
9913 uint32_t reg_src1 = 0;
72508ac0
PO
9914 struct regcache *reg_cache = arm_insn_r->regcache;
9915 ULONGEST u_regval = 0;
9916
9917 gdb_assert (!INSN_RECORDED(arm_insn_r));
9918 /* Handle unconditional insn extension space. */
9919
9920 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9921 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9922 if (arm_insn_r->cond)
9923 {
9924 /* PLD has no effect on architectural state, it just affects
9925 the caches. */
9926 if (5 == ((opcode1 & 0xE0) >> 5))
9927 {
9928 /* BLX(1) */
9929 record_buf[0] = ARM_PS_REGNUM;
9930 record_buf[1] = ARM_LR_REGNUM;
9931 arm_insn_r->reg_rec_count = 2;
9932 }
9933 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9934 }
9935
9936
9937 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9938 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9939 {
9940 ret = -1;
9941 /* Undefined instruction on ARM V5; need to handle if later
9942 versions define it. */
9943 }
9944
9945 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9946 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9947 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9948
9949 /* Handle arithmetic insn extension space. */
9950 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9951 && !INSN_RECORDED(arm_insn_r))
9952 {
9953 /* Handle MLA(S) and MUL(S). */
9954 if (0 <= insn_op1 && 3 >= insn_op1)
9955 {
9956 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9957 record_buf[1] = ARM_PS_REGNUM;
9958 arm_insn_r->reg_rec_count = 2;
9959 }
9960 else if (4 <= insn_op1 && 15 >= insn_op1)
9961 {
9962 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9963 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9964 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9965 record_buf[2] = ARM_PS_REGNUM;
9966 arm_insn_r->reg_rec_count = 3;
9967 }
9968 }
9969
9970 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9971 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9972 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9973
9974 /* Handle control insn extension space. */
9975
9976 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9977 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9978 {
9979 if (!bit (arm_insn_r->arm_insn,25))
9980 {
9981 if (!bits (arm_insn_r->arm_insn, 4, 7))
9982 {
9983 if ((0 == insn_op1) || (2 == insn_op1))
9984 {
9985 /* MRS. */
9986 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9987 arm_insn_r->reg_rec_count = 1;
9988 }
9989 else if (1 == insn_op1)
9990 {
9991 /* CPSR is going to be changed. */
9992 record_buf[0] = ARM_PS_REGNUM;
9993 arm_insn_r->reg_rec_count = 1;
9994 }
9995 else if (3 == insn_op1)
9996 {
9997 /* SPSR is going to be changed. */
9998 /* We need to get SPSR value, which is yet to be done. */
72508ac0
PO
9999 return -1;
10000 }
10001 }
10002 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10003 {
10004 if (1 == insn_op1)
10005 {
10006 /* BX. */
10007 record_buf[0] = ARM_PS_REGNUM;
10008 arm_insn_r->reg_rec_count = 1;
10009 }
10010 else if (3 == insn_op1)
10011 {
10012 /* CLZ. */
10013 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10014 arm_insn_r->reg_rec_count = 1;
10015 }
10016 }
10017 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10018 {
10019 /* BLX. */
10020 record_buf[0] = ARM_PS_REGNUM;
10021 record_buf[1] = ARM_LR_REGNUM;
10022 arm_insn_r->reg_rec_count = 2;
10023 }
10024 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10025 {
10026 /* QADD, QSUB, QDADD, QDSUB */
10027 record_buf[0] = ARM_PS_REGNUM;
10028 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10029 arm_insn_r->reg_rec_count = 2;
10030 }
10031 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10032 {
10033 /* BKPT. */
10034 record_buf[0] = ARM_PS_REGNUM;
10035 record_buf[1] = ARM_LR_REGNUM;
10036 arm_insn_r->reg_rec_count = 2;
10037
 10038 /* Save SPSR also; how? */
10039 return -1;
10040 }
 10041 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10042 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10043 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10044 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10045 )
10046 {
10047 if (0 == insn_op1 || 1 == insn_op1)
10048 {
10049 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
 10050 /* We don't do the optimization for SMULW<y>, where we
 10051 would need only Rd. */
10052 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10053 record_buf[1] = ARM_PS_REGNUM;
10054 arm_insn_r->reg_rec_count = 2;
10055 }
10056 else if (2 == insn_op1)
10057 {
10058 /* SMLAL<x><y>. */
10059 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10060 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10061 arm_insn_r->reg_rec_count = 2;
10062 }
10063 else if (3 == insn_op1)
10064 {
10065 /* SMUL<x><y>. */
10066 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10067 arm_insn_r->reg_rec_count = 1;
10068 }
10069 }
10070 }
10071 else
10072 {
10073 /* MSR : immediate form. */
10074 if (1 == insn_op1)
10075 {
 10076 /* CPSR is going to be changed. */
10077 record_buf[0] = ARM_PS_REGNUM;
10078 arm_insn_r->reg_rec_count = 1;
10079 }
10080 else if (3 == insn_op1)
10081 {
10082 /* SPSR is going to be changed. */
 10083 /* We need to get the SPSR value, which is yet to be done. */
10084 return -1;
10085 }
10086 }
10087 }
10088
10089 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10090 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10091 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10092
10093 /* Handle load/store insn extension space. */
10094
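 /* insn_op1 (bits 5-6) selects the form handled below: 0 is SWP/SWPB,
 1 is the halfword transfers, 2 and 3 are the doubleword transfers
 and signed loads. */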
10095 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10096 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10097 && !INSN_RECORDED(arm_insn_r))
10098 {
10099 /* SWP/SWPB. */
10100 if (0 == insn_op1)
10101 {
 10102 /* This insn changes a register and memory as well. */
10103 /* SWP or SWPB insn. */
10104 /* Get memory address given by Rn. */
10105 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10106 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
 10107 /* SWP insn? It swaps a word. */
10108 if (8 == arm_insn_r->opcode)
10109 {
10110 record_buf_mem[0] = 4;
10111 }
10112 else
10113 {
 10114 /* SWPB insn, swaps only a byte. */
10115 record_buf_mem[0] = 1;
10116 }
10117 record_buf_mem[1] = u_regval;
10118 arm_insn_r->mem_rec_count = 1;
10119 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10120 arm_insn_r->reg_rec_count = 1;
10121 }
10122 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10123 {
10124 /* STRH. */
10125 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10126 ARM_RECORD_STRH);
10127 }
10128 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10129 {
10130 /* LDRD. */
10131 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10132 record_buf[1] = record_buf[0] + 1;
10133 arm_insn_r->reg_rec_count = 2;
10134 }
10135 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10136 {
10137 /* STRD. */
10138 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10139 ARM_RECORD_STRD);
10140 }
10141 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10142 {
10143 /* LDRH, LDRSB, LDRSH. */
10144 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10145 arm_insn_r->reg_rec_count = 1;
10146 }
10147
10148 }
10149
10150 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10151 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10152 && !INSN_RECORDED(arm_insn_r))
10153 {
10154 ret = -1;
10155 /* Handle coprocessor insn extension space. */
10156 }
10157
10158 /* To be done for ARMv5 and later; as of now we return -1. */
10159 if (-1 == ret)
 10160 return ret;
10161
10162 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10163 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10164
10165 return ret;
10166}
10167
10168/* Handling opcode 000 insns. */
10169
10170static int
10171arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10172{
10173 struct regcache *reg_cache = arm_insn_r->regcache;
10174 uint32_t record_buf[8], record_buf_mem[8];
10175 ULONGEST u_regval[2] = {0};
10176
 10177 uint32_t reg_src1 = 0, reg_dest = 0;
10178 uint32_t opcode1 = 0;
10179
10180 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10181 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10182 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
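 /* opcode1 (bits 20-24) is used below to recognize the miscellaneous
 group: 0x12 covers BX, BLX and BKPT, 0x16 covers CLZ. */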
10183
10184 /* Data processing insn /multiply insn. */
10185 if (9 == arm_insn_r->decode
10186 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10187 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10188 {
10189 /* Handle multiply instructions. */
10190 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10191 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10192 {
10193 /* Handle MLA and MUL. */
10194 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10195 record_buf[1] = ARM_PS_REGNUM;
10196 arm_insn_r->reg_rec_count = 2;
10197 }
10198 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10199 {
10200 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10201 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10202 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10203 record_buf[2] = ARM_PS_REGNUM;
10204 arm_insn_r->reg_rec_count = 3;
10205 }
10206 }
10207 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10208 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10209 {
10210 /* Handle misc load insns, as 20th bit (L = 1). */
 10211 /* The LDR insn is capable of doing branching: if
 10212 MOV LR, PC is preceded by a LDR insn that loads into R15,
 10213 it emulates a branch and link insn, and hence we
 10214 need to save CPSR and PC as well. I am not sure this is the right
 10215 place; an opcode = 010 LDR insn makes this happen if R15 is
 10216 used. */
10217 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10218 if (15 != reg_dest)
10219 {
10220 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10221 arm_insn_r->reg_rec_count = 1;
10222 }
10223 else
10224 {
10225 record_buf[0] = reg_dest;
10226 record_buf[1] = ARM_PS_REGNUM;
10227 arm_insn_r->reg_rec_count = 2;
10228 }
10229 }
10230 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10231 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10232 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10233 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10234 {
10235 /* Handle MSR insn. */
10236 if (9 == arm_insn_r->opcode)
10237 {
 10238 /* CPSR is going to be changed. */
10239 record_buf[0] = ARM_PS_REGNUM;
10240 arm_insn_r->reg_rec_count = 1;
10241 }
10242 else
10243 {
10244 /* SPSR is going to be changed. */
10245 /* How to read SPSR value? */
10246 return -1;
10247 }
10248 }
10249 else if (9 == arm_insn_r->decode
10250 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10251 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10252 {
10253 /* Handling SWP, SWPB. */
 10254 /* This insn changes a register and memory as well. */
10255 /* SWP or SWPB insn. */
10256
10257 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10258 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
 10259 /* SWP insn? It swaps a word. */
10260 if (8 == arm_insn_r->opcode)
10261 {
10262 record_buf_mem[0] = 4;
10263 }
10264 else
10265 {
 10266 /* SWPB insn, swaps only a byte. */
10267 record_buf_mem[0] = 1;
10268 }
10269 record_buf_mem[1] = u_regval[0];
10270 arm_insn_r->mem_rec_count = 1;
10271 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10272 arm_insn_r->reg_rec_count = 1;
10273 }
10274 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10275 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10276 {
10277 /* Handle BLX, branch and link/exchange. */
10278 if (9 == arm_insn_r->opcode)
10279 {
 10280 /* The T bit of CPSR is set from bit[0] of Rm to select the state,
10281 and R14 stores the return address. */
10282 record_buf[0] = ARM_PS_REGNUM;
10283 record_buf[1] = ARM_LR_REGNUM;
10284 arm_insn_r->reg_rec_count = 2;
10285 }
10286 }
10287 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10288 {
10289 /* Handle enhanced software breakpoint insn, BKPT. */
10290 /* CPSR is changed to be executed in ARM state, disabling normal
10291 interrupts, entering abort mode. */
10292 /* According to high vector configuration PC is set. */
 10293 /* If the user hits a breakpoint and types reverse, in
 10294 that case we need to go back with the previous CPSR and
 10295 Program Counter. */
10296 record_buf[0] = ARM_PS_REGNUM;
10297 record_buf[1] = ARM_LR_REGNUM;
10298 arm_insn_r->reg_rec_count = 2;
10299
10300 /* Save SPSR also; how? */
10301 return -1;
10302 }
10303 else if (11 == arm_insn_r->decode
10304 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10305 {
10306 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10307
10308 /* Handle str(x) insn */
10309 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10310 ARM_RECORD_STRH);
10311 }
10312 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10313 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10314 {
10315 /* Handle BX, branch and link/exchange. */
 10316 /* The T bit of CPSR is set from bit[0] of Rm to select the state. */
10317 record_buf[0] = ARM_PS_REGNUM;
10318 arm_insn_r->reg_rec_count = 1;
10319 }
10320 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10321 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10322 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10323 {
10324 /* Count leading zeros: CLZ. */
10325 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10326 arm_insn_r->reg_rec_count = 1;
10327 }
10328 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10329 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10330 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10331 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10332 )
10333 {
10334 /* Handle MRS insn. */
10335 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10336 arm_insn_r->reg_rec_count = 1;
10337 }
10338 else if (arm_insn_r->opcode <= 15)
10339 {
10340 /* Normal data processing insns. */
 10341 /* In all 11 shifter operand modes, the insn modifies the destination
 10342 register, which is specified by bits 12-15. */
10343 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10344 record_buf[1] = ARM_PS_REGNUM;
10345 arm_insn_r->reg_rec_count = 2;
10346 }
10347 else
10348 {
10349 return -1;
10350 }
10351
10352 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10353 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10354 return 0;
10355}
10356
10357/* Handling opcode 001 insns. */
10358
10359static int
10360arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10361{
10362 uint32_t record_buf[8], record_buf_mem[8];
10363
10364 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10365 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10366
10367 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10368 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10369 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10370 )
10371 {
10372 /* Handle MSR insn. */
10373 if (9 == arm_insn_r->opcode)
10374 {
 10375 /* CPSR is going to be changed. */
10376 record_buf[0] = ARM_PS_REGNUM;
10377 arm_insn_r->reg_rec_count = 1;
10378 }
10379 else
10380 {
10381 /* SPSR is going to be changed. */
10382 }
10383 }
10384 else if (arm_insn_r->opcode <= 15)
10385 {
10386 /* Normal data processing insns. */
 10387 /* In all 11 shifter operand modes, the insn modifies the destination
 10388 register, which is specified by bits 12-15. */
10389 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10390 record_buf[1] = ARM_PS_REGNUM;
10391 arm_insn_r->reg_rec_count = 2;
10392 }
10393 else
10394 {
10395 return -1;
10396 }
10397
10398 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10399 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10400 return 0;
10401}
10402
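 /* Record handler for ARMv6 and above media instructions (opcode 011
 with bit 4 set); records the destination registers they modify and
 CPSR where the flags may change. */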
10403static int
10404arm_record_media (insn_decode_record *arm_insn_r)
10405{
10406 uint32_t record_buf[8];
10407
10408 switch (bits (arm_insn_r->arm_insn, 22, 24))
10409 {
10410 case 0:
10411 /* Parallel addition and subtraction, signed */
10412 case 1:
10413 /* Parallel addition and subtraction, unsigned */
10414 case 2:
10415 case 3:
10416 /* Packing, unpacking, saturation and reversal */
10417 {
10418 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10419
10420 record_buf[arm_insn_r->reg_rec_count++] = rd;
10421 }
10422 break;
10423
10424 case 4:
10425 case 5:
10426 /* Signed multiplies */
10427 {
10428 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10429 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10430
10431 record_buf[arm_insn_r->reg_rec_count++] = rd;
10432 if (op1 == 0x0)
10433 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10434 else if (op1 == 0x4)
10435 record_buf[arm_insn_r->reg_rec_count++]
10436 = bits (arm_insn_r->arm_insn, 12, 15);
10437 }
10438 break;
10439
10440 case 6:
10441 {
10442 if (bit (arm_insn_r->arm_insn, 21)
10443 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10444 {
10445 /* SBFX */
10446 record_buf[arm_insn_r->reg_rec_count++]
10447 = bits (arm_insn_r->arm_insn, 12, 15);
10448 }
10449 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10450 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10451 {
10452 /* USAD8 and USADA8 */
10453 record_buf[arm_insn_r->reg_rec_count++]
10454 = bits (arm_insn_r->arm_insn, 16, 19);
10455 }
10456 }
10457 break;
10458
10459 case 7:
10460 {
10461 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10462 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10463 {
10464 /* Permanently UNDEFINED */
10465 return -1;
10466 }
10467 else
10468 {
10469 /* BFC, BFI and UBFX */
10470 record_buf[arm_insn_r->reg_rec_count++]
10471 = bits (arm_insn_r->arm_insn, 12, 15);
10472 }
10473 }
10474 break;
10475
10476 default:
10477 return -1;
10478 }
10479
10480 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10481
10482 return 0;
10483}
10484
 10485/* Handle ARM mode instructions with opcode 010. */
10486
10487static int
10488arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10489{
10490 struct regcache *reg_cache = arm_insn_r->regcache;
10491
 10492 uint32_t reg_base, reg_dest;
 10493 uint32_t offset_12, tgt_mem_addr;
 10494 uint32_t record_buf[8], record_buf_mem[8];
10495 unsigned char wback;
10496 ULONGEST u_regval;
 10497
10498 /* Calculate wback. */
10499 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10500 || (bit (arm_insn_r->arm_insn, 21) == 1);
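 /* wback is true for post-indexed addressing (P == 0) or when the
 write-back bit W is set, in which case Rn itself is modified and
 must be recorded. */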
 10501
10502 arm_insn_r->reg_rec_count = 0;
10503 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10504
10505 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10506 {
10507 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10508 and LDRT. */
10509
 10510 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10511 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10512
 10513 /* The LDR instruction is capable of doing branching. If MOV LR, PC
 10514 precedes a LDR instruction that loads into R15 (reg_dest), it
 10515 emulates a branch and link instruction, and hence we need to save
10516 CPSR and PC as well. */
10517 if (ARM_PC_REGNUM == reg_dest)
10518 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10519
10520 /* If wback is true, also save the base register, which is going to be
10521 written to. */
10522 if (wback)
10523 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10524 }
10525 else
10526 {
10527 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10528
 10529 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10530 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10531
10532 /* Handle bit U. */
 10533 if (bit (arm_insn_r->arm_insn, 23))
10534 {
10535 /* U == 1: Add the offset. */
10536 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10537 }
 10538 else
10539 {
10540 /* U == 0: subtract the offset. */
10541 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10542 }
10543
10544 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10545 bytes. */
10546 if (bit (arm_insn_r->arm_insn, 22))
10547 {
10548 /* STRB and STRBT: 1 byte. */
10549 record_buf_mem[0] = 1;
10550 }
10551 else
10552 {
10553 /* STR and STRT: 4 bytes. */
10554 record_buf_mem[0] = 4;
10555 }
10556
10557 /* Handle bit P. */
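 /* With P == 1 (offset or pre-indexed addressing) the store goes to the
 computed offset address; with P == 0 (post-indexed) it goes to the
 original base value, and the offset only updates Rn afterwards. */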
10558 if (bit (arm_insn_r->arm_insn, 24))
10559 record_buf_mem[1] = tgt_mem_addr;
10560 else
10561 record_buf_mem[1] = (uint32_t) u_regval;
 10562
10563 arm_insn_r->mem_rec_count = 1;
10564
10565 /* If wback is true, also save the base register, which is going to be
10566 written to. */
10567 if (wback)
10568 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10569 }
10570
10571 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10572 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10573 return 0;
10574}
10575
10576/* Handling opcode 011 insns. */
10577
10578static int
10579arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10580{
10581 struct regcache *reg_cache = arm_insn_r->regcache;
10582
10583 uint32_t shift_imm = 0;
10584 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10585 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10586 uint32_t record_buf[8], record_buf_mem[8];
10587
10588 LONGEST s_word;
10589 ULONGEST u_regval[2];
10590
10591 if (bit (arm_insn_r->arm_insn, 4))
10592 return arm_record_media (arm_insn_r);
10593
10594 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10595 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10596
10597 /* Handle enhanced store insns and LDRD DSP insn,
10598 order begins according to addressing modes for store insns
10599 STRH insn. */
10600
10601 /* LDR or STR? */
10602 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10603 {
10604 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
 10605 /* The LDR insn is capable of doing branching: if
 10606 MOV LR, PC is preceded by a LDR insn that loads into R15,
 10607 it emulates a branch and link insn, and hence we
 10608 need to save CPSR and PC as well. */
10609 if (15 != reg_dest)
10610 {
10611 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10612 arm_insn_r->reg_rec_count = 1;
10613 }
10614 else
10615 {
10616 record_buf[0] = reg_dest;
10617 record_buf[1] = ARM_PS_REGNUM;
10618 arm_insn_r->reg_rec_count = 2;
10619 }
10620 }
10621 else
10622 {
10623 if (! bits (arm_insn_r->arm_insn, 4, 11))
10624 {
10625 /* Store insn, register offset and register pre-indexed,
10626 register post-indexed. */
10627 /* Get Rm. */
10628 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10629 /* Get Rn. */
10630 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10631 regcache_raw_read_unsigned (reg_cache, reg_src1
10632 , &u_regval[0]);
10633 regcache_raw_read_unsigned (reg_cache, reg_src2
10634 , &u_regval[1]);
10635 if (15 == reg_src2)
10636 {
10637 /* If R15 was used as Rn, hence current PC+8. */
 10638 /* Pre-indexed mode doesn't reach here; illegal insn. */
10639 u_regval[0] = u_regval[0] + 8;
10640 }
10641 /* Calculate target store address, Rn +/- Rm, register offset. */
10642 /* U == 1. */
10643 if (bit (arm_insn_r->arm_insn, 23))
10644 {
10645 tgt_mem_addr = u_regval[0] + u_regval[1];
10646 }
10647 else
10648 {
10649 tgt_mem_addr = u_regval[1] - u_regval[0];
10650 }
10651
10652 switch (arm_insn_r->opcode)
10653 {
10654 /* STR. */
10655 case 8:
10656 case 12:
10657 /* STR. */
10658 case 9:
10659 case 13:
10660 /* STRT. */
10661 case 1:
10662 case 5:
10663 /* STR. */
10664 case 0:
10665 case 4:
10666 record_buf_mem[0] = 4;
10667 break;
10668
10669 /* STRB. */
10670 case 10:
10671 case 14:
10672 /* STRB. */
10673 case 11:
10674 case 15:
10675 /* STRBT. */
10676 case 3:
10677 case 7:
10678 /* STRB. */
10679 case 2:
10680 case 6:
10681 record_buf_mem[0] = 1;
10682 break;
10683
10684 default:
10685 gdb_assert_not_reached ("no decoding pattern found");
10686 break;
10687 }
10688 record_buf_mem[1] = tgt_mem_addr;
10689 arm_insn_r->mem_rec_count = 1;
10690
10691 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10692 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10693 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10694 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10695 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10696 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10697 )
10698 {
10699 /* Rn is going to be changed in pre-indexed mode and
10700 post-indexed mode as well. */
10701 record_buf[0] = reg_src2;
10702 arm_insn_r->reg_rec_count = 1;
10703 }
10704 }
10705 else
10706 {
10707 /* Store insn, scaled register offset; scaled pre-indexed. */
10708 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10709 /* Get Rm. */
10710 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10711 /* Get Rn. */
10712 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10713 /* Get shift_imm. */
10714 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10715 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10716 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10717 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
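 /* offset_12 currently holds the shift type from bits 5-6: 0 is LSL,
 1 is LSR, 2 is ASR and 3 is ROR (RRX when shift_imm is zero). */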
10718 /* Offset_12 used as shift. */
10719 switch (offset_12)
10720 {
10721 case 0:
10722 /* Offset_12 used as index. */
10723 offset_12 = u_regval[0] << shift_imm;
10724 break;
10725
10726 case 1:
 10727 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10728 break;
10729
10730 case 2:
10731 if (!shift_imm)
10732 {
10733 if (bit (u_regval[0], 31))
10734 {
10735 offset_12 = 0xFFFFFFFF;
10736 }
10737 else
10738 {
10739 offset_12 = 0;
10740 }
10741 }
10742 else
10743 {
10744 /* This is arithmetic shift. */
10745 offset_12 = s_word >> shift_imm;
10746 }
10747 break;
10748
10749 case 3:
10750 if (!shift_imm)
10751 {
10752 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10753 &u_regval[1]);
10754 /* Get C flag value and shift it by 31. */
10755 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10756 | (u_regval[0]) >> 1);
10757 }
10758 else
10759 {
10760 offset_12 = (u_regval[0] >> shift_imm) \
10761 | (u_regval[0] <<
 10762 (32 - shift_imm));
10763 }
10764 break;
10765
10766 default:
10767 gdb_assert_not_reached ("no decoding pattern found");
10768 break;
10769 }
10770
10771 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10772 /* bit U set. */
10773 if (bit (arm_insn_r->arm_insn, 23))
10774 {
10775 tgt_mem_addr = u_regval[1] + offset_12;
10776 }
10777 else
10778 {
10779 tgt_mem_addr = u_regval[1] - offset_12;
10780 }
10781
10782 switch (arm_insn_r->opcode)
10783 {
10784 /* STR. */
10785 case 8:
10786 case 12:
10787 /* STR. */
10788 case 9:
10789 case 13:
10790 /* STRT. */
10791 case 1:
10792 case 5:
10793 /* STR. */
10794 case 0:
10795 case 4:
10796 record_buf_mem[0] = 4;
10797 break;
10798
10799 /* STRB. */
10800 case 10:
10801 case 14:
10802 /* STRB. */
10803 case 11:
10804 case 15:
10805 /* STRBT. */
10806 case 3:
10807 case 7:
10808 /* STRB. */
10809 case 2:
10810 case 6:
10811 record_buf_mem[0] = 1;
10812 break;
10813
10814 default:
10815 gdb_assert_not_reached ("no decoding pattern found");
10816 break;
10817 }
10818 record_buf_mem[1] = tgt_mem_addr;
10819 arm_insn_r->mem_rec_count = 1;
10820
10821 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10822 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10823 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10824 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10825 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10826 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10827 )
10828 {
10829 /* Rn is going to be changed in register scaled pre-indexed
 10830 mode, and scaled post-indexed mode. */
10831 record_buf[0] = reg_src2;
10832 arm_insn_r->reg_rec_count = 1;
10833 }
10834 }
10835 }
10836
10837 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10838 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10839 return 0;
10840}
10841
 10842/* Handle ARM mode instructions with opcode 100. */
10843
10844static int
10845arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10846{
10847 struct regcache *reg_cache = arm_insn_r->regcache;
 10848 uint32_t register_count = 0, register_bits;
 10849 uint32_t reg_base, addr_mode;
 10850 uint32_t record_buf[24], record_buf_mem[48];
 10851 uint32_t wback;
 10852 ULONGEST u_regval;
 10853
 10854 /* Fetch the list of registers. */
 10855 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
 10856 arm_insn_r->reg_rec_count = 0;
 10857
 10858 /* Fetch the base register that contains the address we are loading data
 10859 to. */
 10860 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
 10861
72508ac0
PO
10864
10865 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10866 {
 10867 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
 10868
 10869 /* Find out which registers are going to be loaded from memory. */
 10870 while (register_bits)
10871 {
10872 if (register_bits & 0x00000001)
10873 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10874 register_bits = register_bits >> 1;
10875 register_count++;
10876 }
 10877
10878
10879 /* If wback is true, also save the base register, which is going to be
10880 written to. */
10881 if (wback)
10882 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10883
10884 /* Save the CPSR register. */
10885 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10886 }
10887 else
10888 {
 10889 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
 10890
10891 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10892
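 /* addr_mode is P:U from bits 24:23; it selects decrement/increment,
 after/before, and determines the lowest address stored below. */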
10893 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10894
10895 /* Find out how many registers are going to be stored to memory. */
 10896 while (register_bits)
10897 {
10898 if (register_bits & 0x00000001)
10899 register_count++;
10900 register_bits = register_bits >> 1;
10901 }
10902
10903 switch (addr_mode)
10904 {
10905 /* STMDA (STMED): Decrement after. */
10906 case 0:
10907 record_buf_mem[1] = (uint32_t) u_regval
10908 - register_count * INT_REGISTER_SIZE + 4;
10909 break;
10910 /* STM (STMIA, STMEA): Increment after. */
10911 case 1:
10912 record_buf_mem[1] = (uint32_t) u_regval;
10913 break;
10914 /* STMDB (STMFD): Decrement before. */
10915 case 2:
10916 record_buf_mem[1] = (uint32_t) u_regval
10917 - register_count * INT_REGISTER_SIZE;
10918 break;
10919 /* STMIB (STMFA): Increment before. */
10920 case 3:
10921 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10922 break;
10923 default:
10924 gdb_assert_not_reached ("no decoding pattern found");
10925 break;
10926 }
 10927
10928 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10929 arm_insn_r->mem_rec_count = 1;
10930
10931 /* If wback is true, also save the base register, which is going to be
10932 written to. */
10933 if (wback)
10934 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10935 }
10936
10937 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10938 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10939 return 0;
10940}
10941
10942/* Handling opcode 101 insns. */
10943
10944static int
10945arm_record_b_bl (insn_decode_record *arm_insn_r)
10946{
10947 uint32_t record_buf[8];
10948
10949 /* Handle B, BL, BLX(1) insns. */
10950 /* B simply branches so we do nothing here. */
 10951 /* Note: BLX(1) doesn't fall here but instead it falls into
10952 extension space. */
10953 if (bit (arm_insn_r->arm_insn, 24))
10954 {
10955 record_buf[0] = ARM_LR_REGNUM;
10956 arm_insn_r->reg_rec_count = 1;
10957 }
10958
10959 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10960
10961 return 0;
10962}
10963
 10964 static int
 10965 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10966{
10967 printf_unfiltered (_("Process record does not support instruction "
10968 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10969 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10970
10971 return -1;
10972}
10973
10974/* Record handler for vector data transfer instructions. */
10975
10976static int
10977arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
10978{
10979 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
10980 uint32_t record_buf[4];
10981
10982 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10983 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
10984 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
10985 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
10986 bit_l = bit (arm_insn_r->arm_insn, 20);
10987 bit_c = bit (arm_insn_r->arm_insn, 8);
10988
10989 /* Handle VMOV instruction. */
10990 if (bit_l && bit_c)
10991 {
10992 record_buf[0] = reg_t;
10993 arm_insn_r->reg_rec_count = 1;
10994 }
10995 else if (bit_l && !bit_c)
10996 {
10997 /* Handle VMOV instruction. */
10998 if (bits_a == 0x00)
10999 {
 11000 record_buf[0] = reg_t;
11001 arm_insn_r->reg_rec_count = 1;
11002 }
11003 /* Handle VMRS instruction. */
11004 else if (bits_a == 0x07)
11005 {
11006 if (reg_t == 15)
11007 reg_t = ARM_PS_REGNUM;
11008
11009 record_buf[0] = reg_t;
11010 arm_insn_r->reg_rec_count = 1;
11011 }
11012 }
11013 else if (!bit_l && !bit_c)
11014 {
11015 /* Handle VMOV instruction. */
11016 if (bits_a == 0x00)
11017 {
 11018 record_buf[0] = ARM_D0_REGNUM + reg_v;
11019
11020 arm_insn_r->reg_rec_count = 1;
11021 }
11022 /* Handle VMSR instruction. */
11023 else if (bits_a == 0x07)
11024 {
11025 record_buf[0] = ARM_FPSCR_REGNUM;
11026 arm_insn_r->reg_rec_count = 1;
11027 }
11028 }
11029 else if (!bit_l && bit_c)
11030 {
11031 /* Handle VMOV instruction. */
11032 if (!(bits_a & 0x04))
11033 {
11034 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11035 + ARM_D0_REGNUM;
11036 arm_insn_r->reg_rec_count = 1;
11037 }
11038 /* Handle VDUP instruction. */
11039 else
11040 {
11041 if (bit (arm_insn_r->arm_insn, 21))
11042 {
11043 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11044 record_buf[0] = reg_v + ARM_D0_REGNUM;
11045 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11046 arm_insn_r->reg_rec_count = 2;
11047 }
11048 else
11049 {
11050 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11051 record_buf[0] = reg_v + ARM_D0_REGNUM;
11052 arm_insn_r->reg_rec_count = 1;
11053 }
11054 }
11055 }
11056
11057 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11058 return 0;
11059}
11060
11061/* Record handler for extension register load/store instructions. */
11062
11063static int
11064arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11065{
11066 uint32_t opcode, single_reg;
11067 uint8_t op_vldm_vstm;
11068 uint32_t record_buf[8], record_buf_mem[128];
11069 ULONGEST u_regval = 0;
11070
11071 struct regcache *reg_cache = arm_insn_r->regcache;
11072 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
11073
11074 opcode = bits (arm_insn_r->arm_insn, 20, 24);
 11075 single_reg = !bit (arm_insn_r->arm_insn, 8);
11076 op_vldm_vstm = opcode & 0x1b;
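 /* Masking the opcode with 0x1b folds the addressing-mode variants
 together: 0x08, 0x0a and 0x12 are VSTM/VPUSH, while 0x09, 0x0b and
 0x13 are VLDM/VPOP, as tested below. */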
11077
11078 /* Handle VMOV instructions. */
11079 if ((opcode & 0x1e) == 0x04)
11080 {
 11081 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11082 {
11083 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11084 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11085 arm_insn_r->reg_rec_count = 2;
11086 }
 11087 else
 11088 {
11089 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11090 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
 11091
 11092 if (single_reg)
 11093 {
11094 /* The first S register number m is REG_M:M (M is bit 5),
11095 the corresponding D register number is REG_M:M / 2, which
11096 is REG_M. */
11097 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11098 /* The second S register number is REG_M:M + 1, the
11099 corresponding D register number is (REG_M:M + 1) / 2.
11100 IOW, if bit M is 1, the first and second S registers
11101 are mapped to different D registers, otherwise, they are
11102 in the same D register. */
11103 if (bit_m)
11104 {
11105 record_buf[arm_insn_r->reg_rec_count++]
11106 = ARM_D0_REGNUM + reg_m + 1;
11107 }
11108 }
11109 else
11110 {
 11111 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11112 arm_insn_r->reg_rec_count = 1;
11113 }
11114 }
11115 }
11116 /* Handle VSTM and VPUSH instructions. */
11117 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
 11118 || op_vldm_vstm == 0x12)
11119 {
11120 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11121 uint32_t memory_index = 0;
11122
11123 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11124 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11125 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
 11126 imm_off32 = imm_off8 << 2;
11127 memory_count = imm_off8;
11128
11129 if (bit (arm_insn_r->arm_insn, 23))
 11130 start_address = u_regval;
 11131 else
 11132 start_address = u_regval - imm_off32;
11133
11134 if (bit (arm_insn_r->arm_insn, 21))
11135 {
11136 record_buf[0] = reg_rn;
11137 arm_insn_r->reg_rec_count = 1;
11138 }
11139
11140 while (memory_count > 0)
 11141 {
 11142 if (single_reg)
 11143 {
11144 record_buf_mem[memory_index] = 4;
11145 record_buf_mem[memory_index + 1] = start_address;
11146 start_address = start_address + 4;
11147 memory_index = memory_index + 2;
11148 }
11149 else
11150 {
11151 record_buf_mem[memory_index] = 4;
11152 record_buf_mem[memory_index + 1] = start_address;
11153 record_buf_mem[memory_index + 2] = 4;
11154 record_buf_mem[memory_index + 3] = start_address + 4;
11155 start_address = start_address + 8;
11156 memory_index = memory_index + 4;
11157 }
11158 memory_count--;
11159 }
11160 arm_insn_r->mem_rec_count = (memory_index >> 1);
11161 }
11162 /* Handle VLDM instructions. */
11163 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
 11164 || op_vldm_vstm == 0x13)
11165 {
11166 uint32_t reg_count, reg_vd;
11167 uint32_t reg_index = 0;
 11168 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11169
11170 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11171 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11172
11173 /* REG_VD is the first D register number. If the instruction
11174 loads memory to S registers (SINGLE_REG is TRUE), the register
11175 number is (REG_VD << 1 | bit D), so the corresponding D
11176 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11177 if (!single_reg)
11178 reg_vd = reg_vd | (bit_d << 4);
 11179
 11180 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
 11181 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
 11182
11183 /* If the instruction loads memory to D register, REG_COUNT should
11184 be divided by 2, according to the ARM Architecture Reference
 11185 Manual. If the instruction loads memory to S registers, divide by
 11186 2 as well because two S registers are mapped to one D register. */
11187 reg_count = reg_count / 2;
11188 if (single_reg && bit_d)
 11189 {
11190 /* Increase the register count if S register list starts from
11191 an odd number (bit d is one). */
11192 reg_count++;
11193 }
 11194
11195 while (reg_count > 0)
11196 {
11197 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11198 reg_count--;
11199 }
11200 arm_insn_r->reg_rec_count = reg_index;
11201 }
11202 /* VSTR Vector store register. */
11203 else if ((opcode & 0x13) == 0x10)
11204 {
 11205 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11206 uint32_t memory_index = 0;
11207
11208 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11209 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11210 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
 11211 imm_off32 = imm_off8 << 2;
11212
11213 if (bit (arm_insn_r->arm_insn, 23))
 11214 start_address = u_regval + imm_off32;
 11215 else
 11216 start_address = u_regval - imm_off32;
11217
11218 if (single_reg)
 11219 {
11220 record_buf_mem[memory_index] = 4;
11221 record_buf_mem[memory_index + 1] = start_address;
11222 arm_insn_r->mem_rec_count = 1;
11223 }
f20f80dd 11224 else
 11225 {
11226 record_buf_mem[memory_index] = 4;
11227 record_buf_mem[memory_index + 1] = start_address;
11228 record_buf_mem[memory_index + 2] = 4;
11229 record_buf_mem[memory_index + 3] = start_address + 4;
11230 arm_insn_r->mem_rec_count = 2;
11231 }
11232 }
11233 /* VLDR Vector load register. */
11234 else if ((opcode & 0x13) == 0x11)
11235 {
11236 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11237
11238 if (!single_reg)
11239 {
11240 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11241 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11242 }
f20f80dd 11243 else
11244 {
11245 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11246 /* Record register D rather than pseudo register S. */
11247 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
 11248 }
11249 arm_insn_r->reg_rec_count = 1;
11250 }
11251
11252 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11253 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11254 return 0;
11255}
11256
11257/* Record handler for arm/thumb mode VFP data processing instructions. */
11258
11259static int
11260arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11261{
11262 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11263 uint32_t record_buf[4];
11264 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
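 /* INSN_T0 records a pair of D registers, INSN_T1 a single D register,
 INSN_T2 a single-precision destination, and INSN_T3 only FPSCR; see
 the switch at the end of this function. */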
11265 enum insn_types curr_insn_type = INSN_INV;
11266
11267 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11268 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11269 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11270 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11271 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11272 bit_d = bit (arm_insn_r->arm_insn, 22);
 11273 opc1 = opc1 & ~0x04; /* Mask off the D bit (insn bit 22). */
11274
11275 /* Handle VMLA, VMLS. */
11276 if (opc1 == 0x00)
11277 {
11278 if (bit (arm_insn_r->arm_insn, 10))
11279 {
11280 if (bit (arm_insn_r->arm_insn, 6))
11281 curr_insn_type = INSN_T0;
11282 else
11283 curr_insn_type = INSN_T1;
11284 }
11285 else
11286 {
11287 if (dp_op_sz)
11288 curr_insn_type = INSN_T1;
11289 else
11290 curr_insn_type = INSN_T2;
11291 }
11292 }
11293 /* Handle VNMLA, VNMLS, VNMUL. */
11294 else if (opc1 == 0x01)
11295 {
11296 if (dp_op_sz)
11297 curr_insn_type = INSN_T1;
11298 else
11299 curr_insn_type = INSN_T2;
11300 }
11301 /* Handle VMUL. */
11302 else if (opc1 == 0x02 && !(opc3 & 0x01))
11303 {
11304 if (bit (arm_insn_r->arm_insn, 10))
11305 {
11306 if (bit (arm_insn_r->arm_insn, 6))
11307 curr_insn_type = INSN_T0;
11308 else
11309 curr_insn_type = INSN_T1;
11310 }
11311 else
11312 {
11313 if (dp_op_sz)
11314 curr_insn_type = INSN_T1;
11315 else
11316 curr_insn_type = INSN_T2;
11317 }
11318 }
11319 /* Handle VADD, VSUB. */
11320 else if (opc1 == 0x03)
11321 {
11322 if (!bit (arm_insn_r->arm_insn, 9))
11323 {
11324 if (bit (arm_insn_r->arm_insn, 6))
11325 curr_insn_type = INSN_T0;
11326 else
11327 curr_insn_type = INSN_T1;
11328 }
11329 else
11330 {
11331 if (dp_op_sz)
11332 curr_insn_type = INSN_T1;
11333 else
11334 curr_insn_type = INSN_T2;
11335 }
11336 }
11337 /* Handle VDIV. */
 11338 else if (opc1 == 0x08)
11339 {
11340 if (dp_op_sz)
11341 curr_insn_type = INSN_T1;
11342 else
11343 curr_insn_type = INSN_T2;
11344 }
11345 /* Handle all other vfp data processing instructions. */
11346 else if (opc1 == 0x0b)
11347 {
11348 /* Handle VMOV. */
11349 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11350 {
11351 if (bit (arm_insn_r->arm_insn, 4))
11352 {
11353 if (bit (arm_insn_r->arm_insn, 6))
11354 curr_insn_type = INSN_T0;
11355 else
11356 curr_insn_type = INSN_T1;
11357 }
11358 else
11359 {
11360 if (dp_op_sz)
11361 curr_insn_type = INSN_T1;
11362 else
11363 curr_insn_type = INSN_T2;
11364 }
11365 }
11366 /* Handle VNEG and VABS. */
11367 else if ((opc2 == 0x01 && opc3 == 0x01)
11368 || (opc2 == 0x00 && opc3 == 0x03))
11369 {
11370 if (!bit (arm_insn_r->arm_insn, 11))
11371 {
11372 if (bit (arm_insn_r->arm_insn, 6))
11373 curr_insn_type = INSN_T0;
11374 else
11375 curr_insn_type = INSN_T1;
11376 }
11377 else
11378 {
11379 if (dp_op_sz)
11380 curr_insn_type = INSN_T1;
11381 else
11382 curr_insn_type = INSN_T2;
11383 }
11384 }
11385 /* Handle VSQRT. */
11386 else if (opc2 == 0x01 && opc3 == 0x03)
11387 {
11388 if (dp_op_sz)
11389 curr_insn_type = INSN_T1;
11390 else
11391 curr_insn_type = INSN_T2;
11392 }
11393 /* Handle VCVT. */
11394 else if (opc2 == 0x07 && opc3 == 0x03)
11395 {
11396 if (!dp_op_sz)
11397 curr_insn_type = INSN_T1;
11398 else
11399 curr_insn_type = INSN_T2;
11400 }
11401 else if (opc3 & 0x01)
11402 {
11403 /* Handle VCVT. */
11404 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11405 {
11406 if (!bit (arm_insn_r->arm_insn, 18))
11407 curr_insn_type = INSN_T2;
11408 else
11409 {
11410 if (dp_op_sz)
11411 curr_insn_type = INSN_T1;
11412 else
11413 curr_insn_type = INSN_T2;
11414 }
11415 }
11416 /* Handle VCVT. */
11417 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11418 {
11419 if (dp_op_sz)
11420 curr_insn_type = INSN_T1;
11421 else
11422 curr_insn_type = INSN_T2;
11423 }
11424 /* Handle VCVTB, VCVTT. */
11425 else if ((opc2 & 0x0e) == 0x02)
11426 curr_insn_type = INSN_T2;
11427 /* Handle VCMP, VCMPE. */
11428 else if ((opc2 & 0x0e) == 0x04)
11429 curr_insn_type = INSN_T3;
11430 }
11431 }
11432
11433 switch (curr_insn_type)
11434 {
11435 case INSN_T0:
11436 reg_vd = reg_vd | (bit_d << 4);
11437 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11438 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11439 arm_insn_r->reg_rec_count = 2;
11440 break;
11441
11442 case INSN_T1:
11443 reg_vd = reg_vd | (bit_d << 4);
11444 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11445 arm_insn_r->reg_rec_count = 1;
11446 break;
11447
11448 case INSN_T2:
11449 reg_vd = (reg_vd << 1) | bit_d;
11450 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11451 arm_insn_r->reg_rec_count = 1;
11452 break;
11453
11454 case INSN_T3:
11455 record_buf[0] = ARM_FPSCR_REGNUM;
11456 arm_insn_r->reg_rec_count = 1;
11457 break;
11458
11459 default:
11460 gdb_assert_not_reached ("no decoding pattern found");
11461 break;
11462 }
11463
11464 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11465 return 0;
11466}
11467
11468/* Handling opcode 110 insns. */
11469
11470static int
11471arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11472{
 11473 uint32_t op1, op1_ebit, coproc;
11474
11475 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11476 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11477 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11478
11479 if ((coproc & 0x0e) == 0x0a)
11480 {
11481 /* Handle extension register ld/st instructions. */
11482 if (!(op1 & 0x20))
 11483 return arm_record_exreg_ld_st_insn (arm_insn_r);
11484
11485 /* 64-bit transfers between arm core and extension registers. */
11486 if ((op1 & 0x3e) == 0x04)
 11487 return arm_record_exreg_ld_st_insn (arm_insn_r);
11488 }
11489 else
11490 {
11491 /* Handle coprocessor ld/st instructions. */
11492 if (!(op1 & 0x3a))
11493 {
11494 /* Store. */
11495 if (!op1_ebit)
11496 return arm_record_unsupported_insn (arm_insn_r);
11497 else
11498 /* Load. */
11499 return arm_record_unsupported_insn (arm_insn_r);
11500 }
11501
11502 /* Move to coprocessor from two arm core registers. */
11503 if (op1 == 0x4)
11504 return arm_record_unsupported_insn (arm_insn_r);
11505
11506 /* Move to two arm core registers from coprocessor. */
11507 if (op1 == 0x5)
11508 {
11509 uint32_t reg_t[2];
11510
11511 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11512 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11513 arm_insn_r->reg_rec_count = 2;
11514
11515 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11516 return 0;
11517 }
11518 }
11519 return arm_record_unsupported_insn (arm_insn_r);
11520}
11521
11522/* Handling opcode 111 insns. */
11523
11524static int
11525arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11526{
 11527 uint32_t op, op1_sbit, op1_ebit, coproc;
11528 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11529 struct regcache *reg_cache = arm_insn_r->regcache;
11530
11531 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11532 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11533 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11534 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11535 op = bit (arm_insn_r->arm_insn, 4);
11536
11537 /* Handle arm SWI/SVC system call instructions. */
 11538 if (op1_sbit)
11539 {
11540 if (tdep->arm_syscall_record != NULL)
11541 {
11542 ULONGEST svc_operand, svc_number;
11543
11544 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11545
11546 if (svc_operand) /* OABI. */
11547 svc_number = svc_operand - 0x900000;
11548 else /* EABI. */
11549 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11550
 11551 return tdep->arm_syscall_record (reg_cache, svc_number);
11552 }
11553 else
11554 {
11555 printf_unfiltered (_("no syscall record support\n"));
 11556 return -1;
11557 }
11558 }
11559
11560 if ((coproc & 0x0e) == 0x0a)
11561 {
11562 /* VFP data-processing instructions. */
11563 if (!op1_sbit && !op)
 11564 return arm_record_vfp_data_proc_insn (arm_insn_r);
11565
11566 /* Advanced SIMD, VFP instructions. */
11567 if (!op1_sbit && op)
 11568 return arm_record_vdata_transfer_insn (arm_insn_r);
 11569 }
11570 else
11571 {
11572 /* Coprocessor data operations. */
11573 if (!op1_sbit && !op)
11574 return arm_record_unsupported_insn (arm_insn_r);
11575
11576 /* Move to Coprocessor from ARM core register. */
11577 if (!op1_sbit && !op1_ebit && op)
11578 return arm_record_unsupported_insn (arm_insn_r);
11579
11580 /* Move to arm core register from coprocessor. */
11581 if (!op1_sbit && op1_ebit && op)
11582 {
11583 uint32_t record_buf[1];
11584
11585 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11586 if (record_buf[0] == 15)
11587 record_buf[0] = ARM_PS_REGNUM;
11588
11589 arm_insn_r->reg_rec_count = 1;
11590 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11591 record_buf);
11592 return 0;
11593 }
 11594 }
 11595
 11596 return arm_record_unsupported_insn (arm_insn_r);
11597}
11598
11599/* Handling opcode 000 insns. */
11600
11601static int
11602thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11603{
11604 uint32_t record_buf[8];
11605 uint32_t reg_src1 = 0;
11606
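 /* Thumb format 1/2 (shift by immediate, add/subtract): the destination
 register is in bits 0-2 and the condition flags in CPSR change. */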
11607 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11608
11609 record_buf[0] = ARM_PS_REGNUM;
11610 record_buf[1] = reg_src1;
11611 thumb_insn_r->reg_rec_count = 2;
11612
11613 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11614
11615 return 0;
11616}
11617
11618
11619/* Handling opcode 001 insns. */
11620
11621static int
11622thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11623{
11624 uint32_t record_buf[8];
11625 uint32_t reg_src1 = 0;
11626
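 /* Thumb format 3 (move/compare/add/subtract immediate): the destination
 register is in bits 8-10 and the condition flags in CPSR change. */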
11627 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11628
11629 record_buf[0] = ARM_PS_REGNUM;
11630 record_buf[1] = reg_src1;
11631 thumb_insn_r->reg_rec_count = 2;
11632
11633 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11634
11635 return 0;
11636}
11637
11638/* Handling opcode 010 insns. */
11639
11640static int
11641thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11642{
11643 struct regcache *reg_cache = thumb_insn_r->regcache;
11644 uint32_t record_buf[8], record_buf_mem[8];
11645
11646 uint32_t reg_src1 = 0, reg_src2 = 0;
11647 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11648
11649 ULONGEST u_regval[2] = {0};
11650
11651 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11652
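 /* Bit 12 set selects the register-offset loads/stores; otherwise bit 11
 selects a literal-pool LDR, a non-zero opcode1 selects the hi-register
 operations and BX, and the remaining case is ALU data processing. */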
11653 if (bit (thumb_insn_r->arm_insn, 12))
11654 {
11655 /* Handle load/store register offset. */
11656 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11657 if (opcode2 >= 12 && opcode2 <= 15)
11658 {
11659 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11660 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11661 record_buf[0] = reg_src1;
11662 thumb_insn_r->reg_rec_count = 1;
11663 }
11664 else if (opcode2 >= 8 && opcode2 <= 10)
11665 {
11666 /* STR(2), STRB(2), STRH(2) . */
11667 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11668 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11669 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11670 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11671 if (8 == opcode2)
11672 record_buf_mem[0] = 4; /* STR (2). */
11673 else if (10 == opcode2)
11674 record_buf_mem[0] = 1; /* STRB (2). */
11675 else if (9 == opcode2)
11676 record_buf_mem[0] = 2; /* STRH (2). */
11677 record_buf_mem[1] = u_regval[0] + u_regval[1];
11678 thumb_insn_r->mem_rec_count = 1;
11679 }
11680 }
11681 else if (bit (thumb_insn_r->arm_insn, 11))
11682 {
11683 /* Handle load from literal pool. */
11684 /* LDR(3). */
11685 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11686 record_buf[0] = reg_src1;
11687 thumb_insn_r->reg_rec_count = 1;
11688 }
11689 else if (opcode1)
11690 {
11691 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11692 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11693 if ((3 == opcode2) && (!opcode3))
11694 {
11695 /* Branch with exchange. */
11696 record_buf[0] = ARM_PS_REGNUM;
11697 thumb_insn_r->reg_rec_count = 1;
11698 }
11699 else
11700 {
11701 /* Format 8; special data processing insns. */
11702 record_buf[0] = ARM_PS_REGNUM;
11703 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11704 | bits (thumb_insn_r->arm_insn, 0, 2));
11705 thumb_insn_r->reg_rec_count = 2;
11706 }
11707 }
11708 else
11709 {
11710 /* Format 5; data processing insns. */
11711 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11712 if (bit (thumb_insn_r->arm_insn, 7))
11713 {
11714 reg_src1 = reg_src1 + 8;
11715 }
11716 record_buf[0] = ARM_PS_REGNUM;
11717 record_buf[1] = reg_src1;
11718 thumb_insn_r->reg_rec_count = 2;
11719 }
11720
11721 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11722 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11723 record_buf_mem);
11724
11725 return 0;
11726}
11727
11728/* Handling opcode 001 insns. */
11729
11730static int
11731thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11732{
11733 struct regcache *reg_cache = thumb_insn_r->regcache;
11734 uint32_t record_buf[8], record_buf_mem[8];
11735
11736 uint32_t reg_src1 = 0;
11737 uint32_t opcode = 0, immed_5 = 0;
11738
11739 ULONGEST u_regval = 0;
11740
11741 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11742
11743 if (opcode)
11744 {
11745 /* LDR(1). */
11746 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11747 record_buf[0] = reg_src1;
11748 thumb_insn_r->reg_rec_count = 1;
11749 }
11750 else
11751 {
11752 /* STR(1). */
11753 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11754 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11755 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11756 record_buf_mem[0] = 4;
11757 record_buf_mem[1] = u_regval + (immed_5 * 4);
11758 thumb_insn_r->mem_rec_count = 1;
11759 }
11760
11761 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11762 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11763 record_buf_mem);
11764
11765 return 0;
11766}
11767
11768/* Handling opcode 100 insns. */
11769
11770static int
11771thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11772{
11773 struct regcache *reg_cache = thumb_insn_r->regcache;
11774 uint32_t record_buf[8], record_buf_mem[8];
11775
11776 uint32_t reg_src1 = 0;
11777 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11778
11779 ULONGEST u_regval = 0;
11780
11781 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11782
11783 if (3 == opcode)
11784 {
11785 /* LDR(4). */
11786 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11787 record_buf[0] = reg_src1;
11788 thumb_insn_r->reg_rec_count = 1;
11789 }
11790 else if (1 == opcode)
11791 {
11792 /* LDRH(1). */
11793 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11794 record_buf[0] = reg_src1;
11795 thumb_insn_r->reg_rec_count = 1;
11796 }
11797 else if (2 == opcode)
11798 {
11799 /* STR(3). */
11800 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11801 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11802 record_buf_mem[0] = 4;
11803 record_buf_mem[1] = u_regval + (immed_8 * 4);
11804 thumb_insn_r->mem_rec_count = 1;
11805 }
11806 else if (0 == opcode)
11807 {
11808 /* STRH(1). */
11809 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11810 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11811 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11812 record_buf_mem[0] = 2;
11813 record_buf_mem[1] = u_regval + (immed_5 * 2);
11814 thumb_insn_r->mem_rec_count = 1;
11815 }
11816
11817 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11818 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11819 record_buf_mem);
11820
11821 return 0;
11822}
11823
11824/* Handling opcode 101 insns. */
11825
11826static int
11827thumb_record_misc (insn_decode_record *thumb_insn_r)
11828{
11829 struct regcache *reg_cache = thumb_insn_r->regcache;
11830
11831 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11832 uint32_t register_bits = 0, register_count = 0;
 11833 uint32_t index = 0, start_address = 0;
11834 uint32_t record_buf[24], record_buf_mem[48];
11835 uint32_t reg_src1;
11836
11837 ULONGEST u_regval = 0;
11838
11839 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11840 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11841 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11842
11843 if (14 == opcode2)
11844 {
11845 /* POP. */
11846 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11847 while (register_bits)
11848 {
11849 if (register_bits & 0x00000001)
11850 record_buf[index++] = register_count;
11851 register_bits = register_bits >> 1;
11852 register_count++;
11853 }
11854 record_buf[index++] = ARM_PS_REGNUM;
11855 record_buf[index++] = ARM_SP_REGNUM;
11856 thumb_insn_r->reg_rec_count = index;
11857 }
11858 else if (10 == opcode2)
11859 {
11860 /* PUSH. */
11861 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
 11862 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11863 while (register_bits)
11864 {
11865 if (register_bits & 0x00000001)
11866 register_count++;
11867 register_bits = register_bits >> 1;
11868 }
11869 start_address = u_regval - \
11870 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11871 thumb_insn_r->mem_rec_count = register_count;
11872 while (register_count)
11873 {
11874 record_buf_mem[(register_count * 2) - 1] = start_address;
11875 record_buf_mem[(register_count * 2) - 2] = 4;
11876 start_address = start_address + 4;
11877 register_count--;
11878 }
11879 record_buf[0] = ARM_SP_REGNUM;
11880 thumb_insn_r->reg_rec_count = 1;
11881 }
11882 else if (0x1E == opcode1)
11883 {
11884 /* BKPT insn. */
 11886      /* Handle the enhanced software breakpoint insn, BKPT.  */
 11886      /* CPSR is changed so that execution continues in ARM state with
 11887         normal interrupts disabled, entering abort mode.  */
 11888      /* The PC is then set according to the high vector configuration.  */
 11889      /* When the user reverses over this breakpoint we need to restore the
 11890         previous CPSR and program counter.  */
11891 record_buf[0] = ARM_PS_REGNUM;
11892 record_buf[1] = ARM_LR_REGNUM;
11893 thumb_insn_r->reg_rec_count = 2;
11894 /* We need to save SPSR value, which is not yet done. */
11895 printf_unfiltered (_("Process record does not support instruction "
11896 "0x%0x at address %s.\n"),
11897 thumb_insn_r->arm_insn,
11898 paddress (thumb_insn_r->gdbarch,
11899 thumb_insn_r->this_addr));
11900 return -1;
11901 }
11902 else if ((0 == opcode) || (1 == opcode))
11903 {
11904 /* ADD(5), ADD(6). */
11905 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11906 record_buf[0] = reg_src1;
11907 thumb_insn_r->reg_rec_count = 1;
11908 }
11909 else if (2 == opcode)
11910 {
11911 /* ADD(7), SUB(4). */
11912 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11913 record_buf[0] = ARM_SP_REGNUM;
11914 thumb_insn_r->reg_rec_count = 1;
11915 }
11916
11917 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11918 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11919 record_buf_mem);
11920
11921 return 0;
11922}
11923
11924/* Handling opcode 110 insns. */
11925
11926static int
11927thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11928{
11929 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11930 struct regcache *reg_cache = thumb_insn_r->regcache;
11931
11932 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
11933 uint32_t reg_src1 = 0;
11934 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
bec2ab5a 11935 uint32_t index = 0, start_address = 0;
11936 uint32_t record_buf[24], record_buf_mem[48];
11937
11938 ULONGEST u_regval = 0;
11939
11940 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11941 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
11942
11943 if (1 == opcode2)
11944 {
11945
11946 /* LDMIA. */
11947 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11948 /* Get Rn. */
11949 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11950 while (register_bits)
11951 {
11952 if (register_bits & 0x00000001)
f969241e 11953 record_buf[index++] = register_count;
72508ac0 11954 register_bits = register_bits >> 1;
f969241e 11955 register_count++;
72508ac0 11956 }
11957 record_buf[index++] = reg_src1;
11958 thumb_insn_r->reg_rec_count = index;
11959 }
11960 else if (0 == opcode2)
11961 {
 11962      /* Handle STMIA.  */
11963 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11964 /* Get Rn. */
11965 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11966 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11967 while (register_bits)
11968 {
11969 if (register_bits & 0x00000001)
11970 register_count++;
11971 register_bits = register_bits >> 1;
11972 }
11973 start_address = u_regval;
11974 thumb_insn_r->mem_rec_count = register_count;
11975 while (register_count)
11976 {
11977 record_buf_mem[(register_count * 2) - 1] = start_address;
11978 record_buf_mem[(register_count * 2) - 2] = 4;
11979 start_address = start_address + 4;
11980 register_count--;
11981 }
11982 }
11983 else if (0x1F == opcode1)
11984 {
11985 /* Handle arm syscall insn. */
97dfe206 11986 if (tdep->arm_syscall_record != NULL)
72508ac0 11987 {
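          /* r7 holds the syscall number (the ARM EABI convention); pass it
             to the OS-specific syscall record routine.  */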
11988 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
11989 ret = tdep->arm_syscall_record (reg_cache, u_regval);
11990 }
11991 else
11992 {
11993 printf_unfiltered (_("no syscall record support\n"));
11994 return -1;
11995 }
11996 }
11997
 11998  /* B(1), the conditional branch, is automatically taken care of in
 11999     process_record, as the PC is saved there.  */
12000
12001 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12002 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12003 record_buf_mem);
12004
12005 return ret;
12006}
12007
12008/* Handling opcode 111 insns. */
12009
12010static int
12011thumb_record_branch (insn_decode_record *thumb_insn_r)
12012{
12013 uint32_t record_buf[8];
12014 uint32_t bits_h = 0;
12015
12016 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12017
12018 if (2 == bits_h || 3 == bits_h)
12019 {
12020 /* BL */
12021 record_buf[0] = ARM_LR_REGNUM;
12022 thumb_insn_r->reg_rec_count = 1;
12023 }
12024 else if (1 == bits_h)
12025 {
12026 /* BLX(1). */
12027 record_buf[0] = ARM_PS_REGNUM;
12028 record_buf[1] = ARM_LR_REGNUM;
12029 thumb_insn_r->reg_rec_count = 2;
12030 }
12031
 12032  /* B(2) is automatically taken care of in process_record, as the PC
 12033     is saved there.  */
12034
12035 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12036
12037 return 0;
12038}
12039
12040/* Handler for thumb2 load/store multiple instructions. */
12041
12042static int
12043thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12044{
12045 struct regcache *reg_cache = thumb2_insn_r->regcache;
12046
12047 uint32_t reg_rn, op;
12048 uint32_t register_bits = 0, register_count = 0;
12049 uint32_t index = 0, start_address = 0;
12050 uint32_t record_buf[24], record_buf_mem[48];
12051
12052 ULONGEST u_regval = 0;
12053
12054 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12055 op = bits (thumb2_insn_r->arm_insn, 23, 24);
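  /* op (bits 23:24) separates the SRS/RFE forms (0 and 3) from the LDM/STM
     increment-after (1) and decrement-before (2) forms handled below.  */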
12056
12057 if (0 == op || 3 == op)
12058 {
12059 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12060 {
12061 /* Handle RFE instruction. */
12062 record_buf[0] = ARM_PS_REGNUM;
12063 thumb2_insn_r->reg_rec_count = 1;
12064 }
12065 else
12066 {
12067 /* Handle SRS instruction after reading banked SP. */
12068 return arm_record_unsupported_insn (thumb2_insn_r);
12069 }
12070 }
12071 else if (1 == op || 2 == op)
12072 {
12073 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12074 {
12075 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12076 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12077 while (register_bits)
12078 {
12079 if (register_bits & 0x00000001)
12080 record_buf[index++] = register_count;
12081
12082 register_count++;
12083 register_bits = register_bits >> 1;
12084 }
12085 record_buf[index++] = reg_rn;
12086 record_buf[index++] = ARM_PS_REGNUM;
12087 thumb2_insn_r->reg_rec_count = index;
12088 }
12089 else
12090 {
12091 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12092 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12093 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12094 while (register_bits)
12095 {
12096 if (register_bits & 0x00000001)
12097 register_count++;
12098
12099 register_bits = register_bits >> 1;
12100 }
12101
12102 if (1 == op)
12103 {
 12104              /* Start address calculation for STMIA/STMEA (increment after).  */
12105 start_address = u_regval;
12106 }
12107 else if (2 == op)
12108 {
 12109              /* Start address calculation for STMDB/STMFD (decrement before).  */
12110 start_address = u_regval - register_count * 4;
12111 }
12112
12113 thumb2_insn_r->mem_rec_count = register_count;
12114 while (register_count)
12115 {
12116 record_buf_mem[register_count * 2 - 1] = start_address;
12117 record_buf_mem[register_count * 2 - 2] = 4;
12118 start_address = start_address + 4;
12119 register_count--;
12120 }
12121 record_buf[0] = reg_rn;
12122 record_buf[1] = ARM_PS_REGNUM;
12123 thumb2_insn_r->reg_rec_count = 2;
12124 }
12125 }
12126
12127 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12128 record_buf_mem);
12129 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12130 record_buf);
12131 return ARM_RECORD_SUCCESS;
12132}
12133
12134/* Handler for thumb2 load/store (dual/exclusive) and table branch
12135 instructions. */
12136
12137static int
12138thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12139{
12140 struct regcache *reg_cache = thumb2_insn_r->regcache;
12141
12142 uint32_t reg_rd, reg_rn, offset_imm;
12143 uint32_t reg_dest1, reg_dest2;
12144 uint32_t address, offset_addr;
12145 uint32_t record_buf[8], record_buf_mem[8];
12146 uint32_t op1, op2, op3;
12147
12148 ULONGEST u_regval[2];
12149
12150 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12151 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12152 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12153
12154 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12155 {
 12156      if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12157 {
12158 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12159 record_buf[0] = reg_dest1;
12160 record_buf[1] = ARM_PS_REGNUM;
12161 thumb2_insn_r->reg_rec_count = 2;
12162 }
12163
12164 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12165 {
12166 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12167 record_buf[2] = reg_dest2;
12168 thumb2_insn_r->reg_rec_count = 3;
12169 }
12170 }
12171 else
12172 {
12173 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12174 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12175
12176 if (0 == op1 && 0 == op2)
12177 {
12178 /* Handle STREX. */
12179 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12180 address = u_regval[0] + (offset_imm * 4);
12181 record_buf_mem[0] = 4;
12182 record_buf_mem[1] = address;
12183 thumb2_insn_r->mem_rec_count = 1;
12184 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12185 record_buf[0] = reg_rd;
12186 thumb2_insn_r->reg_rec_count = 1;
12187 }
12188 else if (1 == op1 && 0 == op2)
12189 {
12190 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12191 record_buf[0] = reg_rd;
12192 thumb2_insn_r->reg_rec_count = 1;
12193 address = u_regval[0];
12194 record_buf_mem[1] = address;
12195
12196 if (4 == op3)
12197 {
12198 /* Handle STREXB. */
12199 record_buf_mem[0] = 1;
12200 thumb2_insn_r->mem_rec_count = 1;
12201 }
12202 else if (5 == op3)
12203 {
12204 /* Handle STREXH. */
 12205              record_buf_mem[0] = 2;
12206 thumb2_insn_r->mem_rec_count = 1;
12207 }
12208 else if (7 == op3)
12209 {
12210 /* Handle STREXD. */
12211 address = u_regval[0];
12212 record_buf_mem[0] = 4;
12213 record_buf_mem[2] = 4;
12214 record_buf_mem[3] = address + 4;
12215 thumb2_insn_r->mem_rec_count = 2;
12216 }
12217 }
12218 else
12219 {
12220 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12221
12222 if (bit (thumb2_insn_r->arm_insn, 24))
12223 {
12224 if (bit (thumb2_insn_r->arm_insn, 23))
12225 offset_addr = u_regval[0] + (offset_imm * 4);
12226 else
12227 offset_addr = u_regval[0] - (offset_imm * 4);
12228
12229 address = offset_addr;
12230 }
12231 else
12232 address = u_regval[0];
12233
12234 record_buf_mem[0] = 4;
12235 record_buf_mem[1] = address;
12236 record_buf_mem[2] = 4;
12237 record_buf_mem[3] = address + 4;
12238 thumb2_insn_r->mem_rec_count = 2;
12239 record_buf[0] = reg_rn;
12240 thumb2_insn_r->reg_rec_count = 1;
12241 }
12242 }
12243
12244 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12245 record_buf);
12246 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12247 record_buf_mem);
12248 return ARM_RECORD_SUCCESS;
12249}
12250
 12251/* Handler for thumb2 data-processing (shifted register and modified
 12252   immediate) instructions.  */
12253
12254static int
12255thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12256{
12257 uint32_t reg_rd, op;
12258 uint32_t record_buf[8];
12259
12260 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12261 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12262
12263 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12264 {
12265 record_buf[0] = ARM_PS_REGNUM;
12266 thumb2_insn_r->reg_rec_count = 1;
12267 }
12268 else
12269 {
12270 record_buf[0] = reg_rd;
12271 record_buf[1] = ARM_PS_REGNUM;
12272 thumb2_insn_r->reg_rec_count = 2;
12273 }
12274
12275 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12276 record_buf);
12277 return ARM_RECORD_SUCCESS;
12278}
12279
 12280/* Generic handler for thumb2 instructions that affect the destination and
 12281   PS registers.  */
12282
12283static int
12284thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12285{
12286 uint32_t reg_rd;
12287 uint32_t record_buf[8];
12288
12289 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12290
12291 record_buf[0] = reg_rd;
12292 record_buf[1] = ARM_PS_REGNUM;
12293 thumb2_insn_r->reg_rec_count = 2;
12294
12295 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12296 record_buf);
12297 return ARM_RECORD_SUCCESS;
12298}
12299
12300/* Handler for thumb2 branch and miscellaneous control instructions. */
12301
12302static int
12303thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12304{
12305 uint32_t op, op1, op2;
12306 uint32_t record_buf[8];
12307
12308 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12309 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12310 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
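  /* op (bits 20:26), op1 (bits 12:14) and op2 (bits 8:11) select among the
     MSR and BL/BLX forms recorded below.  */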
12311
12312 /* Handle MSR insn. */
12313 if (!(op1 & 0x2) && 0x38 == op)
12314 {
12315 if (!(op2 & 0x3))
12316 {
12317 /* CPSR is going to be changed. */
12318 record_buf[0] = ARM_PS_REGNUM;
12319 thumb2_insn_r->reg_rec_count = 1;
12320 }
12321 else
12322 {
 12323          arm_record_unsupported_insn (thumb2_insn_r);
12324 return -1;
12325 }
12326 }
12327 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12328 {
 12329      /* BL and BLX (immediate).  */
12330 record_buf[0] = ARM_PS_REGNUM;
12331 record_buf[1] = ARM_LR_REGNUM;
12332 thumb2_insn_r->reg_rec_count = 2;
12333 }
12334
12335 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12336 record_buf);
12337 return ARM_RECORD_SUCCESS;
12338}
12339
12340/* Handler for thumb2 store single data item instructions. */
12341
12342static int
12343thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12344{
12345 struct regcache *reg_cache = thumb2_insn_r->regcache;
12346
12347 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12348 uint32_t address, offset_addr;
12349 uint32_t record_buf[8], record_buf_mem[8];
12350 uint32_t op1, op2;
12351
12352 ULONGEST u_regval[2];
12353
12354 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12355 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12356 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12357 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12358
12359 if (bit (thumb2_insn_r->arm_insn, 23))
12360 {
12361 /* T2 encoding. */
12362 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12363 offset_addr = u_regval[0] + offset_imm;
12364 address = offset_addr;
12365 }
12366 else
12367 {
12368 /* T3 encoding. */
12369 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12370 {
12371 /* Handle STRB (register). */
12372 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12373 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12374 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12375 offset_addr = u_regval[1] << shift_imm;
12376 address = u_regval[0] + offset_addr;
12377 }
12378 else
12379 {
12380 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12381 if (bit (thumb2_insn_r->arm_insn, 10))
12382 {
12383 if (bit (thumb2_insn_r->arm_insn, 9))
12384 offset_addr = u_regval[0] + offset_imm;
12385 else
12386 offset_addr = u_regval[0] - offset_imm;
12387
12388 address = offset_addr;
12389 }
12390 else
12391 address = u_regval[0];
12392 }
12393 }
12394
12395 switch (op1)
12396 {
12397 /* Store byte instructions. */
12398 case 4:
12399 case 0:
12400 record_buf_mem[0] = 1;
12401 break;
12402 /* Store half word instructions. */
12403 case 1:
12404 case 5:
12405 record_buf_mem[0] = 2;
12406 break;
12407 /* Store word instructions. */
12408 case 2:
12409 case 6:
12410 record_buf_mem[0] = 4;
12411 break;
12412
12413 default:
12414 gdb_assert_not_reached ("no decoding pattern found");
12415 break;
12416 }
12417
12418 record_buf_mem[1] = address;
12419 thumb2_insn_r->mem_rec_count = 1;
12420 record_buf[0] = reg_rn;
12421 thumb2_insn_r->reg_rec_count = 1;
12422
12423 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12424 record_buf);
12425 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12426 record_buf_mem);
12427 return ARM_RECORD_SUCCESS;
12428}
12429
12430/* Handler for thumb2 load memory hints instructions. */
12431
12432static int
12433thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12434{
12435 uint32_t record_buf[8];
12436 uint32_t reg_rt, reg_rn;
12437
12438 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12439 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12440
12441 if (ARM_PC_REGNUM != reg_rt)
12442 {
12443 record_buf[0] = reg_rt;
12444 record_buf[1] = reg_rn;
12445 record_buf[2] = ARM_PS_REGNUM;
12446 thumb2_insn_r->reg_rec_count = 3;
12447
12448 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12449 record_buf);
12450 return ARM_RECORD_SUCCESS;
12451 }
12452
12453 return ARM_RECORD_FAILURE;
12454}
12455
12456/* Handler for thumb2 load word instructions. */
12457
12458static int
12459thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12460{
12461 uint32_t record_buf[8];
12462
12463 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12464 record_buf[1] = ARM_PS_REGNUM;
12465 thumb2_insn_r->reg_rec_count = 2;
12466
12467 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12468 record_buf);
12469 return ARM_RECORD_SUCCESS;
12470}
12471
12472/* Handler for thumb2 long multiply, long multiply accumulate, and
12473 divide instructions. */
12474
12475static int
12476thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12477{
12478 uint32_t opcode1 = 0, opcode2 = 0;
12479 uint32_t record_buf[8];
12480
12481 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12482 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12483
12484 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12485 {
 12486      /* Handle SMULL(S), UMULL(S), SMLAL(S), UMLAL(S) and the other
 12487         long multiply and multiply-accumulate insns.  */
12488 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12489 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12490 record_buf[2] = ARM_PS_REGNUM;
12491 thumb2_insn_r->reg_rec_count = 3;
12492 }
 12493  else if (1 == opcode1 || 3 == opcode1)
12494 {
12495 /* Handle SDIV and UDIV. */
12496 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12497 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12498 record_buf[2] = ARM_PS_REGNUM;
12499 thumb2_insn_r->reg_rec_count = 3;
12500 }
12501 else
12502 return ARM_RECORD_FAILURE;
12503
12504 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12505 record_buf);
12506 return ARM_RECORD_SUCCESS;
12507}
12508
12509/* Record handler for thumb32 coprocessor instructions. */
12510
12511static int
12512thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12513{
12514 if (bit (thumb2_insn_r->arm_insn, 25))
12515 return arm_record_coproc_data_proc (thumb2_insn_r);
12516 else
12517 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12518}
12519
 12520/* Record handler for Advanced SIMD structure load/store instructions.  */
12521
12522static int
12523thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12524{
12525 struct regcache *reg_cache = thumb2_insn_r->regcache;
12526 uint32_t l_bit, a_bit, b_bits;
12527 uint32_t record_buf[128], record_buf_mem[128];
bec2ab5a 12528 uint32_t reg_rn, reg_vd, address, f_elem;
12529 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12530 uint8_t f_ebytes;
12531
12532 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12533 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12534 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12535 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12536 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12537 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12538 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12539 f_elem = 8 / f_ebytes;
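  /* f_ebytes is the element size in bytes taken from the size field, and
     f_elem is the number of such elements in one 8-byte D register.  */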
12540
12541 if (!l_bit)
12542 {
12543 ULONGEST u_regval = 0;
12544 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12545 address = u_regval;
12546
12547 if (!a_bit)
12548 {
12549 /* Handle VST1. */
12550 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12551 {
12552 if (b_bits == 0x07)
12553 bf_regs = 1;
12554 else if (b_bits == 0x0a)
12555 bf_regs = 2;
12556 else if (b_bits == 0x06)
12557 bf_regs = 3;
12558 else if (b_bits == 0x02)
12559 bf_regs = 4;
12560 else
12561 bf_regs = 0;
12562
12563 for (index_r = 0; index_r < bf_regs; index_r++)
12564 {
12565 for (index_e = 0; index_e < f_elem; index_e++)
12566 {
12567 record_buf_mem[index_m++] = f_ebytes;
12568 record_buf_mem[index_m++] = address;
12569 address = address + f_ebytes;
12570 thumb2_insn_r->mem_rec_count += 1;
12571 }
12572 }
12573 }
12574 /* Handle VST2. */
12575 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12576 {
12577 if (b_bits == 0x09 || b_bits == 0x08)
12578 bf_regs = 1;
12579 else if (b_bits == 0x03)
12580 bf_regs = 2;
12581 else
12582 bf_regs = 0;
12583
12584 for (index_r = 0; index_r < bf_regs; index_r++)
12585 for (index_e = 0; index_e < f_elem; index_e++)
12586 {
12587 for (loop_t = 0; loop_t < 2; loop_t++)
12588 {
12589 record_buf_mem[index_m++] = f_ebytes;
12590 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12591 thumb2_insn_r->mem_rec_count += 1;
12592 }
12593 address = address + (2 * f_ebytes);
12594 }
12595 }
12596 /* Handle VST3. */
12597 else if ((b_bits & 0x0e) == 0x04)
12598 {
12599 for (index_e = 0; index_e < f_elem; index_e++)
12600 {
12601 for (loop_t = 0; loop_t < 3; loop_t++)
12602 {
12603 record_buf_mem[index_m++] = f_ebytes;
12604 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12605 thumb2_insn_r->mem_rec_count += 1;
12606 }
12607 address = address + (3 * f_ebytes);
12608 }
12609 }
12610 /* Handle VST4. */
12611 else if (!(b_bits & 0x0e))
12612 {
12613 for (index_e = 0; index_e < f_elem; index_e++)
12614 {
12615 for (loop_t = 0; loop_t < 4; loop_t++)
12616 {
12617 record_buf_mem[index_m++] = f_ebytes;
12618 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12619 thumb2_insn_r->mem_rec_count += 1;
12620 }
12621 address = address + (4 * f_ebytes);
12622 }
12623 }
12624 }
12625 else
12626 {
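          /* For the single-element ("to one lane") store forms the element
             size is encoded in bits 10:11 instead.  */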
12627 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12628
12629 if (bft_size == 0x00)
12630 f_ebytes = 1;
12631 else if (bft_size == 0x01)
12632 f_ebytes = 2;
12633 else if (bft_size == 0x02)
12634 f_ebytes = 4;
12635 else
12636 f_ebytes = 0;
12637
12638 /* Handle VST1. */
12639 if (!(b_bits & 0x0b) || b_bits == 0x08)
12640 thumb2_insn_r->mem_rec_count = 1;
12641 /* Handle VST2. */
12642 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12643 thumb2_insn_r->mem_rec_count = 2;
12644 /* Handle VST3. */
12645 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12646 thumb2_insn_r->mem_rec_count = 3;
12647 /* Handle VST4. */
12648 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12649 thumb2_insn_r->mem_rec_count = 4;
12650
12651 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12652 {
 12653              record_buf_mem[index_m * 2] = f_ebytes;
 12654              record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12655 }
12656 }
12657 }
12658 else
12659 {
12660 if (!a_bit)
12661 {
12662 /* Handle VLD1. */
12663 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12664 thumb2_insn_r->reg_rec_count = 1;
12665 /* Handle VLD2. */
12666 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12667 thumb2_insn_r->reg_rec_count = 2;
12668 /* Handle VLD3. */
12669 else if ((b_bits & 0x0e) == 0x04)
12670 thumb2_insn_r->reg_rec_count = 3;
12671 /* Handle VLD4. */
12672 else if (!(b_bits & 0x0e))
12673 thumb2_insn_r->reg_rec_count = 4;
12674 }
12675 else
12676 {
12677 /* Handle VLD1. */
12678 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12679 thumb2_insn_r->reg_rec_count = 1;
12680 /* Handle VLD2. */
12681 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12682 thumb2_insn_r->reg_rec_count = 2;
12683 /* Handle VLD3. */
12684 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12685 thumb2_insn_r->reg_rec_count = 3;
12686 /* Handle VLD4. */
12687 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12688 thumb2_insn_r->reg_rec_count = 4;
12689
12690 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12691 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12692 }
12693 }
12694
12695 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12696 {
 12697      record_buf[thumb2_insn_r->reg_rec_count] = reg_rn;
12698 thumb2_insn_r->reg_rec_count += 1;
12699 }
12700
12701 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12702 record_buf);
12703 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12704 record_buf_mem);
12705 return 0;
12706}
12707
12708/* Decodes thumb2 instruction type and invokes its record handler. */
12709
12710static unsigned int
12711thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12712{
12713 uint32_t op, op1, op2;
12714
12715 op = bit (thumb2_insn_r->arm_insn, 15);
12716 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12717 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
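  /* op1 (bits 27:28) selects the major 32-bit Thumb-2 encoding group; op and
     op2 narrow it down to the record handler invoked below.  */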
12718
12719 if (op1 == 0x01)
12720 {
 12721      if (!(op2 & 0x64))
12722 {
12723 /* Load/store multiple instruction. */
12724 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12725 }
12726 else if (!((op2 & 0x64) ^ 0x04))
12727 {
12728 /* Load/store (dual/exclusive) and table branch instruction. */
12729 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12730 }
12731 else if (!((op2 & 0x20) ^ 0x20))
12732 {
12733 /* Data-processing (shifted register). */
12734 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12735 }
12736 else if (op2 & 0x40)
12737 {
12738 /* Co-processor instructions. */
60cc5e93 12739 return thumb2_record_coproc_insn (thumb2_insn_r);
12740 }
12741 }
12742 else if (op1 == 0x02)
12743 {
12744 if (op)
12745 {
12746 /* Branches and miscellaneous control instructions. */
12747 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12748 }
12749 else if (op2 & 0x20)
12750 {
12751 /* Data-processing (plain binary immediate) instruction. */
12752 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12753 }
12754 else
12755 {
12756 /* Data-processing (modified immediate). */
12757 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12758 }
12759 }
12760 else if (op1 == 0x03)
12761 {
 12762      if (!(op2 & 0x71))
12763 {
12764 /* Store single data item. */
12765 return thumb2_record_str_single_data (thumb2_insn_r);
12766 }
12767 else if (!((op2 & 0x71) ^ 0x10))
12768 {
12769 /* Advanced SIMD or structure load/store instructions. */
1e1b6563 12770 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12771 }
12772 else if (!((op2 & 0x67) ^ 0x01))
12773 {
12774 /* Load byte, memory hints instruction. */
12775 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12776 }
12777 else if (!((op2 & 0x67) ^ 0x03))
12778 {
12779 /* Load halfword, memory hints instruction. */
12780 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12781 }
12782 else if (!((op2 & 0x67) ^ 0x05))
12783 {
12784 /* Load word instruction. */
12785 return thumb2_record_ld_word (thumb2_insn_r);
12786 }
12787 else if (!((op2 & 0x70) ^ 0x20))
12788 {
12789 /* Data-processing (register) instruction. */
12790 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12791 }
12792 else if (!((op2 & 0x78) ^ 0x30))
12793 {
12794 /* Multiply, multiply accumulate, abs diff instruction. */
12795 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12796 }
12797 else if (!((op2 & 0x78) ^ 0x38))
12798 {
12799 /* Long multiply, long multiply accumulate, and divide. */
12800 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12801 }
12802 else if (op2 & 0x40)
12803 {
12804 /* Co-processor instructions. */
60cc5e93 12805 return thumb2_record_coproc_insn (thumb2_insn_r);
12806 }
12807 }
12808
12809 return -1;
12810}
12811
 12812/* Extract the arm/thumb/thumb2 insn of the given size; return 0 on success
 12813   and a positive value on failure.  */
12814
12815static int
12816extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12817{
12818 gdb_byte buf[insn_size];
12819
12820 memset (&buf[0], 0, insn_size);
12821
12822 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12823 return 1;
12824 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12825 insn_size,
2959fed9 12826 gdbarch_byte_order_for_code (insn_record->gdbarch));
12827 return 0;
12828}
12829
12830typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12831
 12832/* Decode an arm/thumb insn depending on its condition codes and opcodes, and
 12833   dispatch it to the matching record handler.  */
12834
12835static int
12836decode_insn (insn_decode_record *arm_record, record_type_t record_type,
01e57735 12837 uint32_t insn_size)
12838{
12839
 12840  /* Bits 25, 26 and 27 (counting from bit 0) decode the type of arm
 12841     instruction.  */
0fa9c223 12842 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12843 {
12844 arm_record_data_proc_misc_ld_str, /* 000. */
12845 arm_record_data_proc_imm, /* 001. */
12846 arm_record_ld_st_imm_offset, /* 010. */
12847 arm_record_ld_st_reg_offset, /* 011. */
12848 arm_record_ld_st_multiple, /* 100. */
12849 arm_record_b_bl, /* 101. */
60cc5e93 12850 arm_record_asimd_vfp_coproc, /* 110. */
12851 arm_record_coproc_data_proc /* 111. */
12852 };
12853
 12854  /* Bits 13, 14 and 15 (counting from bit 0) decode the type of thumb
 12855     instruction.  */
0fa9c223 12856 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
 12857  {
12858 thumb_record_shift_add_sub, /* 000. */
12859 thumb_record_add_sub_cmp_mov, /* 001. */
12860 thumb_record_ld_st_reg_offset, /* 010. */
12861 thumb_record_ld_st_imm_offset, /* 011. */
12862 thumb_record_ld_st_stack, /* 100. */
12863 thumb_record_misc, /* 101. */
12864 thumb_record_ldm_stm_swi, /* 110. */
12865 thumb_record_branch /* 111. */
12866 };
12867
12868 uint32_t ret = 0; /* return value: negative:failure 0:success. */
12869 uint32_t insn_id = 0;
12870
12871 if (extract_arm_insn (arm_record, insn_size))
12872 {
12873 if (record_debug)
12874 {
12875 printf_unfiltered (_("Process record: error reading memory at "
12876 "addr %s len = %d.\n"),
12877 paddress (arm_record->gdbarch,
12878 arm_record->this_addr), insn_size);
12879 }
12880 return -1;
12881 }
12882 else if (ARM_RECORD == record_type)
12883 {
12884 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12885 insn_id = bits (arm_record->arm_insn, 25, 27);
12886
12887 if (arm_record->cond == 0xf)
12888 ret = arm_record_extension_space (arm_record);
12889 else
01e57735 12890 {
 12891          /* If this insn had fallen into the extension space it would
 12892             already have been handled above and would not be decoded here.  */
12893 ret = arm_handle_insn[insn_id] (arm_record);
12894 }
12895 if (ret != ARM_RECORD_SUCCESS)
12896 {
12897 arm_record_unsupported_insn (arm_record);
12898 ret = -1;
12899 }
12900 }
12901 else if (THUMB_RECORD == record_type)
12902 {
 12903      /* Thumb insns have no condition codes, so set cond to a negative value.  */
12904 arm_record->cond = -1;
12905 insn_id = bits (arm_record->arm_insn, 13, 15);
12906 ret = thumb_handle_insn[insn_id] (arm_record);
12907 if (ret != ARM_RECORD_SUCCESS)
12908 {
12909 arm_record_unsupported_insn (arm_record);
12910 ret = -1;
12911 }
12912 }
12913 else if (THUMB2_RECORD == record_type)
12914 {
 12915      /* Thumb insns have no condition codes, so set cond to a negative value.  */
12916 arm_record->cond = -1;
12917
12918 /* Swap first half of 32bit thumb instruction with second half. */
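      /* The decoding in thumb2_record_decode_insn_handler expects the
         leading halfword of the 32-bit encoding in bits 16-31.  */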
12919 arm_record->arm_insn
01e57735 12920 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
c6ec2b30 12921
ca92db2d 12922 ret = thumb2_record_decode_insn_handler (arm_record);
c6ec2b30 12923
ca92db2d 12924 if (ret != ARM_RECORD_SUCCESS)
12925 {
12926 arm_record_unsupported_insn (arm_record);
12927 ret = -1;
12928 }
12929 }
12930 else
12931 {
12932 /* Throw assertion. */
12933 gdb_assert_not_reached ("not a valid instruction, could not decode");
12934 }
12935
12936 return ret;
12937}
12938
12939
12940/* Cleans up local record registers and memory allocations. */
12941
12942static void
12943deallocate_reg_mem (insn_decode_record *record)
12944{
12945 xfree (record->arm_regs);
12946 xfree (record->arm_mems);
12947}
12948
12949
01e57735 12950/* Parse the current instruction and record the values of the registers and
 12951   memory that will be changed by it into "record_arch_list".
 12952   Return -1 if something is wrong.  */
12953
12954int
12955arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12956 CORE_ADDR insn_addr)
12957{
12958
12959 uint32_t no_of_rec = 0;
12960 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
12961 ULONGEST t_bit = 0, insn_id = 0;
12962
12963 ULONGEST u_regval = 0;
12964
12965 insn_decode_record arm_record;
12966
12967 memset (&arm_record, 0, sizeof (insn_decode_record));
12968 arm_record.regcache = regcache;
12969 arm_record.this_addr = insn_addr;
12970 arm_record.gdbarch = gdbarch;
12971
12972
12973 if (record_debug > 1)
12974 {
12975 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
01e57735 12976 "addr = %s\n",
12977 paddress (gdbarch, arm_record.this_addr));
12978 }
12979
12980 if (extract_arm_insn (&arm_record, 2))
12981 {
12982 if (record_debug)
12983 {
12984 printf_unfiltered (_("Process record: error reading memory at "
12985 "addr %s len = %d.\n"),
12986 paddress (arm_record.gdbarch,
12987 arm_record.this_addr), 2);
12988 }
72508ac0
PO
12989 return -1;
12990 }
12991
 12992  /* Check whether the insn is a thumb or an arm one.  */
12993
12994 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12995 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
12996
12997
12998 if (!(u_regval & t_bit))
12999 {
13000 /* We are decoding arm insn. */
13001 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13002 }
13003 else
13004 {
13005 insn_id = bits (arm_record.arm_insn, 11, 15);
 13006      /* Is it a 32-bit thumb2 insn?  Bits 15:11 of its leading halfword are then 0x1D, 0x1E or 0x1F.  */
13007 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13008 {
13009 ret = decode_insn (&arm_record, THUMB2_RECORD,
13010 THUMB2_INSN_SIZE_BYTES);
13011 }
72508ac0 13012 else
13013 {
13014 /* We are decoding thumb insn. */
13015 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13016 }
13017 }
13018
13019 if (0 == ret)
13020 {
13021 /* Record registers. */
25ea693b 13022 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
72508ac0 13023 if (arm_record.arm_regs)
13024 {
13025 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13026 {
13027 if (record_full_arch_list_add_reg
25ea693b 13028              (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
13029 ret = -1;
13030 }
13031 }
13032 /* Record memories. */
13033 if (arm_record.arm_mems)
13034 {
13035 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13036 {
13037 if (record_full_arch_list_add_mem
13038 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
25ea693b 13039 arm_record.arm_mems[no_of_rec].len))
13040 ret = -1;
13041 }
13042 }
72508ac0 13043
25ea693b 13044 if (record_full_arch_list_add_end ())
01e57735 13045 ret = -1;
13046 }
13047
13048
13049 deallocate_reg_mem (&arm_record);
13050
13051 return ret;
13052}