1/* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2018 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20#include "defs.h"
21
22#include <ctype.h> /* XXX for isupper (). */
23
24#include "frame.h"
25#include "inferior.h"
26#include "infrun.h"
27#include "gdbcmd.h"
28#include "gdbcore.h"
29#include "dis-asm.h" /* For register styles. */
30#include "disasm.h"
31#include "regcache.h"
32#include "reggroups.h"
33#include "target-float.h"
34#include "value.h"
35#include "arch-utils.h"
36#include "osabi.h"
37#include "frame-unwind.h"
38#include "frame-base.h"
39#include "trad-frame.h"
40#include "objfiles.h"
41#include "dwarf2-frame.h"
42#include "gdbtypes.h"
43#include "prologue-value.h"
44#include "remote.h"
45#include "target-descriptions.h"
46#include "user-regs.h"
47#include "observable.h"
48
49#include "arch/arm.h"
50#include "arch/arm-get-next-pcs.h"
51#include "arm-tdep.h"
52#include "gdb/sim-arm.h"
53
54#include "elf-bfd.h"
55#include "coff/internal.h"
56#include "elf/arm.h"
57
58#include "vec.h"
59
60#include "record.h"
61#include "record-full.h"
62#include <algorithm>
63
64#include "features/arm/arm-with-m.c"
65#include "features/arm/arm-with-m-fpa-layout.c"
66#include "features/arm/arm-with-m-vfp-d16.c"
67#include "features/arm/arm-with-iwmmxt.c"
68#include "features/arm/arm-with-vfpv2.c"
69#include "features/arm/arm-with-vfpv3.c"
70#include "features/arm/arm-with-neon.c"
71
72#if GDB_SELF_TEST
73#include "selftest.h"
74#endif
75
76static int arm_debug;
77
78/* Macros for setting and testing a bit in a minimal symbol that marks
79 it as a Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
84
85#define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
87
88#define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
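/* (Typically the symbol readers set this bit when they see a Thumb function
   symbol; arm_pc_is_thumb below consults MSYMBOL_IS_SPECIAL when no mapping
   symbol covers an address.)  */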
90
91/* Per-objfile data used for mapping symbols. */
92static const struct objfile_data *arm_objfile_data_key;
93
94struct arm_mapping_symbol
95{
96 bfd_vma value;
97 char type;
98};
99typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100DEF_VEC_O(arm_mapping_symbol_s);
101
102struct arm_per_objfile
103{
104 VEC(arm_mapping_symbol_s) **section_maps;
105};
106
107/* The list of available "set arm ..." and "show arm ..." commands. */
108static struct cmd_list_element *setarmcmdlist = NULL;
109static struct cmd_list_element *showarmcmdlist = NULL;
110
111/* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113static const char *const fp_model_strings[] =
114{
115 "auto",
116 "softfpa",
117 "fpa",
118 "softvfp",
119 "vfp",
120 NULL
121};
122
123/* A variable that can be configured by the user. */
124static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125static const char *current_fp_model = "auto";
126
127/* The ABI to use. Keep this in sync with arm_abi_kind. */
128static const char *const arm_abi_strings[] =
129{
130 "auto",
131 "APCS",
132 "AAPCS",
133 NULL
134};
135
136/* A variable that can be configured by the user. */
137static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138static const char *arm_abi_string = "auto";
139
140/* The execution mode to assume. */
141static const char *const arm_mode_strings[] =
142 {
143 "auto",
144 "arm",
145 "thumb",
146 NULL
147 };
148
149static const char *arm_fallback_mode_string = "auto";
150static const char *arm_force_mode_string = "auto";
151
152/* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
156static const struct
157{
158 const char *name;
159 int regnum;
160} arm_register_aliases[] = {
161 /* Basic register numbers. */
162 { "r0", 0 },
163 { "r1", 1 },
164 { "r2", 2 },
165 { "r3", 3 },
166 { "r4", 4 },
167 { "r5", 5 },
168 { "r6", 6 },
169 { "r7", 7 },
170 { "r8", 8 },
171 { "r9", 9 },
172 { "r10", 10 },
173 { "r11", 11 },
174 { "r12", 12 },
175 { "r13", 13 },
176 { "r14", 14 },
177 { "r15", 15 },
178 /* Synonyms (argument and variable registers). */
179 { "a1", 0 },
180 { "a2", 1 },
181 { "a3", 2 },
182 { "a4", 3 },
183 { "v1", 4 },
184 { "v2", 5 },
185 { "v3", 6 },
186 { "v4", 7 },
187 { "v5", 8 },
188 { "v6", 9 },
189 { "v7", 10 },
190 { "v8", 11 },
191 /* Other platform-specific names for r9. */
192 { "sb", 9 },
193 { "tr", 9 },
194 /* Special names. */
195 { "ip", 12 },
196 { "lr", 14 },
197 /* Names used by GCC (not listed in the ARM EABI). */
198 { "sl", 10 },
199 /* A special name from the older ATPCS. */
200 { "wr", 7 },
201};
202
203static const char *const arm_register_names[] =
204{"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
211
212/* Holds the current set of options to be passed to the disassembler. */
213static char *arm_disassembler_options;
214
215/* Valid register name styles. */
216static const char **valid_disassembly_styles;
217
218/* Disassembly style to use. Default to "std" register names. */
219static const char *disassembly_style;
220
221/* This is used to keep the bfd arch_info in sync with the disassembly
222 style. */
223static void set_disassembly_style_sfunc (const char *, int,
224 struct cmd_list_element *);
225static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
227 const char *);
228
229static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 readable_regcache *regcache,
231 int regnum, gdb_byte *buf);
232static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
235
236static CORE_ADDR
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
238
239
240/* get_next_pcs operations. */
241static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb,
246 NULL,
247};
248
249struct arm_prologue_cache
250{
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
254 CORE_ADDR prev_sp;
255
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
259
260 int framesize;
261
262 /* The register used to hold the frame pointer for this frame. */
263 int framereg;
264
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg *saved_regs;
267};
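/* Note that the prologue unwinder recovers PREV_SP as the current value of
   FRAMEREG plus FRAMESIZE; see arm_make_prologue_cache below.  */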
268
269static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
273
274/* Architecture version for displaced stepping. This affects the behaviour of
275 certain instructions, and really should not be hard-wired. */
276
277#define DISPLACED_STEPPING_ARCH_VERSION 5
278
279/* Set to true if the 32-bit mode is in use. */
280
281int arm_apcs_32 = 1;
282
283/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
284
285int
286arm_psr_thumb_bit (struct gdbarch *gdbarch)
287{
288 if (gdbarch_tdep (gdbarch)->is_m)
289 return XPSR_T;
290 else
291 return CPSR_T;
292}
293
294/* Determine if the processor is currently executing in Thumb mode. */
295
296int
297arm_is_thumb (struct regcache *regcache)
298{
299 ULONGEST cpsr;
300 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
301
302 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
303
304 return (cpsr & t_bit) != 0;
305}
306
307/* Determine if FRAME is executing in Thumb mode. */
308
309int
310arm_frame_is_thumb (struct frame_info *frame)
311{
312 CORE_ADDR cpsr;
313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
314
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
320
321 return (cpsr & t_bit) != 0;
322}
323
324/* Callback for VEC_lower_bound. */
325
326static inline int
327arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
328 const struct arm_mapping_symbol *rhs)
329{
330 return lhs->value < rhs->value;
331}
332
333/* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
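/* (The mapping symbols defined by the ARM ELF specification are named "$a",
   "$t" and "$d", marking the start of ARM code, Thumb code and data
   respectively; the type returned here is the character after the '$', so
   callers such as arm_pc_is_thumb test the result against 't'.)  */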
336
337static char
338arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
339{
340 struct obj_section *sec;
341
342 /* If there are mapping symbols, consult them. */
343 sec = find_pc_section (memaddr);
344 if (sec != NULL)
345 {
346 struct arm_per_objfile *data;
347 VEC(arm_mapping_symbol_s) *map;
348 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
349 0 };
350 unsigned int idx;
351
352 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
353 arm_objfile_data_key);
354 if (data != NULL)
355 {
356 map = data->section_maps[sec->the_bfd_section->index];
357 if (!VEC_empty (arm_mapping_symbol_s, map))
358 {
359 struct arm_mapping_symbol *map_sym;
360
361 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
362 arm_compare_mapping_symbols);
363
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx < VEC_length (arm_mapping_symbol_s, map))
369 {
370 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
371 if (map_sym->value == map_key.value)
372 {
373 if (start)
374 *start = map_sym->value + obj_section_addr (sec);
375 return map_sym->type;
376 }
377 }
378
379 if (idx > 0)
380 {
381 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
382 if (start)
383 *start = map_sym->value + obj_section_addr (sec);
384 return map_sym->type;
385 }
386 }
387 }
388 }
389
390 return 0;
391}
392
393/* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
396
397int
398arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
399{
400 struct bound_minimal_symbol sym;
401 char type;
402 arm_displaced_step_closure *dsc
403 = ((arm_displaced_step_closure * )
404 get_displaced_step_closure_by_addr (memaddr));
405
406 /* If checking the mode of a displaced instruction in the copy area, the mode
407 should be determined by the instruction at the original address. */
408 if (dsc)
409 {
410 if (debug_displaced)
411 fprintf_unfiltered (gdb_stdlog,
412 "displaced: check mode of %.8lx instead of %.8lx\n",
413 (unsigned long) dsc->insn_addr,
414 (unsigned long) memaddr);
415 memaddr = dsc->insn_addr;
416 }
417
418 /* If bit 0 of the address is set, assume this is a Thumb address. */
419 if (IS_THUMB_ADDR (memaddr))
420 return 1;
421
422 /* If the user wants to override the symbol table, let them. */
423 if (strcmp (arm_force_mode_string, "arm") == 0)
424 return 0;
425 if (strcmp (arm_force_mode_string, "thumb") == 0)
426 return 1;
427
428 /* ARM v6-M and v7-M are always in Thumb mode. */
429 if (gdbarch_tdep (gdbarch)->is_m)
430 return 1;
431
432 /* If there are mapping symbols, consult them. */
433 type = arm_find_mapping_symbol (memaddr, NULL);
434 if (type)
435 return type == 't';
436
437 /* Thumb functions have a "special" bit set in minimal symbols. */
438 sym = lookup_minimal_symbol_by_pc (memaddr);
439 if (sym.minsym)
440 return (MSYMBOL_IS_SPECIAL (sym.minsym));
441
442 /* If the user wants to override the fallback mode, let them. */
443 if (strcmp (arm_fallback_mode_string, "arm") == 0)
444 return 0;
445 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
446 return 1;
447
448 /* If we couldn't find any symbol, but we're talking to a running
449 target, then trust the current value of $cpsr. This lets
450 "display/i $pc" always show the correct mode (though if there is
451 a symbol table we will not reach here, so it still may not be
452 displayed in the mode in which it will be executed). */
453 if (target_has_registers)
454 return arm_frame_is_thumb (get_current_frame ());
455
456 /* Otherwise we're out of luck; we assume ARM. */
457 return 0;
458}
459
460/* Determine if the address specified equals any of these magic return
461 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
462 architectures.
463
464 From ARMv6-M Reference Manual B1.5.8
465 Table B1-5 Exception return behavior
466
467 EXC_RETURN Return To Return Stack
468 0xFFFFFFF1 Handler mode Main
469 0xFFFFFFF9 Thread mode Main
470 0xFFFFFFFD Thread mode Process
471
472 From ARMv7-M Reference Manual B1.5.8
473 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
474
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
479
480 Table B1-9 EXC_RETURN definition of exception return behavior, with
481 FP
482
483 EXC_RETURN Return To Return Stack Frame Type
484 0xFFFFFFE1 Handler mode Main Extended
485 0xFFFFFFE9 Thread mode Main Extended
486 0xFFFFFFED Thread mode Process Extended
487 0xFFFFFFF1 Handler mode Main Basic
488 0xFFFFFFF9 Thread mode Main Basic
489 0xFFFFFFFD Thread mode Process Basic
490
491 For more details see "B1.5.8 Exception return behavior"
492 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
493
494static int
495arm_m_addr_is_magic (CORE_ADDR addr)
496{
497 switch (addr)
498 {
499 /* Values from the tables in B1.5.8, the EXC_RETURN definitions of
500 the exception return behavior. */
501 case 0xffffffe1:
502 case 0xffffffe9:
503 case 0xffffffed:
504 case 0xfffffff1:
505 case 0xfffffff9:
506 case 0xfffffffd:
507 /* Address is magic. */
508 return 1;
509
510 default:
511 /* Address is not magic. */
512 return 0;
513 }
514}
515
516/* Remove useless bits from addresses in a running program. */
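/* (In the old 26-bit configuration, i.e. when arm_apcs_32 is zero, R15 is a
   combined PC/PSR: bits 0-1 hold the processor mode and bits 26-31 the
   condition flags, so only bits 2-25 form the address -- hence the
   0x03fffffc mask used below.)  */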
517static CORE_ADDR
518arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
519{
520 /* On M-profile devices, do not strip the low bit from EXC_RETURN
521 (the magic exception return address). */
522 if (gdbarch_tdep (gdbarch)->is_m
523 && arm_m_addr_is_magic (val))
524 return val;
525
526 if (arm_apcs_32)
527 return UNMAKE_THUMB_ADDR (val);
528 else
529 return (val & 0x03fffffc);
530}
531
532/* Return 1 if PC is the start of a compiler helper function which
533 can be safely ignored during prologue skipping. IS_THUMB is true
534 if the function is known to be a Thumb function due to the way it
535 is being called. */
536static int
537skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
538{
539 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
540 struct bound_minimal_symbol msym;
541
542 msym = lookup_minimal_symbol_by_pc (pc);
543 if (msym.minsym != NULL
544 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
545 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
546 {
547 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
548
549 /* The GNU linker's Thumb call stub to foo is named
550 __foo_from_thumb. */
551 if (strstr (name, "_from_thumb") != NULL)
552 name += 2;
553
554 /* On soft-float targets, __truncdfsf2 is called to convert promoted
555 arguments to their argument types in non-prototyped
556 functions. */
557 if (startswith (name, "__truncdfsf2"))
558 return 1;
559 if (startswith (name, "__aeabi_d2f"))
560 return 1;
561
562 /* Internal functions related to thread-local storage. */
563 if (startswith (name, "__tls_get_addr"))
564 return 1;
565 if (startswith (name, "__aeabi_read_tp"))
566 return 1;
567 }
568 else
569 {
570 /* If we run against a stripped glibc, we may be unable to identify
571 special functions by name. Check for one important case,
572 __aeabi_read_tp, by comparing the *code* against the default
573 implementation (this is hand-written ARM assembler in glibc). */
574
575 if (!is_thumb
576 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
577 == 0xe3e00a0f /* mov r0, #0xffff0fff */
578 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
579 == 0xe240f01f) /* sub pc, r0, #31 */
580 return 1;
581 }
582
583 return 0;
584}
585
586/* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
587 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
588 the instruction. */
589#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
590 ((bits ((insn1), 0, 3) << 12) \
591 | (bits ((insn1), 10, 10) << 11) \
592 | (bits ((insn2), 12, 14) << 8) \
593 | bits ((insn2), 0, 7))
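/* For example, the Thumb-2 halfword pair 0xf241/0x2334 encodes
   "movw r3, #0x1234"; the macro reassembles imm4:i:imm3:imm8 as
   (0x1 << 12) | (0x0 << 11) | (0x2 << 8) | 0x34 == 0x1234.  */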
594
595/* Extract the immediate from a movw/movt instruction of encoding A. INSN is
596 the 32-bit instruction. */
597#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
598 ((bits ((insn), 16, 19) << 12) \
599 | bits ((insn), 0, 11))
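/* For example, the A32 instruction 0xe3010234 ("movw r0, #0x1234") yields
   (0x1 << 12) | 0x234 == 0x1234.  */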
600
601/* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
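/* For example, IMM == 0x1ab (count 3, "00XY00XY" form) expands to
   0x00ab00ab, while IMM == 0x4ff (rotation count 9) expands to
   0xff << 23 == 0x7f800000.  */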
602
603static unsigned int
604thumb_expand_immediate (unsigned int imm)
605{
606 unsigned int count = imm >> 7;
607
608 if (count < 8)
609 switch (count / 2)
610 {
611 case 0:
612 return imm & 0xff;
613 case 1:
614 return (imm & 0xff) | ((imm & 0xff) << 16);
615 case 2:
616 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
617 case 3:
618 return (imm & 0xff) | ((imm & 0xff) << 8)
619 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
620 }
621
622 return (0x80 | (imm & 0x7f)) << (32 - count);
623}
624
625/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
626 epilogue, 0 otherwise. */
627
628static int
629thumb_instruction_restores_sp (unsigned short insn)
630{
631 return (insn == 0x46bd /* mov sp, r7 */
632 || (insn & 0xff80) == 0xb000 /* add sp, imm */
633 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
634}
635
636/* Analyze a Thumb prologue, looking for a recognizable stack frame
637 and frame pointer. Scan until we encounter a store that could
638 clobber the stack frame unexpectedly, or an unknown instruction.
639 Return the last address which is definitely safe to skip for an
640 initial breakpoint. */
641
642static CORE_ADDR
643thumb_analyze_prologue (struct gdbarch *gdbarch,
644 CORE_ADDR start, CORE_ADDR limit,
645 struct arm_prologue_cache *cache)
646{
647 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
648 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
649 int i;
650 pv_t regs[16];
651 CORE_ADDR offset;
652 CORE_ADDR unrecognized_pc = 0;
653
654 for (i = 0; i < 16; i++)
655 regs[i] = pv_register (i, 0);
656 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
657
658 while (start < limit)
659 {
660 unsigned short insn;
661
662 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
663
664 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
665 {
666 int regno;
667 int mask;
668
669 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
670 break;
671
672 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
673 whether to save LR (R14). */
674 mask = (insn & 0xff) | ((insn & 0x100) << 6);
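	    /* E.g. "push {r4, r7, lr}" is encoded as 0xb590, giving a mask of
	       0x90 | (0x100 << 6) == 0x4090, i.e. bits 4, 7 and 14.  */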
675
676 /* Calculate offsets of saved R0-R7 and LR. */
677 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
678 if (mask & (1 << regno))
679 {
680 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
681 -4);
682 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
683 }
684 }
685 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
686 {
687 offset = (insn & 0x7f) << 2; /* get scaled offset */
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
689 -offset);
690 }
691 else if (thumb_instruction_restores_sp (insn))
692 {
693 /* Don't scan past the epilogue. */
694 break;
695 }
696 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
697 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
698 (insn & 0xff) << 2);
699 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
700 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
701 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
702 bits (insn, 6, 8));
703 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
704 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
706 bits (insn, 0, 7));
707 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
708 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
709 && pv_is_constant (regs[bits (insn, 3, 5)]))
710 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
711 regs[bits (insn, 6, 8)]);
712 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
713 && pv_is_constant (regs[bits (insn, 3, 6)]))
714 {
715 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
716 int rm = bits (insn, 3, 6);
717 regs[rd] = pv_add (regs[rd], regs[rm]);
718 }
719 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
720 {
721 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
722 int src_reg = (insn & 0x78) >> 3;
723 regs[dst_reg] = regs[src_reg];
724 }
725 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
726 {
727 /* Handle stores to the stack. Normally pushes are used,
728 but with GCC -mtpcs-frame, there may be other stores
729 in the prologue to create the frame. */
730 int regno = (insn >> 8) & 0x7;
731 pv_t addr;
732
733 offset = (insn & 0xff) << 2;
734 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
735
736 if (stack.store_would_trash (addr))
737 break;
738
739 stack.store (addr, 4, regs[regno]);
740 }
741 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
742 {
743 int rd = bits (insn, 0, 2);
744 int rn = bits (insn, 3, 5);
745 pv_t addr;
746
747 offset = bits (insn, 6, 10) << 2;
748 addr = pv_add_constant (regs[rn], offset);
749
750 if (stack.store_would_trash (addr))
751 break;
752
753 stack.store (addr, 4, regs[rd]);
754 }
755 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
756 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
757 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
758 /* Ignore stores of argument registers to the stack. */
759 ;
760 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
761 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
762 /* Ignore block loads from the stack, potentially copying
763 parameters from memory. */
764 ;
765 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
766 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
768 /* Similarly ignore single loads from the stack. */
769 ;
770 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
771 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
772 /* Skip register copies, i.e. saves to another register
773 instead of the stack. */
774 ;
775 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
776 /* Recognize constant loads; even with small stacks these are necessary
777 on Thumb. */
778 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
779 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
780 {
781 /* Constant pool loads, for the same reason. */
782 unsigned int constant;
783 CORE_ADDR loc;
784
785 loc = start + 4 + bits (insn, 0, 7) * 4;
786 constant = read_memory_unsigned_integer (loc, 4, byte_order);
787 regs[bits (insn, 8, 10)] = pv_constant (constant);
788 }
789 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
790 {
791 unsigned short inst2;
792
793 inst2 = read_code_unsigned_integer (start + 2, 2,
794 byte_order_for_code);
795
796 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
797 {
798 /* BL, BLX. Allow some special function calls when
799 skipping the prologue; GCC generates these before
800 storing arguments to the stack. */
801 CORE_ADDR nextpc;
802 int j1, j2, imm1, imm2;
803
804 imm1 = sbits (insn, 0, 10);
805 imm2 = bits (inst2, 0, 10);
806 j1 = bit (inst2, 13);
807 j2 = bit (inst2, 11);
808
809 offset = ((imm1 << 12) + (imm2 << 1));
810 offset ^= ((!j2) << 22) | ((!j1) << 23);
811
812 nextpc = start + 4 + offset;
813 /* For BLX make sure to clear the low bits. */
814 if (bit (inst2, 12) == 0)
815 nextpc = nextpc & 0xfffffffc;
816
817 if (!skip_prologue_function (gdbarch, nextpc,
818 bit (inst2, 12) != 0))
819 break;
820 }
821
822 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
823 { registers } */
824 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
825 {
826 pv_t addr = regs[bits (insn, 0, 3)];
827 int regno;
828
829 if (stack.store_would_trash (addr))
830 break;
831
832 /* Calculate offsets of saved registers. */
833 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
834 if (inst2 & (1 << regno))
835 {
836 addr = pv_add_constant (addr, -4);
837 stack.store (addr, 4, regs[regno]);
838 }
839
840 if (insn & 0x0020)
841 regs[bits (insn, 0, 3)] = addr;
842 }
843
844 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
845 [Rn, #+/-imm]{!} */
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
847 {
848 int regno1 = bits (inst2, 12, 15);
849 int regno2 = bits (inst2, 8, 11);
850 pv_t addr = regs[bits (insn, 0, 3)];
851
852 offset = inst2 & 0xff;
853 if (insn & 0x0080)
854 addr = pv_add_constant (addr, offset);
855 else
856 addr = pv_add_constant (addr, -offset);
857
858 if (stack.store_would_trash (addr))
859 break;
860
861 stack.store (addr, 4, regs[regno1]);
862 stack.store (pv_add_constant (addr, 4),
863 4, regs[regno2]);
864
865 if (insn & 0x0020)
866 regs[bits (insn, 0, 3)] = addr;
867 }
868
869 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
870 && (inst2 & 0x0c00) == 0x0c00
871 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
872 {
873 int regno = bits (inst2, 12, 15);
874 pv_t addr = regs[bits (insn, 0, 3)];
875
876 offset = inst2 & 0xff;
877 if (inst2 & 0x0200)
878 addr = pv_add_constant (addr, offset);
879 else
880 addr = pv_add_constant (addr, -offset);
881
882 if (stack.store_would_trash (addr))
883 break;
884
885 stack.store (addr, 4, regs[regno]);
886
887 if (inst2 & 0x0100)
888 regs[bits (insn, 0, 3)] = addr;
889 }
890
891 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
893 {
894 int regno = bits (inst2, 12, 15);
895 pv_t addr;
896
897 offset = inst2 & 0xfff;
898 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
899
900 if (stack.store_would_trash (addr))
901 break;
902
903 stack.store (addr, 4, regs[regno]);
904 }
905
906 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
907 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
908 /* Ignore stores of argument registers to the stack. */
909 ;
910
911 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
912 && (inst2 & 0x0d00) == 0x0c00
913 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
914 /* Ignore stores of argument registers to the stack. */
915 ;
916
917 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
918 { registers } */
919 && (inst2 & 0x8000) == 0x0000
920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
921 /* Ignore block loads from the stack, potentially copying
922 parameters from memory. */
923 ;
924
925 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
926 [Rn, #+/-imm] */
927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 /* Similarly ignore dual loads from the stack. */
929 ;
930
931 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
932 && (inst2 & 0x0d00) == 0x0c00
933 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
934 /* Similarly ignore single loads from the stack. */
935 ;
936
937 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
938 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
939 /* Similarly ignore single loads from the stack. */
940 ;
941
942 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
943 && (inst2 & 0x8000) == 0x0000)
944 {
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
948
949 regs[bits (inst2, 8, 11)]
950 = pv_add_constant (regs[bits (insn, 0, 3)],
951 thumb_expand_immediate (imm));
952 }
953
954 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
955 && (inst2 & 0x8000) == 0x0000)
956 {
957 unsigned int imm = ((bits (insn, 10, 10) << 11)
958 | (bits (inst2, 12, 14) << 8)
959 | bits (inst2, 0, 7));
960
961 regs[bits (inst2, 8, 11)]
962 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
963 }
964
965 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
966 && (inst2 & 0x8000) == 0x0000)
967 {
968 unsigned int imm = ((bits (insn, 10, 10) << 11)
969 | (bits (inst2, 12, 14) << 8)
970 | bits (inst2, 0, 7));
971
972 regs[bits (inst2, 8, 11)]
973 = pv_add_constant (regs[bits (insn, 0, 3)],
974 - (CORE_ADDR) thumb_expand_immediate (imm));
975 }
976
977 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
978 && (inst2 & 0x8000) == 0x0000)
979 {
980 unsigned int imm = ((bits (insn, 10, 10) << 11)
981 | (bits (inst2, 12, 14) << 8)
982 | bits (inst2, 0, 7));
983
984 regs[bits (inst2, 8, 11)]
985 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
986 }
987
988 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
989 {
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
993
994 regs[bits (inst2, 8, 11)]
995 = pv_constant (thumb_expand_immediate (imm));
996 }
997
998 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
999 {
1000 unsigned int imm
1001 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1002
1003 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1004 }
1005
1006 else if (insn == 0xea5f /* mov.w Rd,Rm */
1007 && (inst2 & 0xf0f0) == 0)
1008 {
1009 int dst_reg = (inst2 & 0x0f00) >> 8;
1010 int src_reg = inst2 & 0xf;
1011 regs[dst_reg] = regs[src_reg];
1012 }
1013
1014 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1015 {
1016 /* Constant pool loads. */
1017 unsigned int constant;
1018 CORE_ADDR loc;
1019
1020 offset = bits (inst2, 0, 11);
1021 if (insn & 0x0080)
1022 loc = start + 4 + offset;
1023 else
1024 loc = start + 4 - offset;
1025
1026 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1027 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1028 }
1029
1030 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1031 {
1032 /* Constant pool loads. */
1033 unsigned int constant;
1034 CORE_ADDR loc;
1035
1036 offset = bits (inst2, 0, 7) << 2;
1037 if (insn & 0x0080)
1038 loc = start + 4 + offset;
1039 else
1040 loc = start + 4 - offset;
1041
1042 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1043 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1044
1045 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1046 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1047 }
1048
1049 else if (thumb2_instruction_changes_pc (insn, inst2))
1050 {
1051 /* Don't scan past anything that might change control flow. */
1052 break;
1053 }
1054 else
1055 {
1056 /* The optimizer might shove anything into the prologue,
1057 so we just skip what we don't recognize. */
1058 unrecognized_pc = start;
1059 }
1060
1061 start += 2;
1062 }
1063 else if (thumb_instruction_changes_pc (insn))
1064 {
1065 /* Don't scan past anything that might change control flow. */
1066 break;
1067 }
1068 else
1069 {
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc = start;
1073 }
1074
1075 start += 2;
1076 }
1077
1078 if (arm_debug)
1079 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1080 paddress (gdbarch, start));
1081
1082 if (unrecognized_pc == 0)
1083 unrecognized_pc = start;
1084
1085 if (cache == NULL)
1086 return unrecognized_pc;
1087
1088 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1089 {
1090 /* Frame pointer is fp. Frame size is constant. */
1091 cache->framereg = ARM_FP_REGNUM;
1092 cache->framesize = -regs[ARM_FP_REGNUM].k;
1093 }
1094 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1095 {
1096 /* Frame pointer is r7. Frame size is constant. */
1097 cache->framereg = THUMB_FP_REGNUM;
1098 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1099 }
1100 else
1101 {
1102 /* Try the stack pointer... this is a bit desperate. */
1103 cache->framereg = ARM_SP_REGNUM;
1104 cache->framesize = -regs[ARM_SP_REGNUM].k;
1105 }
1106
1107 for (i = 0; i < 16; i++)
1108 if (stack.find_reg (gdbarch, i, &offset))
1109 cache->saved_regs[i].addr = offset;
1110
1111 return unrecognized_pc;
1112}
1113
1114
1115/* Try to analyze the instructions starting at PC, which load the symbol
1116 __stack_chk_guard. If the instructions are recognized, return the loaded
1117 address of __stack_chk_guard, set the destination register number in
1118 *DESTREG, and set the size in bytes of the loading instructions in
1119 *OFFSET. Return 0 if the instructions are not recognized. */
1120
1121static CORE_ADDR
1122arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1123 unsigned int *destreg, int *offset)
1124{
1125 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1126 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1127 unsigned int low, high, address;
1128
1129 address = 0;
1130 if (is_thumb)
1131 {
1132 unsigned short insn1
1133 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1134
1135 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1136 {
1137 *destreg = bits (insn1, 8, 10);
1138 *offset = 2;
1139 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1140 address = read_memory_unsigned_integer (address, 4,
1141 byte_order_for_code);
1142 }
1143 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1144 {
1145 unsigned short insn2
1146 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1147
1148 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1149
1150 insn1
1151 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1152 insn2
1153 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1154
1155 /* movt Rd, #const */
1156 if ((insn1 & 0xfbc0) == 0xf2c0)
1157 {
1158 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1159 *destreg = bits (insn2, 8, 11);
1160 *offset = 8;
1161 address = (high << 16 | low);
1162 }
1163 }
1164 }
1165 else
1166 {
1167 unsigned int insn
1168 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1169
1170 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1171 {
1172 address = bits (insn, 0, 11) + pc + 8;
1173 address = read_memory_unsigned_integer (address, 4,
1174 byte_order_for_code);
1175
1176 *destreg = bits (insn, 12, 15);
1177 *offset = 4;
1178 }
1179 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1180 {
1181 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1182
1183 insn
1184 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1185
1186 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1187 {
1188 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1189 *destreg = bits (insn, 12, 15);
1190 *offset = 8;
1191 address = (high << 16 | low);
1192 }
1193 }
1194 }
1195
1196 return address;
1197}
1198
1199/* Try to skip a sequence of instructions used for the stack protector. If PC
1200 points to the first instruction of this sequence, return the address of the
1201 first instruction after this sequence; otherwise, return the original PC.
1202
1203 On ARM, this sequence of instructions is mainly composed of three steps:
1204 Step 1: load symbol __stack_chk_guard,
1205 Step 2: load from address of __stack_chk_guard,
1206 Step 3: store it to somewhere else.
1207
1208 Usually, the instructions in step 2 and step 3 are the same across ARM
1209 architectures. Step 2 is one instruction, 'ldr Rx, [Rn, #0]', and
1210 step 3 is also one instruction, 'str Rx, [r7, #immd]'. However, the
1211 instructions in step 1 vary across ARM architectures. On ARMv7,
1212 they are,
1213
1214 movw Rn, #:lower16:__stack_chk_guard
1215 movt Rn, #:upper16:__stack_chk_guard
1216
1217 On ARMv5t, it is,
1218
1219 ldr Rn, .Label
1220 ....
1221 .Label:
1222 .word __stack_chk_guard
1223
1224 Since ldr/str are very common instructions, we can't use them alone as the
1225 'fingerprint' or 'signature' of a stack protector sequence. Here we choose
1226 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1227 not stripped, as the 'fingerprint' of a stack protector code sequence. */
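/* As an illustration (the register numbers are arbitrary), a typical
   GCC-generated ARMv7 guard load therefore looks like:

       movw r3, #:lower16:__stack_chk_guard
       movt r3, #:upper16:__stack_chk_guard   @ step 1
       ldr  r3, [r3]                          @ step 2
       str  r3, [r7, #immd]                   @ step 3  */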
1228
1229static CORE_ADDR
1230arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1231{
1232 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1233 unsigned int basereg;
1234 struct bound_minimal_symbol stack_chk_guard;
1235 int offset;
1236 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1237 CORE_ADDR addr;
1238
1239 /* Try to parse the instructions in Step 1. */
1240 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1241 &basereg, &offset);
1242 if (!addr)
1243 return pc;
1244
1245 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1246 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1247 Otherwise, this sequence cannot be for stack protector. */
1248 if (stack_chk_guard.minsym == NULL
1249 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1250 return pc;
1251
1252 if (is_thumb)
1253 {
1254 unsigned int destreg;
1255 unsigned short insn
1256 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1257
1258 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1259 if ((insn & 0xf800) != 0x6800)
1260 return pc;
1261 if (bits (insn, 3, 5) != basereg)
1262 return pc;
1263 destreg = bits (insn, 0, 2);
1264
1265 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1266 byte_order_for_code);
1267 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1268 if ((insn & 0xf800) != 0x6000)
1269 return pc;
1270 if (destreg != bits (insn, 0, 2))
1271 return pc;
1272 }
1273 else
1274 {
1275 unsigned int destreg;
1276 unsigned int insn
1277 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1278
1279 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1280 if ((insn & 0x0e500000) != 0x04100000)
1281 return pc;
1282 if (bits (insn, 16, 19) != basereg)
1283 return pc;
1284 destreg = bits (insn, 12, 15);
1285 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1286 insn = read_code_unsigned_integer (pc + offset + 4,
1287 4, byte_order_for_code);
1288 if ((insn & 0x0e500000) != 0x04000000)
1289 return pc;
1290 if (bits (insn, 12, 15) != destreg)
1291 return pc;
1292 }
1293 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1294 while it is 8 bytes on ARM. */
1295 if (is_thumb)
1296 return pc + offset + 4;
1297 else
1298 return pc + offset + 8;
1299}
1300
1301/* Advance the PC across any function entry prologue instructions to
1302 reach some "real" code.
1303
1304 The APCS (ARM Procedure Call Standard) defines the following
1305 prologue:
1306
1307 mov ip, sp
1308 [stmfd sp!, {a1,a2,a3,a4}]
1309 stmfd sp!, {...,fp,ip,lr,pc}
1310 [stfe f7, [sp, #-12]!]
1311 [stfe f6, [sp, #-12]!]
1312 [stfe f5, [sp, #-12]!]
1313 [stfe f4, [sp, #-12]!]
1314 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
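/* The constant is 4 when the optional argument-register push is absent and
   20 when it is present: "stmfd sp!, {a1,a2,a3,a4}" lowers SP by 16 bytes
   first, and in either case "sub fp, ip, #nn" leaves FP pointing at the
   saved PC slot of the second store-multiple.  */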
1315
1316static CORE_ADDR
1317arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1318{
1319 CORE_ADDR func_addr, limit_pc;
1320
1321 /* See if we can determine the end of the prologue via the symbol table.
1322 If so, then return either PC, or the PC after the prologue, whichever
1323 is greater. */
1324 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1325 {
1326 CORE_ADDR post_prologue_pc
1327 = skip_prologue_using_sal (gdbarch, func_addr);
1328 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1329
1330 if (post_prologue_pc)
1331 post_prologue_pc
1332 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1333
1334
1335 /* GCC always emits a line note before the prologue and another
1336 one after, even if the two are at the same address or on the
1337 same line. Take advantage of this so that we do not need to
1338 know every instruction that might appear in the prologue. We
1339 will have producer information for most binaries; if it is
1340 missing (e.g. for -gstabs), assume the GNU tools. */
1341 if (post_prologue_pc
1342 && (cust == NULL
1343 || COMPUNIT_PRODUCER (cust) == NULL
1344 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1345 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1346 return post_prologue_pc;
1347
1348 if (post_prologue_pc != 0)
1349 {
1350 CORE_ADDR analyzed_limit;
1351
1352 /* For non-GCC compilers, make sure the entire line is an
1353 acceptable prologue; GDB will round this function's
1354 return value up to the end of the following line so we
1355 can not skip just part of a line (and we do not want to).
1356
1357 RealView does not treat the prologue specially, but does
1358 associate prologue code with the opening brace; so this
1359 lets us skip the first line if we think it is the opening
1360 brace. */
1361 if (arm_pc_is_thumb (gdbarch, func_addr))
1362 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1363 post_prologue_pc, NULL);
1364 else
1365 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1366 post_prologue_pc, NULL);
1367
1368 if (analyzed_limit != post_prologue_pc)
1369 return func_addr;
1370
1371 return post_prologue_pc;
1372 }
1373 }
1374
1375 /* Can't determine prologue from the symbol table, need to examine
1376 instructions. */
1377
1378 /* Find an upper limit on the function prologue using the debug
1379 information. If the debug information could not be used to provide
1380 that bound, then use an arbitrarily large number as the upper bound. */
1381 /* Like arm_scan_prologue, stop no later than pc + 64. */
1382 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1383 if (limit_pc == 0)
1384 limit_pc = pc + 64; /* Magic. */
1385
1386
1387 /* Check if this is Thumb code. */
1388 if (arm_pc_is_thumb (gdbarch, pc))
1389 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1390 else
1391 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1392}
1393
1394/* *INDENT-OFF* */
1395/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1396 This function decodes a Thumb function prologue to determine:
1397 1) the size of the stack frame
1398 2) which registers are saved on it
1399 3) the offsets of saved regs
1400 4) the offset from the stack pointer to the frame pointer
1401
1402 A typical Thumb function prologue would create this stack frame
1403 (offsets relative to FP)
1404 old SP -> 24 stack parameters
1405 20 LR
1406 16 R7
1407 R7 -> 0 local variables (16 bytes)
1408 SP -> -12 additional stack space (12 bytes)
1409 The frame size would thus be 36 bytes, and the frame offset would be
1410 12 bytes. The frame register is R7.
1411
1412 The comments for thumb_skip_prolog() describe the algorithm we use
1413 to detect the end of the prologue. */
1414/* *INDENT-ON* */
1415
1416static void
1417thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1418 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1419{
1420 CORE_ADDR prologue_start;
1421 CORE_ADDR prologue_end;
1422
1423 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1424 &prologue_end))
1425 {
1426 /* See the comment in arm_scan_prologue for an explanation of
1427 this heuristic. */
1428 if (prologue_end > prologue_start + 64)
1429 {
1430 prologue_end = prologue_start + 64;
1431 }
1432 }
1433 else
1434 /* We're in the boondocks: we have no idea where the start of the
1435 function is. */
1436 return;
1437
1438 prologue_end = std::min (prologue_end, prev_pc);
1439
1440 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1441}
1442
1443/* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1444 otherwise. */
1445
1446static int
1447arm_instruction_restores_sp (unsigned int insn)
1448{
1449 if (bits (insn, 28, 31) != INST_NV)
1450 {
1451 if ((insn & 0x0df0f000) == 0x0080d000
1452 /* ADD SP (register or immediate). */
1453 || (insn & 0x0df0f000) == 0x0040d000
1454 /* SUB SP (register or immediate). */
1455 || (insn & 0x0ffffff0) == 0x01a0d000
1456 /* MOV SP. */
1457 || (insn & 0x0fff0000) == 0x08bd0000
1458 /* POP (LDMIA). */
1459 || (insn & 0x0fff0000) == 0x049d0000)
1460 /* POP of a single register. */
1461 return 1;
1462 }
1463
1464 return 0;
1465}
1466
1467/* Analyze an ARM mode prologue starting at PROLOGUE_START and
1468 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1469 fill it in. Return the first address not recognized as a prologue
1470 instruction.
1471
1472 We recognize all the instructions typically found in ARM prologues,
1473 plus harmless instructions which can be skipped (either for analysis
1474 purposes, or a more restrictive set that can be skipped when finding
1475 the end of the prologue). */
1476
1477static CORE_ADDR
1478arm_analyze_prologue (struct gdbarch *gdbarch,
1479 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1480 struct arm_prologue_cache *cache)
1481{
1482 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1483 int regno;
1484 CORE_ADDR offset, current_pc;
1485 pv_t regs[ARM_FPS_REGNUM];
1486 CORE_ADDR unrecognized_pc = 0;
1487
1488 /* Search the prologue looking for instructions that set up the
1489 frame pointer, adjust the stack pointer, and save registers.
1490
1491 Be careful, however: if it doesn't look like a prologue,
1492 don't try to scan it. If, for instance, a frameless function
1493 begins with stmfd sp!, then we will tell ourselves there is
1494 a frame, which will confuse stack traceback, as well as "finish"
1495 and other operations that rely on a knowledge of the stack
1496 traceback. */
1497
1498 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1499 regs[regno] = pv_register (regno, 0);
1500 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1501
1502 for (current_pc = prologue_start;
1503 current_pc < prologue_end;
1504 current_pc += 4)
1505 {
1506 unsigned int insn
1507 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1508
1509 if (insn == 0xe1a0c00d) /* mov ip, sp */
1510 {
1511 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1512 continue;
1513 }
1514 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1515 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1516 {
1517 unsigned imm = insn & 0xff; /* immediate value */
1518 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1519 int rd = bits (insn, 12, 15);
1520 imm = (imm >> rot) | (imm << (32 - rot));
1521 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1522 continue;
1523 }
1524 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1526 {
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1532 continue;
1533 }
1534 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1535 [sp, #-4]! */
1536 {
1537 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1538 break;
1539 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1540 stack.store (regs[ARM_SP_REGNUM], 4,
1541 regs[bits (insn, 12, 15)]);
1542 continue;
1543 }
1544 else if ((insn & 0xffff0000) == 0xe92d0000)
1545 /* stmfd sp!, {..., fp, ip, lr, pc}
1546 or
1547 stmfd sp!, {a1, a2, a3, a4} */
1548 {
1549 int mask = insn & 0xffff;
1550
1551 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1552 break;
1553
1554 /* Calculate offsets of saved registers. */
1555 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1556 if (mask & (1 << regno))
1557 {
1558 regs[ARM_SP_REGNUM]
1559 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1560 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1561 }
1562 }
1563 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1564 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1565 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1566 {
1567 /* No need to add this to saved_regs -- it's just an arg reg. */
1568 continue;
1569 }
1570 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1571 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1572 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1573 {
1574 /* No need to add this to saved_regs -- it's just an arg reg. */
1575 continue;
1576 }
1577 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1578 { registers } */
1579 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1580 {
1581 /* No need to add this to saved_regs -- it's just arg regs. */
1582 continue;
1583 }
1584 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1585 {
1586 unsigned imm = insn & 0xff; /* immediate value */
1587 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1588 imm = (imm >> rot) | (imm << (32 - rot));
1589 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1590 }
1591 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1592 {
1593 unsigned imm = insn & 0xff; /* immediate value */
1594 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1595 imm = (imm >> rot) | (imm << (32 - rot));
1596 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1597 }
1598 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1599 [sp, -#c]! */
1600 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1601 {
1602 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1603 break;
1604
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1606 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1607 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1608 }
1609 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1610 [sp!] */
1611 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1612 {
1613 int n_saved_fp_regs;
1614 unsigned int fp_start_reg, fp_bound_reg;
1615
1616 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1617 break;
1618
1619 if ((insn & 0x800) == 0x800) /* N0 is set */
1620 {
1621 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1622 n_saved_fp_regs = 3;
1623 else
1624 n_saved_fp_regs = 1;
1625 }
1626 else
1627 {
1628 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1629 n_saved_fp_regs = 2;
1630 else
1631 n_saved_fp_regs = 4;
1632 }
1633
1634 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1635 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1636 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1637 {
1638 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1639 stack.store (regs[ARM_SP_REGNUM], 12,
1640 regs[fp_start_reg++]);
1641 }
1642 }
1643 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1644 {
1645 /* Allow some special function calls when skipping the
1646 prologue; GCC generates these before storing arguments to
1647 the stack. */
1648 CORE_ADDR dest = BranchDest (current_pc, insn);
1649
1650 if (skip_prologue_function (gdbarch, dest, 0))
1651 continue;
1652 else
1653 break;
1654 }
1655 else if ((insn & 0xf0000000) != 0xe0000000)
1656 break; /* Condition not true, exit early. */
1657 else if (arm_instruction_changes_pc (insn))
1658 /* Don't scan past anything that might change control flow. */
1659 break;
1660 else if (arm_instruction_restores_sp (insn))
1661 {
1662 /* Don't scan past the epilogue. */
1663 break;
1664 }
1665 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1666 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1667 /* Ignore block loads from the stack, potentially copying
1668 parameters from memory. */
1669 continue;
1670 else if ((insn & 0xfc500000) == 0xe4100000
1671 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1672 /* Similarly ignore single loads from the stack. */
1673 continue;
1674 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1675 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1676 register instead of the stack. */
1677 continue;
1678 else
1679 {
1680 /* The optimizer might shove anything into the prologue. If we
1681 are building up the cache (cache != NULL) from scanning the
1682 prologue, we just skip what we don't recognize and scan further
1683 to make the cache as complete as possible. However, if we are
1684 skipping the prologue, we stop immediately on the first
1685 unrecognized instruction. */
1686 unrecognized_pc = current_pc;
1687 if (cache != NULL)
1688 continue;
1689 else
1690 break;
1691 }
1692 }
1693
1694 if (unrecognized_pc == 0)
1695 unrecognized_pc = current_pc;
1696
1697 if (cache)
1698 {
1699 int framereg, framesize;
1700
1701 /* The frame size is just the distance from the frame register
1702 to the original stack pointer. */
1703 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1704 {
1705 /* Frame pointer is fp. */
1706 framereg = ARM_FP_REGNUM;
1707 framesize = -regs[ARM_FP_REGNUM].k;
1708 }
1709 else
1710 {
1711 /* Try the stack pointer... this is a bit desperate. */
1712 framereg = ARM_SP_REGNUM;
1713 framesize = -regs[ARM_SP_REGNUM].k;
1714 }
1715
1716 cache->framereg = framereg;
1717 cache->framesize = framesize;
1718
1719 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1720 if (stack.find_reg (gdbarch, regno, &offset))
1721 cache->saved_regs[regno].addr = offset;
1722 }
1723
1724 if (arm_debug)
1725 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1726 paddress (gdbarch, unrecognized_pc));
1727
1728 return unrecognized_pc;
1729}
1730
1731static void
1732arm_scan_prologue (struct frame_info *this_frame,
1733 struct arm_prologue_cache *cache)
1734{
1735 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1736 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1737 CORE_ADDR prologue_start, prologue_end;
1738 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1739 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1740
1741 /* Assume there is no frame until proven otherwise. */
1742 cache->framereg = ARM_SP_REGNUM;
1743 cache->framesize = 0;
1744
1745 /* Check for Thumb prologue. */
1746 if (arm_frame_is_thumb (this_frame))
1747 {
1748 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1749 return;
1750 }
1751
1752 /* Find the function prologue. If we can't find the function in
1753 the symbol table, peek in the stack frame to find the PC. */
1754 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1755 &prologue_end))
1756 {
1757 /* One way to find the end of the prologue (which works well
1758 for unoptimized code) is to do the following:
1759
1760 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1761
1762 if (sal.line == 0)
1763 prologue_end = prev_pc;
1764 else if (sal.end < prologue_end)
1765 prologue_end = sal.end;
1766
1767 This mechanism is very accurate so long as the optimizer
1768 doesn't move any instructions from the function body into the
1769 prologue. If this happens, sal.end will be the last
1770 instruction in the first hunk of prologue code just before
1771 the first instruction that the scheduler has moved from
1772 the body to the prologue.
1773
1774 In order to make sure that we scan all of the prologue
1775 instructions, we use a slightly less accurate mechanism which
1776 may scan more than necessary. To help compensate for this
1777 lack of accuracy, the prologue scanning loop below contains
1778 several clauses which will cause the loop to terminate early if
1779 an implausible prologue instruction is encountered.
1780
1781 The expression
1782
1783 prologue_start + 64
1784
1785 is a suitable endpoint since it accounts for the largest
1786 possible prologue plus up to five instructions inserted by
1787 the scheduler. */
1788
1789 if (prologue_end > prologue_start + 64)
1790 {
1791 prologue_end = prologue_start + 64; /* See above. */
1792 }
1793 }
1794 else
1795 {
1796 /* We have no symbol information. Our only option is to assume this
1797 function has a standard stack frame and the normal frame register.
1798 Then, we can find the value of our frame pointer on entrance to
1799 the callee (or at the present moment if this is the innermost frame).
1800 The value stored there should be the address of the stmfd + 8. */
1801 CORE_ADDR frame_loc;
1802 ULONGEST return_value;
1803
1804 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1805 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1806 &return_value))
1807 return;
1808 else
1809 {
1810 prologue_start = gdbarch_addr_bits_remove
1811 (gdbarch, return_value) - 8;
1812 prologue_end = prologue_start + 64; /* See above. */
1813 }
1814 }
1815
1816 if (prev_pc < prologue_end)
1817 prologue_end = prev_pc;
1818
1819 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1820}
1821
1822static struct arm_prologue_cache *
1823arm_make_prologue_cache (struct frame_info *this_frame)
1824{
1825 int reg;
1826 struct arm_prologue_cache *cache;
1827 CORE_ADDR unwound_fp;
1828
1829 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1830 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1831
1832 arm_scan_prologue (this_frame, cache);
1833
1834 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1835 if (unwound_fp == 0)
1836 return cache;
1837
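 /* FRAMESIZE is the distance from the frame register back to the
 caller's SP, so adding it to the unwound frame register value
 recovers the previous frame's SP. */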
1838 cache->prev_sp = unwound_fp + cache->framesize;
1839
1840 /* Calculate actual addresses of saved registers using offsets
1841 determined by arm_scan_prologue. */
1842 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1843 if (trad_frame_addr_p (cache->saved_regs, reg))
1844 cache->saved_regs[reg].addr += cache->prev_sp;
1845
1846 return cache;
1847}
1848
1849/* Implementation of the stop_reason hook for arm_prologue frames. */
1850
1851static enum unwind_stop_reason
1852arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1853 void **this_cache)
1854{
1855 struct arm_prologue_cache *cache;
1856 CORE_ADDR pc;
1857
1858 if (*this_cache == NULL)
1859 *this_cache = arm_make_prologue_cache (this_frame);
1860 cache = (struct arm_prologue_cache *) *this_cache;
1861
1862 /* This is meant to halt the backtrace at "_start". */
1863 pc = get_frame_pc (this_frame);
1864 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1865 return UNWIND_OUTERMOST;
1866
1867 /* If we've hit a wall, stop. */
1868 if (cache->prev_sp == 0)
1869 return UNWIND_OUTERMOST;
1870
1871 return UNWIND_NO_REASON;
1872}
1873
1874/* Our frame ID for a normal frame is the current function's starting PC
1875 and the caller's SP when we were called. */
1876
1877static void
1878arm_prologue_this_id (struct frame_info *this_frame,
1879 void **this_cache,
1880 struct frame_id *this_id)
1881{
1882 struct arm_prologue_cache *cache;
1883 struct frame_id id;
1884 CORE_ADDR pc, func;
1885
1886 if (*this_cache == NULL)
1887 *this_cache = arm_make_prologue_cache (this_frame);
1888 cache = (struct arm_prologue_cache *) *this_cache;
1889
1890 /* Use function start address as part of the frame ID. If we cannot
1891 identify the start address (due to missing symbol information),
1892 fall back to just using the current PC. */
1893 pc = get_frame_pc (this_frame);
1894 func = get_frame_func (this_frame);
1895 if (!func)
1896 func = pc;
1897
1898 id = frame_id_build (cache->prev_sp, func);
1899 *this_id = id;
1900}
1901
1902static struct value *
1903arm_prologue_prev_register (struct frame_info *this_frame,
1904 void **this_cache,
1905 int prev_regnum)
1906{
1907 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1908 struct arm_prologue_cache *cache;
1909
1910 if (*this_cache == NULL)
1911 *this_cache = arm_make_prologue_cache (this_frame);
1912 cache = (struct arm_prologue_cache *) *this_cache;
1913
1914 /* If we are asked to unwind the PC, then we need to return the LR
1915 instead. The prologue may save PC, but it will point into this
1916 frame's prologue, not the next frame's resume location. Also
1917 strip the saved T bit. A valid LR may have the low bit set, but
1918 a valid PC never does. */
1919 if (prev_regnum == ARM_PC_REGNUM)
1920 {
1921 CORE_ADDR lr;
1922
1923 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1924 return frame_unwind_got_constant (this_frame, prev_regnum,
1925 arm_addr_bits_remove (gdbarch, lr));
1926 }
1927
1928 /* SP is generally not saved to the stack, but this frame is
1929 identified by the next frame's stack pointer at the time of the call.
1930 The value was already reconstructed into PREV_SP. */
1931 if (prev_regnum == ARM_SP_REGNUM)
1932 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1933
1934 /* The CPSR may have been changed by the call instruction and by the
1935 called function. The only bit we can reconstruct is the T bit,
1936 by checking the low bit of LR as of the call. This is a reliable
1937 indicator of Thumb-ness except for some ARM v4T pre-interworking
1938 Thumb code, which could get away with a clear low bit as long as
1939 the called function did not use bx. Guess that all other
1940 bits are unchanged; the condition flags are presumably lost,
1941 but the processor status is likely valid. */
1942 if (prev_regnum == ARM_PS_REGNUM)
1943 {
1944 CORE_ADDR lr, cpsr;
1945 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1946
1947 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1948 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1949 if (IS_THUMB_ADDR (lr))
1950 cpsr |= t_bit;
1951 else
1952 cpsr &= ~t_bit;
1953 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1954 }
1955
1956 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1957 prev_regnum);
1958}
1959
1960struct frame_unwind arm_prologue_unwind = {
1961 NORMAL_FRAME,
1962 arm_prologue_unwind_stop_reason,
1963 arm_prologue_this_id,
1964 arm_prologue_prev_register,
1965 NULL,
1966 default_frame_sniffer
1967};
1968
1969/* Maintain a list of ARM exception table entries per objfile, similar to the
1970 list of mapping symbols. We only cache entries for standard ARM-defined
1971 personality routines; the cache will contain only the frame unwinding
1972 instructions associated with the entry (not the descriptors). */
1973
1974static const struct objfile_data *arm_exidx_data_key;
1975
1976struct arm_exidx_entry
1977{
1978 bfd_vma addr;
1979 gdb_byte *entry;
1980};
1981typedef struct arm_exidx_entry arm_exidx_entry_s;
1982DEF_VEC_O(arm_exidx_entry_s);
1983
1984struct arm_exidx_data
1985{
1986 VEC(arm_exidx_entry_s) **section_maps;
1987};
1988
1989static void
1990arm_exidx_data_free (struct objfile *objfile, void *arg)
1991{
1992 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1993 unsigned int i;
1994
1995 for (i = 0; i < objfile->obfd->section_count; i++)
1996 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1997}
1998
1999static inline int
2000arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2001 const struct arm_exidx_entry *rhs)
2002{
2003 return lhs->addr < rhs->addr;
2004}
2005
2006static struct obj_section *
2007arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2008{
2009 struct obj_section *osect;
2010
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_get_section_flags (objfile->obfd,
2013 osect->the_bfd_section) & SEC_ALLOC)
2014 {
2015 bfd_vma start, size;
2016 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2017 size = bfd_get_section_size (osect->the_bfd_section);
2018
2019 if (start <= vma && vma < start + size)
2020 return osect;
2021 }
2022
2023 return NULL;
2024}
2025
2026/* Parse contents of exception table and exception index sections
2027 of OBJFILE, and fill in the exception table entry cache.
2028
2029 For each entry that refers to a standard ARM-defined personality
2030 routine, extract the frame unwinding instructions (from either
2031 the index or the table section). The unwinding instructions
2032 are normalized by:
2033 - extracting them from the rest of the table data
2034 - converting to host endianness
2035 - appending the implicit 0xb0 ("Finish") code
2036
2037 The extracted and normalized instructions are stored for later
2038 retrieval by the arm_find_exidx_entry routine. */
2039
2040static void
2041arm_exidx_new_objfile (struct objfile *objfile)
2042{
2043 struct arm_exidx_data *data;
2044 asection *exidx, *extab;
2045 bfd_vma exidx_vma = 0, extab_vma = 0;
2046 LONGEST i;
2047
2048 /* If we've already touched this file, do nothing. */
2049 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2050 return;
2051
2052 /* Read contents of exception table and index. */
2053 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2054 gdb::byte_vector exidx_data;
2055 if (exidx)
2056 {
2057 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2058 exidx_data.resize (bfd_get_section_size (exidx));
2059
2060 if (!bfd_get_section_contents (objfile->obfd, exidx,
2061 exidx_data.data (), 0,
2062 exidx_data.size ()))
2063 return;
2064 }
2065
2066 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2067 gdb::byte_vector extab_data;
2068 if (extab)
2069 {
2070 extab_vma = bfd_section_vma (objfile->obfd, extab);
2071 extab_data.resize (bfd_get_section_size (extab));
2072
2073 if (!bfd_get_section_contents (objfile->obfd, extab,
2074 extab_data.data (), 0,
2075 extab_data.size ()))
2076 return;
2077 }
2078
2079 /* Allocate exception table data structure. */
2080 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2081 set_objfile_data (objfile, arm_exidx_data_key, data);
2082 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2083 objfile->obfd->section_count,
2084 VEC(arm_exidx_entry_s) *);
2085
2086 /* Fill in exception table. */
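 /* Each .ARM.exidx entry is a pair of 32-bit words: a prel31 offset
 to the start of the function it covers, followed by either the
 EXIDX_CANTUNWIND marker (1), an inline "short form" unwind entry
 (bit 31 set), or a prel31 offset to an entry in .ARM.extab. */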
2087 for (i = 0; i < exidx_data.size () / 8; i++)
2088 {
2089 struct arm_exidx_entry new_exidx_entry;
2090 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2091 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2092 exidx_data.data () + i * 8 + 4);
2093 bfd_vma addr = 0, word = 0;
2094 int n_bytes = 0, n_words = 0;
2095 struct obj_section *sec;
2096 gdb_byte *entry = NULL;
2097
2098 /* Extract address of start of function. */
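 /* The offset is a prel31 value: sign-extend the low 31 bits (bit 30
 is the sign bit) and add the address of the word itself. */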
2099 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2100 idx += exidx_vma + i * 8;
2101
2102 /* Find section containing function and compute section offset. */
2103 sec = arm_obj_section_from_vma (objfile, idx);
2104 if (sec == NULL)
2105 continue;
2106 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2107
2108 /* Determine address of exception table entry. */
2109 if (val == 1)
2110 {
2111 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2112 }
2113 else if ((val & 0xff000000) == 0x80000000)
2114 {
2115 /* Exception table entry embedded in .ARM.exidx
2116 -- must be short form. */
2117 word = val;
2118 n_bytes = 3;
2119 }
2120 else if (!(val & 0x80000000))
2121 {
2122 /* Exception table entry in .ARM.extab. */
2123 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2124 addr += exidx_vma + i * 8 + 4;
2125
2126 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2127 {
2128 word = bfd_h_get_32 (objfile->obfd,
2129 extab_data.data () + addr - extab_vma);
2130 addr += 4;
2131
2132 if ((word & 0xff000000) == 0x80000000)
2133 {
2134 /* Short form. */
2135 n_bytes = 3;
2136 }
2137 else if ((word & 0xff000000) == 0x81000000
2138 || (word & 0xff000000) == 0x82000000)
2139 {
2140 /* Long form. */
2141 n_bytes = 2;
2142 n_words = ((word >> 16) & 0xff);
2143 }
2144 else if (!(word & 0x80000000))
2145 {
2146 bfd_vma pers;
2147 struct obj_section *pers_sec;
2148 int gnu_personality = 0;
2149
2150 /* Custom personality routine. */
2151 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2152 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2153
2154 /* Check whether we've got one of the variants of the
2155 GNU personality routines. */
2156 pers_sec = arm_obj_section_from_vma (objfile, pers);
2157 if (pers_sec)
2158 {
2159 static const char *personality[] =
2160 {
2161 "__gcc_personality_v0",
2162 "__gxx_personality_v0",
2163 "__gcj_personality_v0",
2164 "__gnu_objc_personality_v0",
2165 NULL
2166 };
2167
2168 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2169 int k;
2170
2171 for (k = 0; personality[k]; k++)
2172 if (lookup_minimal_symbol_by_pc_name
2173 (pc, personality[k], objfile))
2174 {
2175 gnu_personality = 1;
2176 break;
2177 }
2178 }
2179
2180 /* If so, the next word contains a word count in the high
2181 byte, followed by the same unwind instructions as the
2182 pre-defined forms. */
2183 if (gnu_personality
2184 && addr + 4 <= extab_vma + extab_data.size ())
2185 {
2186 word = bfd_h_get_32 (objfile->obfd,
2187 (extab_data.data ()
2188 + addr - extab_vma));
2189 addr += 4;
2190 n_bytes = 3;
2191 n_words = ((word >> 24) & 0xff);
2192 }
2193 }
2194 }
2195 }
2196
2197 /* Sanity check address. */
2198 if (n_words)
2199 if (addr < extab_vma
2200 || addr + 4 * n_words > extab_vma + extab_data.size ())
2201 n_words = n_bytes = 0;
2202
2203 /* The unwind instructions reside in WORD (only the N_BYTES least
2204 significant bytes are valid), followed by N_WORDS words in the
2205 extab section starting at ADDR. */
2206 if (n_bytes || n_words)
2207 {
2208 gdb_byte *p = entry
2209 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2210 n_bytes + n_words * 4 + 1);
2211
2212 while (n_bytes--)
2213 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2214
2215 while (n_words--)
2216 {
2217 word = bfd_h_get_32 (objfile->obfd,
2218 extab_data.data () + addr - extab_vma);
2219 addr += 4;
2220
2221 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2222 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2223 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2224 *p++ = (gdb_byte) (word & 0xff);
2225 }
2226
2227 /* Implied "Finish" to terminate the list. */
2228 *p++ = 0xb0;
2229 }
2230
2231 /* Push the entry onto the vector.  Entries are guaranteed to
2232 always appear in order of increasing addresses. */
2233 new_exidx_entry.addr = idx;
2234 new_exidx_entry.entry = entry;
2235 VEC_safe_push (arm_exidx_entry_s,
2236 data->section_maps[sec->the_bfd_section->index],
2237 &new_exidx_entry);
2238 }
2239}
2240
2241/* Search for the exception table entry covering MEMADDR. If one is found,
2242 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2243 set *START to the start of the region covered by this entry. */
2244
2245static gdb_byte *
2246arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2247{
2248 struct obj_section *sec;
2249
2250 sec = find_pc_section (memaddr);
2251 if (sec != NULL)
2252 {
2253 struct arm_exidx_data *data;
2254 VEC(arm_exidx_entry_s) *map;
2255 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2256 unsigned int idx;
2257
2258 data = ((struct arm_exidx_data *)
2259 objfile_data (sec->objfile, arm_exidx_data_key));
2260 if (data != NULL)
2261 {
2262 map = data->section_maps[sec->the_bfd_section->index];
2263 if (!VEC_empty (arm_exidx_entry_s, map))
2264 {
2265 struct arm_exidx_entry *map_sym;
2266
2267 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2268 arm_compare_exidx_entries);
2269
2270 /* VEC_lower_bound finds the earliest ordered insertion
2271 point. If the following symbol starts at this exact
2272 address, we use that; otherwise, the preceding
2273 exception table entry covers this address. */
2274 if (idx < VEC_length (arm_exidx_entry_s, map))
2275 {
2276 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2277 if (map_sym->addr == map_key.addr)
2278 {
2279 if (start)
2280 *start = map_sym->addr + obj_section_addr (sec);
2281 return map_sym->entry;
2282 }
2283 }
2284
2285 if (idx > 0)
2286 {
2287 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2288 if (start)
2289 *start = map_sym->addr + obj_section_addr (sec);
2290 return map_sym->entry;
2291 }
2292 }
2293 }
2294 }
2295
2296 return NULL;
2297}
2298
2299/* Given the current frame THIS_FRAME, and its associated frame unwinding
2300 instruction list from the ARM exception table entry ENTRY, allocate and
2301 return a prologue cache structure describing how to unwind this frame.
2302
2303 Return NULL if the unwinding instruction list contains a "spare",
2304 "reserved" or "refuse to unwind" instruction as defined in section
2305 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2306 for the ARM Architecture" document. */
2307
2308static struct arm_prologue_cache *
2309arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2310{
2311 CORE_ADDR vsp = 0;
2312 int vsp_valid = 0;
2313
2314 struct arm_prologue_cache *cache;
2315 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2316 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2317
2318 for (;;)
2319 {
2320 gdb_byte insn;
2321
2322 /* Whenever we reload SP, we have to retrieve its actual
2323 value in the current frame. */
2324 if (!vsp_valid)
2325 {
2326 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2327 {
2328 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2329 vsp = get_frame_register_unsigned (this_frame, reg);
2330 }
2331 else
2332 {
2333 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2334 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2335 }
2336
2337 vsp_valid = 1;
2338 }
2339
2340 /* Decode next unwind instruction. */
2341 insn = *entry++;
2342
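 /* Opcode encodings below follow "9.3 Frame unwinding instructions"
 of the EHABI document cited above; e.g. 00xxxxxx means
 "vsp = vsp + (xxxxxx << 2) + 4" and 01xxxxxx means
 "vsp = vsp - (xxxxxx << 2) + 4". */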
2343 if ((insn & 0xc0) == 0)
2344 {
2345 int offset = insn & 0x3f;
2346 vsp += (offset << 2) + 4;
2347 }
2348 else if ((insn & 0xc0) == 0x40)
2349 {
2350 int offset = insn & 0x3f;
2351 vsp -= (offset << 2) + 4;
2352 }
2353 else if ((insn & 0xf0) == 0x80)
2354 {
2355 int mask = ((insn & 0xf) << 8) | *entry++;
2356 int i;
2357
2358 /* The special case of an all-zero mask identifies
2359 "Refuse to unwind". We return NULL to fall back
2360 to the prologue analyzer. */
2361 if (mask == 0)
2362 return NULL;
2363
2364 /* Pop registers r4..r15 under mask. */
2365 for (i = 0; i < 12; i++)
2366 if (mask & (1 << i))
2367 {
2368 cache->saved_regs[4 + i].addr = vsp;
2369 vsp += 4;
2370 }
2371
2372 /* Special-case popping SP -- we need to reload vsp. */
2373 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2374 vsp_valid = 0;
2375 }
2376 else if ((insn & 0xf0) == 0x90)
2377 {
2378 int reg = insn & 0xf;
2379
2380 /* Reserved cases. */
2381 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2382 return NULL;
2383
2384 /* Set SP from another register and mark VSP for reload. */
2385 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2386 vsp_valid = 0;
2387 }
2388 else if ((insn & 0xf0) == 0xa0)
2389 {
2390 int count = insn & 0x7;
2391 int pop_lr = (insn & 0x8) != 0;
2392 int i;
2393
2394 /* Pop r4..r[4+count]. */
2395 for (i = 0; i <= count; i++)
2396 {
2397 cache->saved_regs[4 + i].addr = vsp;
2398 vsp += 4;
2399 }
2400
2401 /* If indicated by flag, pop LR as well. */
2402 if (pop_lr)
2403 {
2404 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2405 vsp += 4;
2406 }
2407 }
2408 else if (insn == 0xb0)
2409 {
2410 /* We could only have updated PC by popping into it; if so, it
2411 will show up as an address.  Otherwise, copy LR into PC. */
2412 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2413 cache->saved_regs[ARM_PC_REGNUM]
2414 = cache->saved_regs[ARM_LR_REGNUM];
2415
2416 /* We're done. */
2417 break;
2418 }
2419 else if (insn == 0xb1)
2420 {
2421 int mask = *entry++;
2422 int i;
2423
2424 /* An all-zero mask, or any mask >= 16, is "spare". */
2425 if (mask == 0 || mask >= 16)
2426 return NULL;
2427
2428 /* Pop r0..r3 under mask. */
2429 for (i = 0; i < 4; i++)
2430 if (mask & (1 << i))
2431 {
2432 cache->saved_regs[i].addr = vsp;
2433 vsp += 4;
2434 }
2435 }
2436 else if (insn == 0xb2)
2437 {
2438 ULONGEST offset = 0;
2439 unsigned shift = 0;
2440
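 /* 0xb2: "vsp = vsp + 0x204 + (uleb128 << 2)".  Decode the ULEB128
 operand that follows the opcode. */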
2441 do
2442 {
2443 offset |= (*entry & 0x7f) << shift;
2444 shift += 7;
2445 }
2446 while (*entry++ & 0x80);
2447
2448 vsp += 0x204 + (offset << 2);
2449 }
2450 else if (insn == 0xb3)
2451 {
2452 int start = *entry >> 4;
2453 int count = (*entry++) & 0xf;
2454 int i;
2455
2456 /* Only registers D0..D15 are valid here. */
2457 if (start + count >= 16)
2458 return NULL;
2459
2460 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2461 for (i = 0; i <= count; i++)
2462 {
2463 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2464 vsp += 8;
2465 }
2466
2467 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2468 vsp += 4;
2469 }
2470 else if ((insn & 0xf8) == 0xb8)
2471 {
2472 int count = insn & 0x7;
2473 int i;
2474
2475 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2476 for (i = 0; i <= count; i++)
2477 {
2478 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2479 vsp += 8;
2480 }
2481
2482 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2483 vsp += 4;
2484 }
2485 else if (insn == 0xc6)
2486 {
2487 int start = *entry >> 4;
2488 int count = (*entry++) & 0xf;
2489 int i;
2490
2491 /* Only registers WR0..WR15 are valid. */
2492 if (start + count >= 16)
2493 return NULL;
2494
2495 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2496 for (i = 0; i <= count; i++)
2497 {
2498 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2499 vsp += 8;
2500 }
2501 }
2502 else if (insn == 0xc7)
2503 {
2504 int mask = *entry++;
2505 int i;
2506
2507 /* An all-zero mask, or any mask >= 16, is "spare". */
2508 if (mask == 0 || mask >= 16)
2509 return NULL;
2510
2511 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2512 for (i = 0; i < 4; i++)
2513 if (mask & (1 << i))
2514 {
2515 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2516 vsp += 4;
2517 }
2518 }
2519 else if ((insn & 0xf8) == 0xc0)
2520 {
2521 int count = insn & 0x7;
2522 int i;
2523
2524 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2525 for (i = 0; i <= count; i++)
2526 {
2527 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2528 vsp += 8;
2529 }
2530 }
2531 else if (insn == 0xc8)
2532 {
2533 int start = *entry >> 4;
2534 int count = (*entry++) & 0xf;
2535 int i;
2536
2537 /* Only registers D0..D31 are valid. */
2538 if (start + count >= 16)
2539 return NULL;
2540
2541 /* Pop VFP double-precision registers
2542 D[16+start]..D[16+start+count]. */
2543 for (i = 0; i <= count; i++)
2544 {
2545 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2546 vsp += 8;
2547 }
2548 }
2549 else if (insn == 0xc9)
2550 {
2551 int start = *entry >> 4;
2552 int count = (*entry++) & 0xf;
2553 int i;
2554
2555 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2556 for (i = 0; i <= count; i++)
2557 {
2558 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2559 vsp += 8;
2560 }
2561 }
2562 else if ((insn & 0xf8) == 0xd0)
2563 {
2564 int count = insn & 0x7;
2565 int i;
2566
2567 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2568 for (i = 0; i <= count; i++)
2569 {
2570 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2571 vsp += 8;
2572 }
2573 }
2574 else
2575 {
2576 /* Everything else is "spare". */
2577 return NULL;
2578 }
2579 }
2580
2581 /* If we restore SP from a register, assume this was the frame register.
2582 Otherwise just fall back to SP as frame register. */
2583 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2584 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2585 else
2586 cache->framereg = ARM_SP_REGNUM;
2587
2588 /* Determine offset to previous frame. */
2589 cache->framesize
2590 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2591
2592 /* We already got the previous SP. */
2593 cache->prev_sp = vsp;
2594
2595 return cache;
2596}
2597
2598/* Unwinding via ARM exception table entries. Note that the sniffer
2599 already computes a filled-in prologue cache, which is then used
2600 with the same arm_prologue_this_id and arm_prologue_prev_register
2601 routines also used for prologue-parsing based unwinding. */
2602
2603static int
2604arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2605 struct frame_info *this_frame,
2606 void **this_prologue_cache)
2607{
2608 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2609 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2610 CORE_ADDR addr_in_block, exidx_region, func_start;
2611 struct arm_prologue_cache *cache;
2612 gdb_byte *entry;
2613
2614 /* See if we have an ARM exception table entry covering this address. */
2615 addr_in_block = get_frame_address_in_block (this_frame);
2616 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2617 if (!entry)
2618 return 0;
2619
2620 /* The ARM exception table does not describe unwind information
2621 for arbitrary PC values, but is guaranteed to be correct only
2622 at call sites. We have to decide here whether we want to use
2623 ARM exception table information for this frame, or fall back
2624 to using prologue parsing. (Note that if we have DWARF CFI,
2625 this sniffer isn't even called -- CFI is always preferred.)
2626
2627 Before we make this decision, however, we check whether we
2628 actually have *symbol* information for the current frame.
2629 If not, prologue parsing would not work anyway, so we might
2630 as well use the exception table and hope for the best. */
2631 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2632 {
2633 int exc_valid = 0;
2634
2635 /* If the next frame is "normal", we are at a call site in this
2636 frame, so exception information is guaranteed to be valid. */
2637 if (get_next_frame (this_frame)
2638 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2639 exc_valid = 1;
2640
2641 /* We also assume exception information is valid if we're currently
2642 blocked in a system call. The system library is supposed to
2643 ensure this, so that e.g. pthread cancellation works. */
2644 if (arm_frame_is_thumb (this_frame))
2645 {
2646 ULONGEST insn;
2647
2648 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2649 2, byte_order_for_code, &insn)
2650 && (insn & 0xff00) == 0xdf00 /* svc */)
2651 exc_valid = 1;
2652 }
2653 else
2654 {
2655 ULONGEST insn;
2656
2657 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2658 4, byte_order_for_code, &insn)
2659 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2660 exc_valid = 1;
2661 }
2662
2663 /* Bail out if we don't know that exception information is valid. */
2664 if (!exc_valid)
2665 return 0;
2666
2667 /* The ARM exception index does not mark the *end* of the region
2668 covered by the entry, and some functions will not have any entry.
2669 To correctly recognize the end of the covered region, the linker
2670 should have inserted dummy records with a CANTUNWIND marker.
2671
2672 Unfortunately, current versions of GNU ld do not reliably do
2673 this, and thus we may have found an incorrect entry above.
2674 As a (temporary) sanity check, we only use the entry if it
2675 lies *within* the bounds of the function. Note that this check
2676 might reject perfectly valid entries that just happen to cover
2677 multiple functions; therefore this check ought to be removed
2678 once the linker is fixed. */
2679 if (func_start > exidx_region)
2680 return 0;
2681 }
2682
2683 /* Decode the list of unwinding instructions into a prologue cache.
2684 Note that this may fail due to e.g. a "refuse to unwind" code. */
2685 cache = arm_exidx_fill_cache (this_frame, entry);
2686 if (!cache)
2687 return 0;
2688
2689 *this_prologue_cache = cache;
2690 return 1;
2691}
2692
2693struct frame_unwind arm_exidx_unwind = {
2694 NORMAL_FRAME,
2695 default_frame_unwind_stop_reason,
2696 arm_prologue_this_id,
2697 arm_prologue_prev_register,
2698 NULL,
2699 arm_exidx_unwind_sniffer
2700};
2701
2702static struct arm_prologue_cache *
2703arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2704{
2705 struct arm_prologue_cache *cache;
2706 int reg;
2707
2708 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2709 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2710
2711 /* Still rely on the offsets calculated from the prologue. */
2712 arm_scan_prologue (this_frame, cache);
2713
2714 /* Since we are in the epilogue, the SP has already been restored. */
2715 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2716
2717 /* Calculate actual addresses of saved registers using offsets
2718 determined by arm_scan_prologue. */
2719 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2720 if (trad_frame_addr_p (cache->saved_regs, reg))
2721 cache->saved_regs[reg].addr += cache->prev_sp;
2722
2723 return cache;
2724}
2725
2726/* Implementation of function hook 'this_id' in
2727 'struct frame_unwind' for epilogue unwinder. */
2728
2729static void
2730arm_epilogue_frame_this_id (struct frame_info *this_frame,
2731 void **this_cache,
2732 struct frame_id *this_id)
2733{
2734 struct arm_prologue_cache *cache;
2735 CORE_ADDR pc, func;
2736
2737 if (*this_cache == NULL)
2738 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2739 cache = (struct arm_prologue_cache *) *this_cache;
2740
2741 /* Use function start address as part of the frame ID. If we cannot
2742 identify the start address (due to missing symbol information),
2743 fall back to just using the current PC. */
2744 pc = get_frame_pc (this_frame);
2745 func = get_frame_func (this_frame);
2746 if (func == 0)
2747 func = pc;
2748
2749 (*this_id) = frame_id_build (cache->prev_sp, pc);
2750}
2751
2752/* Implementation of function hook 'prev_register' in
2753 'struct frame_unwind' for epilogue unwinder. */
2754
2755static struct value *
2756arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2757 void **this_cache, int regnum)
2758{
2759 if (*this_cache == NULL)
2760 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2761
2762 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2763}
2764
2765static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2766 CORE_ADDR pc);
2767static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2768 CORE_ADDR pc);
2769
2770/* Implementation of function hook 'sniffer' in
2771 'struct frame_unwind' for epilogue unwinder. */
2772
2773static int
2774arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2775 struct frame_info *this_frame,
2776 void **this_prologue_cache)
2777{
2778 if (frame_relative_level (this_frame) == 0)
2779 {
2780 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2781 CORE_ADDR pc = get_frame_pc (this_frame);
2782
2783 if (arm_frame_is_thumb (this_frame))
2784 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2785 else
2786 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2787 }
2788 else
2789 return 0;
2790}
2791
2792/* Frame unwinder from epilogue. */
2793
2794static const struct frame_unwind arm_epilogue_frame_unwind =
2795{
2796 NORMAL_FRAME,
2797 default_frame_unwind_stop_reason,
2798 arm_epilogue_frame_this_id,
2799 arm_epilogue_frame_prev_register,
2800 NULL,
2801 arm_epilogue_frame_sniffer,
2802};
2803
2804/* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2805 trampoline, return the target PC. Otherwise return 0.
2806
2807 void call0a (char c, short s, int i, long l) {}
2808
2809 int main (void)
2810 {
2811 (*pointer_to_call0a) (c, s, i, l);
2812 }
2813
2814 Instead of calling a stub library function _call_via_xx (xx is
2815 the register name), GCC may inline the trampoline in the object
2816 file as below (register r2 has the address of call0a).
2817
2818 .global main
2819 .type main, %function
2820 ...
2821 bl .L1
2822 ...
2823 .size main, .-main
2824
2825 .L1:
2826 bx r2
2827
2828 The trampoline 'bx r2' doesn't belong to main. */
2829
2830static CORE_ADDR
2831arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2832{
2833 /* The heuristic for recognizing such a trampoline is that FRAME is
2834 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2835 if (arm_frame_is_thumb (frame))
2836 {
2837 gdb_byte buf[2];
2838
2839 if (target_read_memory (pc, buf, 2) == 0)
2840 {
2841 struct gdbarch *gdbarch = get_frame_arch (frame);
2842 enum bfd_endian byte_order_for_code
2843 = gdbarch_byte_order_for_code (gdbarch);
2844 uint16_t insn
2845 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2846
2847 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2848 {
2849 CORE_ADDR dest
2850 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2851
2852 /* Clear the LSB so that the gdb core sets the step-resume
2853 breakpoint at the right address. */
2854 return UNMAKE_THUMB_ADDR (dest);
2855 }
2856 }
2857 }
2858
2859 return 0;
2860}
2861
2862static struct arm_prologue_cache *
2863arm_make_stub_cache (struct frame_info *this_frame)
2864{
2865 struct arm_prologue_cache *cache;
2866
2867 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2868 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2869
2870 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2871
2872 return cache;
2873}
2874
2875 /* Our frame ID for a stub frame is the current SP and PC. */
2876
2877static void
2878arm_stub_this_id (struct frame_info *this_frame,
2879 void **this_cache,
2880 struct frame_id *this_id)
2881{
2882 struct arm_prologue_cache *cache;
2883
2884 if (*this_cache == NULL)
2885 *this_cache = arm_make_stub_cache (this_frame);
2886 cache = (struct arm_prologue_cache *) *this_cache;
2887
2888 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2889}
2890
2891static int
2892arm_stub_unwind_sniffer (const struct frame_unwind *self,
2893 struct frame_info *this_frame,
2894 void **this_prologue_cache)
2895{
2896 CORE_ADDR addr_in_block;
2897 gdb_byte dummy[4];
2898 CORE_ADDR pc, start_addr;
2899 const char *name;
2900
2901 addr_in_block = get_frame_address_in_block (this_frame);
2902 pc = get_frame_pc (this_frame);
2903 if (in_plt_section (addr_in_block)
2904 /* We also use the stub unwinder if the target memory is unreadable
2905 to avoid having the prologue unwinder trying to read it. */
2906 || target_read_memory (pc, dummy, 4) != 0)
2907 return 1;
2908
2909 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2910 && arm_skip_bx_reg (this_frame, pc) != 0)
2911 return 1;
2912
2913 return 0;
2914}
2915
2916struct frame_unwind arm_stub_unwind = {
2917 NORMAL_FRAME,
2918 default_frame_unwind_stop_reason,
2919 arm_stub_this_id,
2920 arm_prologue_prev_register,
2921 NULL,
2922 arm_stub_unwind_sniffer
2923};
2924
2925 /* Store, into CACHE->saved_regs, the addresses of the saved
2926 registers of the frame described by THIS_FRAME.  CACHE is
2927 returned. */
2928
2929static struct arm_prologue_cache *
2930arm_m_exception_cache (struct frame_info *this_frame)
2931{
2932 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2933 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2934 struct arm_prologue_cache *cache;
2935 CORE_ADDR unwound_sp;
2936 LONGEST xpsr;
2937
2938 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2939 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2940
2941 unwound_sp = get_frame_register_unsigned (this_frame,
2942 ARM_SP_REGNUM);
2943
2944 /* The hardware saves eight 32-bit words, comprising xPSR,
2945 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2946 "B1.5.6 Exception entry behavior" in
2947 "ARMv7-M Architecture Reference Manual". */
2948 cache->saved_regs[0].addr = unwound_sp;
2949 cache->saved_regs[1].addr = unwound_sp + 4;
2950 cache->saved_regs[2].addr = unwound_sp + 8;
2951 cache->saved_regs[3].addr = unwound_sp + 12;
2952 cache->saved_regs[12].addr = unwound_sp + 16;
2953 cache->saved_regs[14].addr = unwound_sp + 20;
2954 cache->saved_regs[15].addr = unwound_sp + 24;
2955 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2956
2957 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2958 aligner between the top of the 32-byte stack frame and the
2959 previous context's stack pointer. */
2960 cache->prev_sp = unwound_sp + 32;
2961 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2962 && (xpsr & (1 << 9)) != 0)
2963 cache->prev_sp += 4;
2964
2965 return cache;
2966}
2967
2968/* Implementation of function hook 'this_id' in
2969 'struct frame_unwind'. */
2970
2971static void
2972arm_m_exception_this_id (struct frame_info *this_frame,
2973 void **this_cache,
2974 struct frame_id *this_id)
2975{
2976 struct arm_prologue_cache *cache;
2977
2978 if (*this_cache == NULL)
2979 *this_cache = arm_m_exception_cache (this_frame);
2980 cache = (struct arm_prologue_cache *) *this_cache;
2981
2982 /* Our frame ID for an exception frame is the unwound SP and the PC. */
2983 *this_id = frame_id_build (cache->prev_sp,
2984 get_frame_pc (this_frame));
2985}
2986
2987/* Implementation of function hook 'prev_register' in
2988 'struct frame_unwind'. */
2989
2990static struct value *
2991arm_m_exception_prev_register (struct frame_info *this_frame,
2992 void **this_cache,
2993 int prev_regnum)
2994{
2995 struct arm_prologue_cache *cache;
2996
2997 if (*this_cache == NULL)
2998 *this_cache = arm_m_exception_cache (this_frame);
2999 cache = (struct arm_prologue_cache *) *this_cache;
3000
3001 /* The value was already reconstructed into PREV_SP. */
3002 if (prev_regnum == ARM_SP_REGNUM)
3003 return frame_unwind_got_constant (this_frame, prev_regnum,
3004 cache->prev_sp);
3005
3006 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3007 prev_regnum);
3008}
3009
3010/* Implementation of function hook 'sniffer' in
3011 'struct frame_unwind'. */
3012
3013static int
3014arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3015 struct frame_info *this_frame,
3016 void **this_prologue_cache)
3017{
3018 CORE_ADDR this_pc = get_frame_pc (this_frame);
3019
3020 /* No need to check is_m; this sniffer is only registered for
3021 M-profile architectures. */
3022
3023 /* Check if exception frame returns to a magic PC value. */
3024 return arm_m_addr_is_magic (this_pc);
3025}
3026
3027/* Frame unwinder for M-profile exceptions. */
3028
3029struct frame_unwind arm_m_exception_unwind =
3030{
3031 SIGTRAMP_FRAME,
3032 default_frame_unwind_stop_reason,
3033 arm_m_exception_this_id,
3034 arm_m_exception_prev_register,
3035 NULL,
3036 arm_m_exception_unwind_sniffer
3037};
3038
3039static CORE_ADDR
3040arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3041{
3042 struct arm_prologue_cache *cache;
3043
3044 if (*this_cache == NULL)
3045 *this_cache = arm_make_prologue_cache (this_frame);
3046 cache = (struct arm_prologue_cache *) *this_cache;
3047
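 /* The frame base is the value the frame register held in this frame,
 i.e. the previous SP minus the frame size. */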
3048 return cache->prev_sp - cache->framesize;
3049}
3050
3051struct frame_base arm_normal_base = {
3052 &arm_prologue_unwind,
3053 arm_normal_frame_base,
3054 arm_normal_frame_base,
3055 arm_normal_frame_base
3056};
3057
3058/* Assuming THIS_FRAME is a dummy, return the frame ID of that
3059 dummy frame. The frame ID's base needs to match the TOS value
3060 saved by save_dummy_frame_tos() and returned from
3061 arm_push_dummy_call, and the PC needs to match the dummy frame's
3062 breakpoint. */
3063
3064static struct frame_id
3065arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3066{
3067 return frame_id_build (get_frame_register_unsigned (this_frame,
3068 ARM_SP_REGNUM),
3069 get_frame_pc (this_frame));
3070}
3071
3072/* Given THIS_FRAME, find the previous frame's resume PC (which will
3073 be used to construct the previous frame's ID, after looking up the
3074 containing function). */
3075
3076static CORE_ADDR
3077arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3078{
3079 CORE_ADDR pc;
3080 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3081 return arm_addr_bits_remove (gdbarch, pc);
3082}
3083
3084static CORE_ADDR
3085arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3086{
3087 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3088}
3089
3090static struct value *
3091arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3092 int regnum)
3093{
3094 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3095 CORE_ADDR lr, cpsr;
3096 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3097
3098 switch (regnum)
3099 {
3100 case ARM_PC_REGNUM:
3101 /* The PC is normally copied from the return column, which
3102 describes saves of LR. However, that version may have an
3103 extra bit set to indicate Thumb state. The bit is not
3104 part of the PC. */
3105 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3106 return frame_unwind_got_constant (this_frame, regnum,
3107 arm_addr_bits_remove (gdbarch, lr));
3108
3109 case ARM_PS_REGNUM:
3110 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3111 cpsr = get_frame_register_unsigned (this_frame, regnum);
3112 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3113 if (IS_THUMB_ADDR (lr))
3114 cpsr |= t_bit;
3115 else
3116 cpsr &= ~t_bit;
3117 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3118
3119 default:
3120 internal_error (__FILE__, __LINE__,
3121 _("Unexpected register %d"), regnum);
3122 }
3123}
3124
3125static void
3126arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3127 struct dwarf2_frame_state_reg *reg,
3128 struct frame_info *this_frame)
3129{
3130 switch (regnum)
3131 {
3132 case ARM_PC_REGNUM:
3133 case ARM_PS_REGNUM:
3134 reg->how = DWARF2_FRAME_REG_FN;
3135 reg->loc.fn = arm_dwarf2_prev_register;
3136 break;
3137 case ARM_SP_REGNUM:
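 /* The unwound SP is by definition the CFA, i.e. the caller's stack
 pointer at the call site. */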
3138 reg->how = DWARF2_FRAME_REG_CFA;
3139 break;
3140 }
3141}
3142
3143/* Implement the stack_frame_destroyed_p gdbarch method. */
3144
3145static int
3146thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3147{
3148 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3149 unsigned int insn, insn2;
3150 int found_return = 0, found_stack_adjust = 0;
3151 CORE_ADDR func_start, func_end;
3152 CORE_ADDR scan_pc;
3153 gdb_byte buf[4];
3154
3155 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3156 return 0;
3157
3158 /* The epilogue is a sequence of instructions along the following lines:
3159
3160 - add stack frame size to SP or FP
3161 - [if frame pointer used] restore SP from FP
3162 - restore registers from SP [may include PC]
3163 - a return-type instruction [if PC wasn't already restored]
3164
3165 In a first pass, we scan forward from the current PC and verify the
3166 instructions we find as compatible with this sequence, ending in a
3167 return instruction.
3168
3169 However, this is not sufficient to distinguish indirect function calls
3170 within a function from indirect tail calls in the epilogue in some cases.
3171 Therefore, if we didn't already find any SP-changing instruction during
3172 forward scan, we add a backward scanning heuristic to ensure we actually
3173 are in the epilogue. */
3174
3175 scan_pc = pc;
3176 while (scan_pc < func_end && !found_return)
3177 {
3178 if (target_read_memory (scan_pc, buf, 2))
3179 break;
3180
3181 scan_pc += 2;
3182 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3183
3184 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3185 found_return = 1;
3186 else if (insn == 0x46f7) /* mov pc, lr */
3187 found_return = 1;
3188 else if (thumb_instruction_restores_sp (insn))
3189 {
3190 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3191 found_return = 1;
3192 }
3193 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3194 {
3195 if (target_read_memory (scan_pc, buf, 2))
3196 break;
3197
3198 scan_pc += 2;
3199 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3200
3201 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3202 {
3203 if (insn2 & 0x8000) /* <registers> include PC. */
3204 found_return = 1;
3205 }
3206 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3207 && (insn2 & 0x0fff) == 0x0b04)
3208 {
3209 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3210 found_return = 1;
3211 }
3212 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3213 && (insn2 & 0x0e00) == 0x0a00)
3214 ;
3215 else
3216 break;
3217 }
3218 else
3219 break;
3220 }
3221
3222 if (!found_return)
3223 return 0;
3224
3225 /* Since any instruction in the epilogue sequence, with the possible
3226 exception of return itself, updates the stack pointer, we need to
3227 scan backwards for at most one instruction. Try either a 16-bit or
3228 a 32-bit instruction. This is just a heuristic, so we do not worry
3229 too much about false positives. */
3230
3231 if (pc - 4 < func_start)
3232 return 0;
3233 if (target_read_memory (pc - 4, buf, 4))
3234 return 0;
3235
3236 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3237 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3238
3239 if (thumb_instruction_restores_sp (insn2))
3240 found_stack_adjust = 1;
3241 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3242 found_stack_adjust = 1;
3243 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3244 && (insn2 & 0x0fff) == 0x0b04)
3245 found_stack_adjust = 1;
3246 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3247 && (insn2 & 0x0e00) == 0x0a00)
3248 found_stack_adjust = 1;
3249
3250 return found_stack_adjust;
3251}
3252
3253static int
3254arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3255{
3256 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3257 unsigned int insn;
3258 int found_return;
3259 CORE_ADDR func_start, func_end;
3260
3261 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3262 return 0;
3263
3264 /* We are in the epilogue if the previous instruction was a stack
3265 adjustment and the next instruction is a possible return (bx, mov
3266 pc, or pop). We could have to scan backwards to find the stack
3267 adjustment, or forwards to find the return, but this is a decent
3268 approximation. First scan forwards. */
3269
3270 found_return = 0;
3271 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3272 if (bits (insn, 28, 31) != INST_NV)
3273 {
3274 if ((insn & 0x0ffffff0) == 0x012fff10)
3275 /* BX. */
3276 found_return = 1;
3277 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3278 /* MOV PC. */
3279 found_return = 1;
3280 else if ((insn & 0x0fff0000) == 0x08bd0000
3281 && (insn & 0x0000c000) != 0)
3282 /* POP (LDMIA), including PC or LR. */
3283 found_return = 1;
3284 }
3285
3286 if (!found_return)
3287 return 0;
3288
3289 /* Scan backwards. This is just a heuristic, so do not worry about
3290 false positives from mode changes. */
3291
3292 if (pc < func_start + 4)
3293 return 0;
3294
3295 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3296 if (arm_instruction_restores_sp (insn))
3297 return 1;
3298
3299 return 0;
3300}
3301
3302/* Implement the stack_frame_destroyed_p gdbarch method. */
3303
3304static int
3305arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3306{
3307 if (arm_pc_is_thumb (gdbarch, pc))
3308 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3309 else
3310 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3311}
3312
3313/* When arguments must be pushed onto the stack, they go on in reverse
3314 order. The code below implements a FILO (stack) to do this. */
3315
3316struct stack_item
3317{
3318 int len;
3319 struct stack_item *prev;
3320 gdb_byte *data;
3321};
3322
3323static struct stack_item *
3324push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3325{
3326 struct stack_item *si;
3327 si = XNEW (struct stack_item);
3328 si->data = (gdb_byte *) xmalloc (len);
3329 si->len = len;
3330 si->prev = prev;
3331 memcpy (si->data, contents, len);
3332 return si;
3333}
3334
3335static struct stack_item *
3336pop_stack_item (struct stack_item *si)
3337{
3338 struct stack_item *dead = si;
3339 si = si->prev;
3340 xfree (dead->data);
3341 xfree (dead);
3342 return si;
3343}
3344
3345
3346/* Return the alignment (in bytes) of the given type. */
3347
3348static int
3349arm_type_align (struct type *t)
3350{
3351 int n;
3352 int align;
3353 int falign;
3354
3355 t = check_typedef (t);
3356 switch (TYPE_CODE (t))
3357 {
3358 default:
3359 /* Should never happen. */
3360 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3361 return 4;
3362
3363 case TYPE_CODE_PTR:
3364 case TYPE_CODE_ENUM:
3365 case TYPE_CODE_INT:
3366 case TYPE_CODE_FLT:
3367 case TYPE_CODE_SET:
3368 case TYPE_CODE_RANGE:
3369 case TYPE_CODE_REF:
3370 case TYPE_CODE_RVALUE_REF:
3371 case TYPE_CODE_CHAR:
3372 case TYPE_CODE_BOOL:
3373 return TYPE_LENGTH (t);
3374
3375 case TYPE_CODE_ARRAY:
3376 if (TYPE_VECTOR (t))
3377 {
3378 /* Use the natural alignment for vector types (the same as for
3379 their scalar element type), but cap the alignment at 64 bits. */
3380 if (TYPE_LENGTH (t) > 8)
3381 return 8;
3382 else
3383 return TYPE_LENGTH (t);
3384 }
3385 else
3386 return arm_type_align (TYPE_TARGET_TYPE (t));
3387 case TYPE_CODE_COMPLEX:
3388 return arm_type_align (TYPE_TARGET_TYPE (t));
3389
3390 case TYPE_CODE_STRUCT:
3391 case TYPE_CODE_UNION:
3392 align = 1;
3393 for (n = 0; n < TYPE_NFIELDS (t); n++)
3394 {
3395 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3396 if (falign > align)
3397 align = falign;
3398 }
3399 return align;
3400 }
3401}
3402
3403/* Possible base types for a candidate for passing and returning in
3404 VFP registers. */
3405
3406enum arm_vfp_cprc_base_type
3407{
3408 VFP_CPRC_UNKNOWN,
3409 VFP_CPRC_SINGLE,
3410 VFP_CPRC_DOUBLE,
3411 VFP_CPRC_VEC64,
3412 VFP_CPRC_VEC128
3413};
3414
3415/* The length of one element of base type B. */
3416
3417static unsigned
3418arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3419{
3420 switch (b)
3421 {
3422 case VFP_CPRC_SINGLE:
3423 return 4;
3424 case VFP_CPRC_DOUBLE:
3425 return 8;
3426 case VFP_CPRC_VEC64:
3427 return 8;
3428 case VFP_CPRC_VEC128:
3429 return 16;
3430 default:
3431 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3432 (int) b);
3433 }
3434}
3435
3436/* The character ('s', 'd' or 'q') for the type of VFP register used
3437 for passing base type B. */
3438
3439static int
3440arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3441{
3442 switch (b)
3443 {
3444 case VFP_CPRC_SINGLE:
3445 return 's';
3446 case VFP_CPRC_DOUBLE:
3447 return 'd';
3448 case VFP_CPRC_VEC64:
3449 return 'd';
3450 case VFP_CPRC_VEC128:
3451 return 'q';
3452 default:
3453 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3454 (int) b);
3455 }
3456}
3457
3458/* Determine whether T may be part of a candidate for passing and
3459 returning in VFP registers, ignoring the limit on the total number
3460 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3461 classification of the first valid component found; if it is not
3462 VFP_CPRC_UNKNOWN, all components must have the same classification
3463 as *BASE_TYPE. If it is found that T contains a type not permitted
3464 for passing and returning in VFP registers, a type differently
3465 classified from *BASE_TYPE, or two types differently classified
3466 from each other, return -1, otherwise return the total number of
3467 base-type elements found (possibly 0 in an empty structure or
3468 array). Vector types are not currently supported, matching the
3469 generic AAPCS support. */
3470
3471static int
3472arm_vfp_cprc_sub_candidate (struct type *t,
3473 enum arm_vfp_cprc_base_type *base_type)
3474{
3475 t = check_typedef (t);
3476 switch (TYPE_CODE (t))
3477 {
3478 case TYPE_CODE_FLT:
3479 switch (TYPE_LENGTH (t))
3480 {
3481 case 4:
3482 if (*base_type == VFP_CPRC_UNKNOWN)
3483 *base_type = VFP_CPRC_SINGLE;
3484 else if (*base_type != VFP_CPRC_SINGLE)
3485 return -1;
3486 return 1;
3487
3488 case 8:
3489 if (*base_type == VFP_CPRC_UNKNOWN)
3490 *base_type = VFP_CPRC_DOUBLE;
3491 else if (*base_type != VFP_CPRC_DOUBLE)
3492 return -1;
3493 return 1;
3494
3495 default:
3496 return -1;
3497 }
3498 break;
3499
3500 case TYPE_CODE_COMPLEX:
3501 /* Arguments of complex T where T is one of the types float or
3502 double get treated as if they are implemented as:
3503
3504 struct complexT
3505 {
3506 T real;
3507 T imag;
3508 };
3509
3510 */
3511 switch (TYPE_LENGTH (t))
3512 {
3513 case 8:
3514 if (*base_type == VFP_CPRC_UNKNOWN)
3515 *base_type = VFP_CPRC_SINGLE;
3516 else if (*base_type != VFP_CPRC_SINGLE)
3517 return -1;
3518 return 2;
3519
3520 case 16:
3521 if (*base_type == VFP_CPRC_UNKNOWN)
3522 *base_type = VFP_CPRC_DOUBLE;
3523 else if (*base_type != VFP_CPRC_DOUBLE)
3524 return -1;
3525 return 2;
3526
3527 default:
3528 return -1;
3529 }
3530 break;
3531
3532 case TYPE_CODE_ARRAY:
3533 {
3534 if (TYPE_VECTOR (t))
3535 {
3536 /* 64-bit and 128-bit containerized vector types are VFP
3537 CPRCs. */
3538 switch (TYPE_LENGTH (t))
3539 {
3540 case 8:
3541 if (*base_type == VFP_CPRC_UNKNOWN)
3542 *base_type = VFP_CPRC_VEC64;
3543 return 1;
3544 case 16:
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_VEC128;
3547 return 1;
3548 default:
3549 return -1;
3550 }
3551 }
3552 else
3553 {
3554 int count;
3555 unsigned unitlen;
3556
3557 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3558 base_type);
3559 if (count == -1)
3560 return -1;
3561 if (TYPE_LENGTH (t) == 0)
3562 {
3563 gdb_assert (count == 0);
3564 return 0;
3565 }
3566 else if (count == 0)
3567 return -1;
3568 unitlen = arm_vfp_cprc_unit_length (*base_type);
3569 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3570 return TYPE_LENGTH (t) / unitlen;
3571 }
3572 }
3573 break;
3574
3575 case TYPE_CODE_STRUCT:
3576 {
3577 int count = 0;
3578 unsigned unitlen;
3579 int i;
3580 for (i = 0; i < TYPE_NFIELDS (t); i++)
3581 {
3582 int sub_count = 0;
3583
3584 if (!field_is_static (&TYPE_FIELD (t, i)))
3585 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3586 base_type);
3587 if (sub_count == -1)
3588 return -1;
3589 count += sub_count;
3590 }
3591 if (TYPE_LENGTH (t) == 0)
3592 {
3593 gdb_assert (count == 0);
3594 return 0;
3595 }
3596 else if (count == 0)
3597 return -1;
3598 unitlen = arm_vfp_cprc_unit_length (*base_type);
3599 if (TYPE_LENGTH (t) != unitlen * count)
3600 return -1;
3601 return count;
3602 }
3603
3604 case TYPE_CODE_UNION:
3605 {
3606 int count = 0;
3607 unsigned unitlen;
3608 int i;
3609 for (i = 0; i < TYPE_NFIELDS (t); i++)
3610 {
3611 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3612 base_type);
3613 if (sub_count == -1)
3614 return -1;
3615 count = (count > sub_count ? count : sub_count);
3616 }
3617 if (TYPE_LENGTH (t) == 0)
3618 {
3619 gdb_assert (count == 0);
3620 return 0;
3621 }
3622 else if (count == 0)
3623 return -1;
3624 unitlen = arm_vfp_cprc_unit_length (*base_type);
3625 if (TYPE_LENGTH (t) != unitlen * count)
3626 return -1;
3627 return count;
3628 }
3629
3630 default:
3631 break;
3632 }
3633
3634 return -1;
3635}
3636
3637/* Determine whether T is a VFP co-processor register candidate (CPRC)
3638 if passed to or returned from a non-variadic function with the VFP
3639 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3640 *BASE_TYPE to the base type for T and *COUNT to the number of
3641 elements of that base type before returning. */
3642
3643static int
3644arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3645 int *count)
3646{
3647 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3648 int c = arm_vfp_cprc_sub_candidate (t, &b);
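 /* Under the VFP variant of the AAPCS, a CPRC consists of one to
 four elements of a single base type. */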
3649 if (c <= 0 || c > 4)
3650 return 0;
3651 *base_type = b;
3652 *count = c;
3653 return 1;
3654}
3655
3656/* Return 1 if the VFP ABI should be used for passing arguments to and
3657 returning values from a function of type FUNC_TYPE, 0
3658 otherwise. */
3659
3660static int
3661arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3662{
3663 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3664 /* Variadic functions always use the base ABI. Assume that functions
3665 without debug info are not variadic. */
3666 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3667 return 0;
3668 /* The VFP ABI is only supported as a variant of AAPCS. */
3669 if (tdep->arm_abi != ARM_ABI_AAPCS)
3670 return 0;
3671 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3672}
3673
3674 /* We currently only support passing parameters in integer registers, which
3675 conforms to GCC's default model, and VFP argument passing following
3676 the VFP variant of AAPCS.  Several other variants exist and
3677 we should probably support some of them based on the selected ABI. */
3678
3679static CORE_ADDR
3680arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3681 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3682 struct value **args, CORE_ADDR sp, int struct_return,
3683 CORE_ADDR struct_addr)
3684{
3685 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3686 int argnum;
3687 int argreg;
3688 int nstack;
3689 struct stack_item *si = NULL;
3690 int use_vfp_abi;
3691 struct type *ftype;
3692 unsigned vfp_regs_free = (1 << 16) - 1;
3693
3694 /* Determine the type of this function and whether the VFP ABI
3695 applies. */
3696 ftype = check_typedef (value_type (function));
3697 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3698 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3699 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3700
3701 /* Set the return address. For the ARM, the return breakpoint is
3702 always at BP_ADDR. */
3703 if (arm_pc_is_thumb (gdbarch, bp_addr))
3704 bp_addr |= 1;
3705 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3706
3707 /* Walk through the list of args and determine how large a temporary
3708 stack is required. Need to take care here as structs may be
3709 passed on the stack, and we have to push them. */
3710 nstack = 0;
3711
3712 argreg = ARM_A1_REGNUM;
3714
3715 /* The struct_return pointer occupies the first parameter
3716 passing register. */
3717 if (struct_return)
3718 {
3719 if (arm_debug)
3720 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3721 gdbarch_register_name (gdbarch, argreg),
3722 paddress (gdbarch, struct_addr));
3723 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3724 argreg++;
3725 }
3726
3727 for (argnum = 0; argnum < nargs; argnum++)
3728 {
3729 int len;
3730 struct type *arg_type;
3731 struct type *target_type;
3732 enum type_code typecode;
3733 const bfd_byte *val;
3734 int align;
3735 enum arm_vfp_cprc_base_type vfp_base_type;
3736 int vfp_base_count;
3737 int may_use_core_reg = 1;
3738
3739 arg_type = check_typedef (value_type (args[argnum]));
3740 len = TYPE_LENGTH (arg_type);
3741 target_type = TYPE_TARGET_TYPE (arg_type);
3742 typecode = TYPE_CODE (arg_type);
3743 val = value_contents (args[argnum]);
3744
3745 align = arm_type_align (arg_type);
3746 /* Round alignment up to a whole number of words. */
3747 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3748 /* Different ABIs have different maximum alignments. */
3749 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3750 {
3751 /* The APCS ABI only requires word alignment. */
3752 align = INT_REGISTER_SIZE;
3753 }
3754 else
3755 {
3756 /* The AAPCS requires at most doubleword alignment. */
3757 if (align > INT_REGISTER_SIZE * 2)
3758 align = INT_REGISTER_SIZE * 2;
3759 }
3760
3761 if (use_vfp_abi
3762 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3763 &vfp_base_count))
3764 {
3765 int regno;
3766 int unit_length;
3767 int shift;
3768 unsigned mask;
3769
3770 /* Because this is a CPRC it cannot go in a core register or
3771 cause a core register to be skipped for alignment.
3772 Either it goes in VFP registers and the rest of this loop
3773 iteration is skipped for this argument, or it goes on the
3774 stack (and the stack alignment code is correct for this
3775 case). */
3776 may_use_core_reg = 0;
3777
3778 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3779 shift = unit_length / 4;
3780 mask = (1 << (shift * vfp_base_count)) - 1;
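 /* For example, for a homogeneous aggregate of two doubles, UNIT_LENGTH
 is 8, SHIFT is 2 and MASK is 0xf, so the loop below looks for four
 consecutive free single-precision slots, i.e. a pair of consecutive
 free D registers. */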
3781 for (regno = 0; regno < 16; regno += shift)
3782 if (((vfp_regs_free >> regno) & mask) == mask)
3783 break;
3784
3785 if (regno < 16)
3786 {
3787 int reg_char;
3788 int reg_scaled;
3789 int i;
3790
3791 vfp_regs_free &= ~(mask << regno);
3792 reg_scaled = regno / shift;
3793 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3794 for (i = 0; i < vfp_base_count; i++)
3795 {
3796 char name_buf[4];
3797 int regnum;
3798 if (reg_char == 'q')
3799 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3800 val + i * unit_length);
3801 else
3802 {
3803 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3804 reg_char, reg_scaled + i);
3805 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3806 strlen (name_buf));
3807 regcache->cooked_write (regnum, val + i * unit_length);
3808 }
3809 }
3810 continue;
3811 }
3812 else
3813 {
3814 /* This CPRC could not go in VFP registers, so all VFP
3815 registers are now marked as used. */
3816 vfp_regs_free = 0;
3817 }
3818 }
3819
3820 /* Push stack padding for doubleword alignment. */
3821 if (nstack & (align - 1))
3822 {
3823 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3824 nstack += INT_REGISTER_SIZE;
3825 }
3826
3827 /* Doubleword aligned quantities must go in even register pairs. */
3828 if (may_use_core_reg
3829 && argreg <= ARM_LAST_ARG_REGNUM
3830 && align > INT_REGISTER_SIZE
3831 && argreg & 1)
3832 argreg++;
3833
3834 /* If the argument is a pointer to a function, and it is a
3835 Thumb function, create a LOCAL copy of the value and set
3836 the THUMB bit in it. */
3837 if (TYPE_CODE_PTR == typecode
3838 && target_type != NULL
3839 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3840 {
3841 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3842 if (arm_pc_is_thumb (gdbarch, regval))
3843 {
3844 bfd_byte *copy = (bfd_byte *) alloca (len);
3845 store_unsigned_integer (copy, len, byte_order,
3846 MAKE_THUMB_ADDR (regval));
3847 val = copy;
3848 }
3849 }
3850
3851 /* Copy the argument to general registers or the stack in
3852 register-sized pieces. Large arguments are split between
3853 registers and stack. */
3854 while (len > 0)
3855 {
3856 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3857 CORE_ADDR regval
3858 = extract_unsigned_integer (val, partial_len, byte_order);
3859
3860 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3861 {
3862 /* The argument is being passed in a general purpose
3863 register. */
3864 if (byte_order == BFD_ENDIAN_BIG)
3865 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3866 if (arm_debug)
3867 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3868 argnum,
3869 gdbarch_register_name
3870 (gdbarch, argreg),
3871 phex (regval, INT_REGISTER_SIZE));
3872 regcache_cooked_write_unsigned (regcache, argreg, regval);
3873 argreg++;
3874 }
3875 else
3876 {
3877 gdb_byte buf[INT_REGISTER_SIZE];
3878
3879 memset (buf, 0, sizeof (buf));
3880 store_unsigned_integer (buf, partial_len, byte_order, regval);
3881
3882 /* Push the arguments onto the stack. */
3883 if (arm_debug)
3884 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3885 argnum, nstack);
3886 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3887 nstack += INT_REGISTER_SIZE;
3888 }
3889
3890 len -= partial_len;
3891 val += partial_len;
3892 }
3893 }
3894 /* If we have an odd number of words to push, then decrement the stack
3895 by one word now, so that the first stack argument will be dword aligned. */
3896 if (nstack & 4)
3897 sp -= 4;
3898
3899 while (si)
3900 {
3901 sp -= si->len;
3902 write_memory (sp, si->data, si->len);
3903 si = pop_stack_item (si);
3904 }
3905
3906 /* Finally, update the SP register. */
3907 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3908
3909 return sp;
3910}
3911
3912
3913/* Always align the frame to an 8-byte boundary. This is required on
3914 some platforms and harmless on the rest. */
3915
3916static CORE_ADDR
3917arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3918{
3919 /* Align the stack to eight bytes. */
3920 return sp & ~ (CORE_ADDR) 7;
3921}
3922
3923static void
3924print_fpu_flags (struct ui_file *file, int flags)
3925{
3926 if (flags & (1 << 0))
3927 fputs_filtered ("IVO ", file);
3928 if (flags & (1 << 1))
3929 fputs_filtered ("DVZ ", file);
3930 if (flags & (1 << 2))
3931 fputs_filtered ("OFL ", file);
3932 if (flags & (1 << 3))
3933 fputs_filtered ("UFL ", file);
3934 if (flags & (1 << 4))
3935 fputs_filtered ("INX ", file);
3936 fputc_filtered ('\n', file);
3937}
3938
3939/* Print interesting information about the floating point processor
3940 (if present) or emulator. */
3941static void
3942arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3943 struct frame_info *frame, const char *args)
3944{
3945 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3946 int type;
3947
3948 type = (status >> 24) & 127;
3949 if (status & (1 << 31))
3950 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3951 else
3952 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3953 /* i18n: [floating point unit] mask */
3954 fputs_filtered (_("mask: "), file);
3955 print_fpu_flags (file, status >> 16);
3956 /* i18n: [floating point unit] flags */
3957 fputs_filtered (_("flags: "), file);
3958 print_fpu_flags (file, status);
3959}
3960
3961/* Construct the ARM extended floating point type. */
3962static struct type *
3963arm_ext_type (struct gdbarch *gdbarch)
3964{
3965 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3966
3967 if (!tdep->arm_ext_type)
3968 tdep->arm_ext_type
3969 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3970 floatformats_arm_ext);
3971
3972 return tdep->arm_ext_type;
3973}
3974
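/* Construct a union type describing the contents of a 64-bit NEON "D"
 register.  The result is roughly equivalent to:

 union neon_d
 {
 uint8_t u8[8]; uint16_t u16[4]; uint32_t u32[2]; uint64_t u64;
 float f32[2]; double f64;
 }; */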
3975static struct type *
3976arm_neon_double_type (struct gdbarch *gdbarch)
3977{
3978 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3979
3980 if (tdep->neon_double_type == NULL)
3981 {
3982 struct type *t, *elem;
3983
3984 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3985 TYPE_CODE_UNION);
3986 elem = builtin_type (gdbarch)->builtin_uint8;
3987 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3988 elem = builtin_type (gdbarch)->builtin_uint16;
3989 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3990 elem = builtin_type (gdbarch)->builtin_uint32;
3991 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3992 elem = builtin_type (gdbarch)->builtin_uint64;
3993 append_composite_type_field (t, "u64", elem);
3994 elem = builtin_type (gdbarch)->builtin_float;
3995 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3996 elem = builtin_type (gdbarch)->builtin_double;
3997 append_composite_type_field (t, "f64", elem);
3998
3999 TYPE_VECTOR (t) = 1;
4000 TYPE_NAME (t) = "neon_d";
4001 tdep->neon_double_type = t;
4002 }
4003
4004 return tdep->neon_double_type;
4005}
4006
4007/* FIXME: The vector types are not correctly ordered on big-endian
4008 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4009 bits of d0 - regardless of what unit size is being held in d0. So
4010 the offset of the first uint8 in d0 is 7, but the offset of the
4011 first float is 4. This code works as-is for little-endian
4012 targets. */
4013
4014static struct type *
4015arm_neon_quad_type (struct gdbarch *gdbarch)
4016{
4017 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4018
4019 if (tdep->neon_quad_type == NULL)
4020 {
4021 struct type *t, *elem;
4022
4023 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4024 TYPE_CODE_UNION);
4025 elem = builtin_type (gdbarch)->builtin_uint8;
4026 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4027 elem = builtin_type (gdbarch)->builtin_uint16;
4028 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4029 elem = builtin_type (gdbarch)->builtin_uint32;
4030 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4031 elem = builtin_type (gdbarch)->builtin_uint64;
4032 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4033 elem = builtin_type (gdbarch)->builtin_float;
4034 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4035 elem = builtin_type (gdbarch)->builtin_double;
4036 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4037
4038 TYPE_VECTOR (t) = 1;
4039 TYPE_NAME (t) = "neon_q";
4040 tdep->neon_quad_type = t;
4041 }
4042
4043 return tdep->neon_quad_type;
4044}
4045
4046/* Return the GDB type object for the "standard" data type of data in
4047 register N. */
4048
4049static struct type *
4050arm_register_type (struct gdbarch *gdbarch, int regnum)
4051{
4052 int num_regs = gdbarch_num_regs (gdbarch);
4053
4054 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4055 && regnum >= num_regs && regnum < num_regs + 32)
4056 return builtin_type (gdbarch)->builtin_float;
4057
4058 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4059 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4060 return arm_neon_quad_type (gdbarch);
4061
4062 /* If the target description has register information, we are only
4063 in this function so that we can override the types of
4064 double-precision registers for NEON. */
4065 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4066 {
4067 struct type *t = tdesc_register_type (gdbarch, regnum);
4068
4069 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4070 && TYPE_CODE (t) == TYPE_CODE_FLT
4071 && gdbarch_tdep (gdbarch)->have_neon)
4072 return arm_neon_double_type (gdbarch);
4073 else
4074 return t;
4075 }
4076
4077 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4078 {
4079 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4080 return builtin_type (gdbarch)->builtin_void;
4081
4082 return arm_ext_type (gdbarch);
4083 }
4084 else if (regnum == ARM_SP_REGNUM)
4085 return builtin_type (gdbarch)->builtin_data_ptr;
4086 else if (regnum == ARM_PC_REGNUM)
4087 return builtin_type (gdbarch)->builtin_func_ptr;
4088 else if (regnum >= ARRAY_SIZE (arm_register_names))
4089 /* These registers are only supported on targets which supply
4090 an XML description. */
4091 return builtin_type (gdbarch)->builtin_int0;
4092 else
4093 return builtin_type (gdbarch)->builtin_uint32;
4094}
4095
4096/* Map a DWARF register REGNUM onto the appropriate GDB register
4097 number. */
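/* For example, DWARF register 0 maps to r0, register 64 maps to the GDB
 register named "s0", and register 256 maps to "d0". */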
4098
4099static int
4100arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4101{
4102 /* Core integer regs. */
4103 if (reg >= 0 && reg <= 15)
4104 return reg;
4105
4106 /* Legacy FPA encoding. These were once used in a way which
4107 overlapped with VFP register numbering, so their use is
4108 discouraged, but GDB doesn't support the ARM toolchain
4109 which used them for VFP. */
4110 if (reg >= 16 && reg <= 23)
4111 return ARM_F0_REGNUM + reg - 16;
4112
4113 /* New assignments for the FPA registers. */
4114 if (reg >= 96 && reg <= 103)
4115 return ARM_F0_REGNUM + reg - 96;
4116
4117 /* WMMX register assignments. */
4118 if (reg >= 104 && reg <= 111)
4119 return ARM_WCGR0_REGNUM + reg - 104;
4120
4121 if (reg >= 112 && reg <= 127)
4122 return ARM_WR0_REGNUM + reg - 112;
4123
4124 if (reg >= 192 && reg <= 199)
4125 return ARM_WC0_REGNUM + reg - 192;
4126
4127 /* VFP v2 registers. A double precision value is actually
4128 in d1 rather than s2, but the ABI only defines numbering
4129 for the single precision registers. This will "just work"
4130 in GDB for little endian targets (we'll read eight bytes,
4131 starting in s0 and then progressing to s1), but will be
4132 reversed on big endian targets with VFP. This won't
4133 be a problem for the new Neon quad registers; you're supposed
4134 to use DW_OP_piece for those. */
4135 if (reg >= 64 && reg <= 95)
4136 {
4137 char name_buf[4];
4138
4139 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4140 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4141 strlen (name_buf));
4142 }
4143
4144 /* VFP v3 / Neon registers. This range is also used for VFP v2
4145 registers, except that it now describes d0 instead of s0. */
4146 if (reg >= 256 && reg <= 287)
4147 {
4148 char name_buf[4];
4149
4150 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4151 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4152 strlen (name_buf));
4153 }
4154
4155 return -1;
4156}
4157
4158/* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4159static int
4160arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4161{
4162 int reg = regnum;
4163 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4164
4165 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4166 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4167
4168 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4169 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4170
4171 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4172 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4173
4174 if (reg < NUM_GREGS)
4175 return SIM_ARM_R0_REGNUM + reg;
4176 reg -= NUM_GREGS;
4177
4178 if (reg < NUM_FREGS)
4179 return SIM_ARM_FP0_REGNUM + reg;
4180 reg -= NUM_FREGS;
4181
4182 if (reg < NUM_SREGS)
4183 return SIM_ARM_FPS_REGNUM + reg;
4184 reg -= NUM_SREGS;
4185
4186 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4187}
4188
4189/* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4190 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4191 NULL if an error occurs. BUF is freed. */
4192
4193static gdb_byte *
4194extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4195 int old_len, int new_len)
4196{
4197 gdb_byte *new_buf;
4198 int bytes_to_read = new_len - old_len;
4199
4200 new_buf = (gdb_byte *) xmalloc (new_len);
4201 memcpy (new_buf + bytes_to_read, buf, old_len);
4202 xfree (buf);
4203 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4204 {
4205 xfree (new_buf);
4206 return NULL;
4207 }
4208 return new_buf;
4209}
4210
4211/* An IT block is at most the 2-byte IT instruction followed by
4212 four 4-byte instructions. The furthest back we must search to
4213 find an IT block that affects the current instruction is thus
4214 2 + 3 * 4 == 14 bytes. */
4215#define MAX_IT_BLOCK_PREFIX 14
4216
4217/* Use a quick scan if there are more than this many bytes of
4218 code. */
4219#define IT_SCAN_THRESHOLD 32
4220
4221/* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4222 A breakpoint in an IT block may not be hit, depending on the
4223 condition flags. */
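/* For example, if BPADDR falls on an instruction covered by an "ITTE"
 block, the breakpoint is moved back onto the IT instruction itself,
 since the IT instruction is always executed. */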
4224static CORE_ADDR
4225arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4226{
4227 gdb_byte *buf;
4228 char map_type;
4229 CORE_ADDR boundary, func_start;
4230 int buf_len;
4231 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4232 int i, any, last_it, last_it_count;
4233
4234 /* If we are using BKPT breakpoints, none of this is necessary. */
4235 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4236 return bpaddr;
4237
4238 /* ARM mode does not have this problem. */
4239 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4240 return bpaddr;
4241
4242 /* We are setting a breakpoint in Thumb code that could potentially
4243 contain an IT block. The first step is to find how much Thumb
4244 code there is; we do not need to read outside of known Thumb
4245 sequences. */
4246 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4247 if (map_type == 0)
4248 /* Thumb-2 code must have mapping symbols to have a chance. */
4249 return bpaddr;
4250
4251 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4252
4253 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4254 && func_start > boundary)
4255 boundary = func_start;
4256
4257 /* Search for a candidate IT instruction. We have to do some fancy
4258 footwork to distinguish a real IT instruction from the second
4259 half of a 32-bit instruction, but there is no need for that if
4260 there's no candidate. */
4261 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4262 if (buf_len == 0)
4263 /* No room for an IT instruction. */
4264 return bpaddr;
4265
4266 buf = (gdb_byte *) xmalloc (buf_len);
4267 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4268 return bpaddr;
4269 any = 0;
4270 for (i = 0; i < buf_len; i += 2)
4271 {
4272 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4273 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4274 {
4275 any = 1;
4276 break;
4277 }
4278 }
4279
4280 if (any == 0)
4281 {
4282 xfree (buf);
4283 return bpaddr;
4284 }
4285
4286 /* OK, the code bytes before this instruction contain at least one
4287 halfword which resembles an IT instruction. We know that it's
4288 Thumb code, but there are still two possibilities. Either the
4289 halfword really is an IT instruction, or it is the second half of
4290 a 32-bit Thumb instruction. The only way we can tell is to
4291 scan forwards from a known instruction boundary. */
4292 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4293 {
4294 int definite;
4295
4296 /* There's a lot of code before this instruction. Start with an
4297 optimistic search; it's easy to recognize halfwords that can
4298 not be the start of a 32-bit instruction, and use that to
4299 lock on to the instruction boundaries. */
4300 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4301 if (buf == NULL)
4302 return bpaddr;
4303 buf_len = IT_SCAN_THRESHOLD;
4304
4305 definite = 0;
4306 for (i = 0; i < buf_len - MAX_IT_BLOCK_PREFIX && ! definite; i += 2)
4307 {
4308 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4309 if (thumb_insn_size (inst1) == 2)
4310 {
4311 definite = 1;
4312 break;
4313 }
4314 }
4315
4316 /* At this point, if DEFINITE, BUF[I] is the first place we
4317 are sure that we know the instruction boundaries, and it is far
4318 enough from BPADDR that we could not miss an IT instruction
4319 affecting BPADDR. If ! DEFINITE, give up - start from a
4320 known boundary. */
4321 if (! definite)
4322 {
4323 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4324 bpaddr - boundary);
4325 if (buf == NULL)
4326 return bpaddr;
4327 buf_len = bpaddr - boundary;
4328 i = 0;
4329 }
4330 }
4331 else
4332 {
4333 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4334 if (buf == NULL)
4335 return bpaddr;
4336 buf_len = bpaddr - boundary;
4337 i = 0;
4338 }
4339
4340 /* Scan forwards. Find the last IT instruction before BPADDR. */
4341 last_it = -1;
4342 last_it_count = 0;
4343 while (i < buf_len)
4344 {
4345 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4346 last_it_count--;
4347 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4348 {
4349 last_it = i;
4350 if (inst1 & 0x0001)
4351 last_it_count = 4;
4352 else if (inst1 & 0x0002)
4353 last_it_count = 3;
4354 else if (inst1 & 0x0004)
4355 last_it_count = 2;
4356 else
4357 last_it_count = 1;
4358 }
4359 i += thumb_insn_size (inst1);
4360 }
4361
4362 xfree (buf);
4363
4364 if (last_it == -1)
4365 /* There wasn't really an IT instruction after all. */
4366 return bpaddr;
4367
4368 if (last_it_count < 1)
4369 /* It was too far away. */
4370 return bpaddr;
4371
4372 /* This really is a trouble spot. Move the breakpoint to the IT
4373 instruction. */
4374 return bpaddr - buf_len + last_it;
4375}
4376
4377/* ARM displaced stepping support.
4378
4379 Generally ARM displaced stepping works as follows:
4380
4381 1. When an instruction is to be single-stepped, it is first decoded by
4382 arm_process_displaced_insn. Depending on the type of instruction, it is
4383 then copied to a scratch location, possibly in a modified form. The
4384 copy_* set of functions performs such modification, as necessary. A
4385 breakpoint is placed after the modified instruction in the scratch space
4386 to return control to GDB. Note in particular that instructions which
4387 modify the PC will no longer do so after modification.
4388
4389 2. The instruction is single-stepped, by setting the PC to the scratch
4390 location address, and resuming. Control returns to GDB when the
4391 breakpoint is hit.
4392
4393 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4394 function used for the current instruction. This function's job is to
4395 put the CPU/memory state back to what it would have been if the
4396 instruction had been executed unmodified in its original location. */
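 For example, an ARM-mode "bl" instruction is copied to the scratch pad
 as a NOP (see arm_copy_b_bl_blx below); when the breakpoint following it
 is hit, cleanup_branch computes the branch destination from the original
 PC value and writes LR and the PC just as the unmodified instruction
 would have done.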
4397
4398/* NOP instruction (mov r0, r0). */
4399#define ARM_NOP 0xe1a00000
4400#define THUMB_NOP 0x4600
4401
4402/* Helper for register reads for displaced stepping. In particular, this
4403 returns the PC as it would be seen by the instruction at its original
4404 location. */
4405
4406ULONGEST
4407displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4408 int regno)
4409{
4410 ULONGEST ret;
4411 CORE_ADDR from = dsc->insn_addr;
4412
4413 if (regno == ARM_PC_REGNUM)
4414 {
4415 /* Compute pipeline offset:
4416 - When executing an ARM instruction, PC reads as the address of the
4417 current instruction plus 8.
4418 - When executing a Thumb instruction, PC reads as the address of the
4419 current instruction plus 4. */
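 /* For example, an ARM instruction at 0x1000 that reads the PC sees
 0x1008; the same instruction in Thumb mode would see 0x1004. */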
4420
4421 if (!dsc->is_thumb)
4422 from += 8;
4423 else
4424 from += 4;
4425
4426 if (debug_displaced)
4427 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4428 (unsigned long) from);
4429 return (ULONGEST) from;
4430 }
4431 else
4432 {
4433 regcache_cooked_read_unsigned (regs, regno, &ret);
4434 if (debug_displaced)
4435 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4436 regno, (unsigned long) ret);
4437 return ret;
4438 }
4439}
4440
4441static int
4442displaced_in_arm_mode (struct regcache *regs)
4443{
4444 ULONGEST ps;
4445 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4446
4447 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4448
4449 return (ps & t_bit) == 0;
4450}
4451
4452/* Write to the PC as from a branch instruction. */
4453
4454static void
4455branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4456 ULONGEST val)
4457{
4458 if (!dsc->is_thumb)
4459 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4460 architecture versions < 6. */
4461 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4462 val & ~(ULONGEST) 0x3);
4463 else
4464 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4465 val & ~(ULONGEST) 0x1);
4466}
4467
4468/* Write to the PC as from a branch-exchange instruction. */
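/* If bit 0 of VAL is set we switch to Thumb state; if bits 1:0 are 00 we
 switch to ARM state; a value with bits 1:0 equal to 10 is unpredictable,
 which the code below handles by warning and forcing word-aligned ARM
 state. */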
4469
4470static void
4471bx_write_pc (struct regcache *regs, ULONGEST val)
4472{
4473 ULONGEST ps;
4474 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4475
4476 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4477
4478 if ((val & 1) == 1)
4479 {
4480 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4481 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4482 }
4483 else if ((val & 2) == 0)
4484 {
4485 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4486 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4487 }
4488 else
4489 {
4490 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4491 mode, align dest to 4 bytes). */
4492 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4493 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4494 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4495 }
4496}
4497
4498/* Write to the PC as if from a load instruction. */
4499
4500static void
4501load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4502 ULONGEST val)
4503{
4504 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4505 bx_write_pc (regs, val);
4506 else
4507 branch_write_pc (regs, dsc, val);
4508}
4509
4510/* Write to the PC as if from an ALU instruction. */
4511
4512static void
4513alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4514 ULONGEST val)
4515{
4516 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4517 bx_write_pc (regs, val);
4518 else
4519 branch_write_pc (regs, dsc, val);
4520}
4521
4522/* Helper for writing to registers for displaced stepping. Writing to the PC
4523 has varying effects depending on the instruction which does the write:
4524 this is controlled by the WRITE_PC argument. */
4525
4526void
4527displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4528 int regno, ULONGEST val, enum pc_write_style write_pc)
4529{
4530 if (regno == ARM_PC_REGNUM)
4531 {
4532 if (debug_displaced)
4533 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4534 (unsigned long) val);
4535 switch (write_pc)
4536 {
4537 case BRANCH_WRITE_PC:
4538 branch_write_pc (regs, dsc, val);
4539 break;
4540
4541 case BX_WRITE_PC:
4542 bx_write_pc (regs, val);
4543 break;
4544
4545 case LOAD_WRITE_PC:
4546 load_write_pc (regs, dsc, val);
4547 break;
4548
4549 case ALU_WRITE_PC:
4550 alu_write_pc (regs, dsc, val);
4551 break;
4552
4553 case CANNOT_WRITE_PC:
4554 warning (_("Instruction wrote to PC in an unexpected way when "
4555 "single-stepping"));
4556 break;
4557
4558 default:
4559 internal_error (__FILE__, __LINE__,
4560 _("Invalid argument to displaced_write_reg"));
4561 }
4562
4563 dsc->wrote_to_pc = 1;
4564 }
4565 else
4566 {
4567 if (debug_displaced)
4568 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4569 regno, (unsigned long) val);
4570 regcache_cooked_write_unsigned (regs, regno, val);
4571 }
4572}
4573
4574/* This function is used to concisely determine if an instruction INSN
4575 references PC. Register fields of interest in INSN should have the
4576 corresponding fields of BITMASK set to 0b1111. The function
4577 returns 1 if any of these fields in INSN reference the PC
4578 (also 0b1111, r15), else it returns 0. */
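/* For example, insn_references_pc (insn, 0x000f0000ul) is 1 exactly when
 bits 16-19 of INSN (the Rn field of most ARM data-processing and
 load/store encodings) contain 0b1111, i.e. the PC. */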
4579
4580static int
4581insn_references_pc (uint32_t insn, uint32_t bitmask)
4582{
4583 uint32_t lowbit = 1;
4584
4585 while (bitmask != 0)
4586 {
4587 uint32_t mask;
4588
4589 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4590 ;
4591
4592 if (!lowbit)
4593 break;
4594
4595 mask = lowbit * 0xf;
4596
4597 if ((insn & mask) == mask)
4598 return 1;
4599
4600 bitmask &= ~mask;
4601 }
4602
4603 return 0;
4604}
4605
4606/* The simplest copy function. Many instructions have the same effect no
4607 matter what address they are executed at: in those cases, use this. */
4608
4609static int
4610arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4611 const char *iname, arm_displaced_step_closure *dsc)
4612{
4613 if (debug_displaced)
4614 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4615 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4616 iname);
4617
4618 dsc->modinsn[0] = insn;
4619
4620 return 0;
4621}
4622
4623static int
4624thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4625 uint16_t insn2, const char *iname,
4626 arm_displaced_step_closure *dsc)
4627{
4628 if (debug_displaced)
4629 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4630 "opcode/class '%s' unmodified\n", insn1, insn2,
4631 iname);
4632
4633 dsc->modinsn[0] = insn1;
4634 dsc->modinsn[1] = insn2;
4635 dsc->numinsns = 2;
4636
4637 return 0;
4638}
4639
4640/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4641 modification. */
4642static int
4643thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4644 const char *iname,
4645 arm_displaced_step_closure *dsc)
4646{
4647 if (debug_displaced)
4648 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4649 "opcode/class '%s' unmodified\n", insn,
4650 iname);
4651
4652 dsc->modinsn[0] = insn;
4653
4654 return 0;
4655}
4656
4657/* Preload instructions with immediate offset. */
4658
4659static void
4660cleanup_preload (struct gdbarch *gdbarch,
4661 struct regcache *regs, arm_displaced_step_closure *dsc)
4662{
4663 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4664 if (!dsc->u.preload.immed)
4665 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4666}
4667
4668static void
4669install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4670 arm_displaced_step_closure *dsc, unsigned int rn)
4671{
4672 ULONGEST rn_val;
4673 /* Preload instructions:
4674
4675 {pli/pld} [rn, #+/-imm]
4676 ->
4677 {pli/pld} [r0, #+/-imm]. */
4678
4679 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4680 rn_val = displaced_read_reg (regs, dsc, rn);
4681 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4682 dsc->u.preload.immed = 1;
4683
4684 dsc->cleanup = &cleanup_preload;
4685}
4686
4687static int
4688arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4689 arm_displaced_step_closure *dsc)
4690{
4691 unsigned int rn = bits (insn, 16, 19);
4692
4693 if (!insn_references_pc (insn, 0x000f0000ul))
4694 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4695
4696 if (debug_displaced)
4697 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4698 (unsigned long) insn);
4699
4700 dsc->modinsn[0] = insn & 0xfff0ffff;
4701
4702 install_preload (gdbarch, regs, dsc, rn);
4703
4704 return 0;
4705}
4706
4707static int
4708thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4709 struct regcache *regs, arm_displaced_step_closure *dsc)
4710{
4711 unsigned int rn = bits (insn1, 0, 3);
4712 unsigned int u_bit = bit (insn1, 7);
4713 int imm12 = bits (insn2, 0, 11);
4714 ULONGEST pc_val;
4715
4716 if (rn != ARM_PC_REGNUM)
4717 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4718
4719 /* The PC is only allowed in PLI (immediate, literal) Encoding T3 and
4720 PLD (literal) Encoding T1. */
4721 if (debug_displaced)
4722 fprintf_unfiltered (gdb_stdlog,
4723 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4724 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4725 imm12);
4726
4727 if (!u_bit)
4728 imm12 = -1 * imm12;
4729
4730 /* Rewrite instruction {pli/pld} PC imm12 into:
4731 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4732
4733 {pli/pld} [r0, r1]
4734
4735 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4736
4737 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4738 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4739
4740 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4741
4742 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4743 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4744 dsc->u.preload.immed = 0;
4745
4746 /* {pli/pld} [r0, r1] */
4747 dsc->modinsn[0] = insn1 & 0xfff0;
4748 dsc->modinsn[1] = 0xf001;
4749 dsc->numinsns = 2;
4750
4751 dsc->cleanup = &cleanup_preload;
4752 return 0;
4753}
4754
4755/* Preload instructions with register offset. */
4756
4757static void
4758install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4759 arm_displaced_step_closure *dsc, unsigned int rn,
4760 unsigned int rm)
4761{
4762 ULONGEST rn_val, rm_val;
4763
4764 /* Preload register-offset instructions:
4765
4766 {pli/pld} [rn, rm {, shift}]
4767 ->
4768 {pli/pld} [r0, r1 {, shift}]. */
4769
4770 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4771 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4772 rn_val = displaced_read_reg (regs, dsc, rn);
4773 rm_val = displaced_read_reg (regs, dsc, rm);
4774 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4775 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4776 dsc->u.preload.immed = 0;
4777
4778 dsc->cleanup = &cleanup_preload;
4779}
4780
4781static int
4782arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4783 struct regcache *regs,
4784 arm_displaced_step_closure *dsc)
4785{
4786 unsigned int rn = bits (insn, 16, 19);
4787 unsigned int rm = bits (insn, 0, 3);
4788
4789
4790 if (!insn_references_pc (insn, 0x000f000ful))
4791 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4792
4793 if (debug_displaced)
4794 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4795 (unsigned long) insn);
4796
4797 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4798
4799 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4800 return 0;
4801}
4802
4803/* Copy/cleanup coprocessor load and store instructions. */
4804
4805static void
4806cleanup_copro_load_store (struct gdbarch *gdbarch,
4807 struct regcache *regs,
4808 arm_displaced_step_closure *dsc)
4809{
4810 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4811
4812 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4813
4814 if (dsc->u.ldst.writeback)
4815 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4816}
4817
4818static void
4819install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4820 arm_displaced_step_closure *dsc,
4821 int writeback, unsigned int rn)
4822{
4823 ULONGEST rn_val;
4824
4825 /* Coprocessor load/store instructions:
4826
4827 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4828 ->
4829 {stc/stc2} [r0, #+/-imm].
4830
4831 ldc/ldc2 are handled identically. */
4832
4833 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4834 rn_val = displaced_read_reg (regs, dsc, rn);
4835 /* PC should be 4-byte aligned. */
4836 rn_val = rn_val & 0xfffffffc;
4837 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4838
4839 dsc->u.ldst.writeback = writeback;
4840 dsc->u.ldst.rn = rn;
4841
4842 dsc->cleanup = &cleanup_copro_load_store;
4843}
4844
4845static int
4846arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4847 struct regcache *regs,
4848 arm_displaced_step_closure *dsc)
4849{
4850 unsigned int rn = bits (insn, 16, 19);
4851
4852 if (!insn_references_pc (insn, 0x000f0000ul))
4853 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4854
4855 if (debug_displaced)
4856 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4857 "load/store insn %.8lx\n", (unsigned long) insn);
4858
4859 dsc->modinsn[0] = insn & 0xfff0ffff;
4860
4861 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4862
4863 return 0;
4864}
4865
4866static int
4867thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4868 uint16_t insn2, struct regcache *regs,
4869 arm_displaced_step_closure *dsc)
4870{
4871 unsigned int rn = bits (insn1, 0, 3);
4872
4873 if (rn != ARM_PC_REGNUM)
4874 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4875 "copro load/store", dsc);
4876
4877 if (debug_displaced)
4878 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4879 "load/store insn %.4x%.4x\n", insn1, insn2);
4880
4881 dsc->modinsn[0] = insn1 & 0xfff0;
4882 dsc->modinsn[1] = insn2;
4883 dsc->numinsns = 2;
4884
4885 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4886 doesn't support writeback, so pass 0. */
4887 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4888
4889 return 0;
4890}
4891
4892/* Clean up branch instructions (actually perform the branch, by setting
4893 PC). */
4894
4895static void
4896cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4897 arm_displaced_step_closure *dsc)
4898{
4899 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4900 int branch_taken = condition_true (dsc->u.branch.cond, status);
4901 enum pc_write_style write_pc = dsc->u.branch.exchange
4902 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4903
4904 if (!branch_taken)
4905 return;
4906
4907 if (dsc->u.branch.link)
4908 {
4909 /* The value of LR should be the address of the insn following the
4910 current one.  So as not to confuse logic handling a later `bx lr'
4911 insn, set bit 0 of the LR value if the current insn is in Thumb mode. */
4912 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4913
4914 if (dsc->is_thumb)
4915 next_insn_addr |= 0x1;
4916
4917 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4918 CANNOT_WRITE_PC);
4919 }
4920
4921 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4922}
4923
4924/* Copy B/BL/BLX instructions with immediate destinations. */
4925
4926static void
4927install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4928 arm_displaced_step_closure *dsc,
4929 unsigned int cond, int exchange, int link, long offset)
4930{
4931 /* Implement "BL<cond> <label>" as:
4932
4933 Preparation: cond <- instruction condition
4934 Insn: mov r0, r0 (nop)
4935 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4936
4937 B<cond> similar, but don't set r14 in cleanup. */
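 For instance, a Thumb-mode BL with OFFSET 0x10 at address FROM ends up
 with dsc->u.branch.dest == FROM + 4 + 0x10, and cleanup_branch also
 writes FROM + dsc->insn_size (with bit 0 set) into LR if the condition
 passes.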
4938
4939 dsc->u.branch.cond = cond;
4940 dsc->u.branch.link = link;
4941 dsc->u.branch.exchange = exchange;
4942
4943 dsc->u.branch.dest = dsc->insn_addr;
4944 if (link && exchange)
4945 /* For BLX, the offset is computed from Align (PC, 4). */
4946 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4947
4948 if (dsc->is_thumb)
4949 dsc->u.branch.dest += 4 + offset;
4950 else
4951 dsc->u.branch.dest += 8 + offset;
4952
4953 dsc->cleanup = &cleanup_branch;
4954}
4955static int
4956arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4957 struct regcache *regs, arm_displaced_step_closure *dsc)
4958{
4959 unsigned int cond = bits (insn, 28, 31);
4960 int exchange = (cond == 0xf);
4961 int link = exchange || bit (insn, 24);
4962 long offset;
4963
4964 if (debug_displaced)
4965 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4966 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4967 (unsigned long) insn);
4968 if (exchange)
4969 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4970 then arrange the switch into Thumb mode. */
4971 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4972 else
4973 offset = bits (insn, 0, 23) << 2;
4974
4975 if (bit (offset, 25))
4976 offset = offset | ~0x3ffffff;
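 /* For example, for the ARM insn 0xeb000001 ("bl" with a 24-bit immediate
 of 1), OFFSET becomes 4, so the branch destination ends up as the
 insn's own address + 8 + 4. */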
4977
4978 dsc->modinsn[0] = ARM_NOP;
4979
4980 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4981 return 0;
4982}
4983
4984static int
4985thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4986 uint16_t insn2, struct regcache *regs,
4987 arm_displaced_step_closure *dsc)
4988{
4989 int link = bit (insn2, 14);
4990 int exchange = link && !bit (insn2, 12);
4991 int cond = INST_AL;
4992 long offset = 0;
4993 int j1 = bit (insn2, 13);
4994 int j2 = bit (insn2, 11);
4995 int s = sbits (insn1, 10, 10);
4996 int i1 = !(j1 ^ bit (insn1, 10));
4997 int i2 = !(j2 ^ bit (insn1, 10));
4998
4999 if (!link && !exchange) /* B */
5000 {
5001 offset = (bits (insn2, 0, 10) << 1);
5002 if (bit (insn2, 12)) /* Encoding T4 */
5003 {
5004 offset |= (bits (insn1, 0, 9) << 12)
5005 | (i2 << 22)
5006 | (i1 << 23)
5007 | (s << 24);
5008 cond = INST_AL;
5009 }
5010 else /* Encoding T3 */
5011 {
5012 offset |= (bits (insn1, 0, 5) << 12)
5013 | (j1 << 18)
5014 | (j2 << 19)
5015 | (s << 20);
5016 cond = bits (insn1, 6, 9);
5017 }
5018 }
5019 else
5020 {
5021 offset = (bits (insn1, 0, 9) << 12);
5022 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5023 offset |= exchange ?
5024 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5025 }
5026
5027 if (debug_displaced)
5028 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5029 "%.4x %.4x with offset %.8lx\n",
5030 link ? (exchange) ? "blx" : "bl" : "b",
5031 insn1, insn2, offset);
5032
5033 dsc->modinsn[0] = THUMB_NOP;
5034
5035 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5036 return 0;
5037}
5038
5039/* Copy B Thumb instructions. */
5040static int
5041thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5042 arm_displaced_step_closure *dsc)
5043{
5044 unsigned int cond = 0;
5045 int offset = 0;
5046 unsigned short bit_12_15 = bits (insn, 12, 15);
5047 CORE_ADDR from = dsc->insn_addr;
5048
5049 if (bit_12_15 == 0xd)
5050 {
5051 /* offset = SignExtend (imm8:0, 32) */
5052 offset = sbits ((insn << 1), 0, 8);
5053 cond = bits (insn, 8, 11);
5054 }
5055 else if (bit_12_15 == 0xe) /* Encoding T2 */
5056 {
5057 offset = sbits ((insn << 1), 0, 11);
5058 cond = INST_AL;
5059 }
5060
5061 if (debug_displaced)
5062 fprintf_unfiltered (gdb_stdlog,
5063 "displaced: copying b immediate insn %.4x "
5064 "with offset %d\n", insn, offset);
5065
5066 dsc->u.branch.cond = cond;
5067 dsc->u.branch.link = 0;
5068 dsc->u.branch.exchange = 0;
5069 dsc->u.branch.dest = from + 4 + offset;
5070
5071 dsc->modinsn[0] = THUMB_NOP;
5072
5073 dsc->cleanup = &cleanup_branch;
5074
5075 return 0;
5076}
5077
5078/* Copy BX/BLX with register-specified destinations. */
5079
5080static void
5081install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5082 arm_displaced_step_closure *dsc, int link,
5083 unsigned int cond, unsigned int rm)
5084{
5085 /* Implement "{BX,BLX}<cond> <reg>" as:
5086
5087 Preparation: cond <- instruction condition
5088 Insn: mov r0, r0 (nop)
5089 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5090
5091 Don't set r14 in cleanup for BX. */
5092
5093 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5094
5095 dsc->u.branch.cond = cond;
5096 dsc->u.branch.link = link;
5097
5098 dsc->u.branch.exchange = 1;
5099
5100 dsc->cleanup = &cleanup_branch;
5101}
5102
5103static int
5104arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5105 struct regcache *regs, arm_displaced_step_closure *dsc)
5106{
5107 unsigned int cond = bits (insn, 28, 31);
5108 /* BX: x12xxx1x
5109 BLX: x12xxx3x. */
5110 int link = bit (insn, 5);
5111 unsigned int rm = bits (insn, 0, 3);
5112
5113 if (debug_displaced)
5114 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5115 (unsigned long) insn);
5116
5117 dsc->modinsn[0] = ARM_NOP;
5118
5119 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5120 return 0;
5121}
5122
5123static int
5124thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5125 struct regcache *regs,
5126 arm_displaced_step_closure *dsc)
5127{
5128 int link = bit (insn, 7);
5129 unsigned int rm = bits (insn, 3, 6);
5130
5131 if (debug_displaced)
5132 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5133 (unsigned short) insn);
5134
5135 dsc->modinsn[0] = THUMB_NOP;
5136
5137 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5138
5139 return 0;
5140}
5141
5142
5143/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5144
5145static void
5146cleanup_alu_imm (struct gdbarch *gdbarch,
5147 struct regcache *regs, arm_displaced_step_closure *dsc)
5148{
5149 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5150 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5151 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5152 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5153}
5154
5155static int
5156arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5157 arm_displaced_step_closure *dsc)
5158{
5159 unsigned int rn = bits (insn, 16, 19);
5160 unsigned int rd = bits (insn, 12, 15);
5161 unsigned int op = bits (insn, 21, 24);
5162 int is_mov = (op == 0xd);
5163 ULONGEST rd_val, rn_val;
5164
5165 if (!insn_references_pc (insn, 0x000ff000ul))
5166 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5167
5168 if (debug_displaced)
5169 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5170 "%.8lx\n", is_mov ? "move" : "ALU",
5171 (unsigned long) insn);
5172
5173 /* Instruction is of form:
5174
5175 <op><cond> rd, [rn,] #imm
5176
5177 Rewrite as:
5178
5179 Preparation: tmp1, tmp2 <- r0, r1;
5180 r0, r1 <- rd, rn
5181 Insn: <op><cond> r0, r1, #imm
5182 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5183 */
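 /* For example, "add r2, pc, #4" (rd == 2, rn == 15) is rewritten as
 "add r0, r1, #4", with r1 holding the value the original PC read would
 have produced; the cleanup then copies r0 back into r2. */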
5184
5185 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5186 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5187 rn_val = displaced_read_reg (regs, dsc, rn);
5188 rd_val = displaced_read_reg (regs, dsc, rd);
5189 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5190 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5191 dsc->rd = rd;
5192
5193 if (is_mov)
5194 dsc->modinsn[0] = insn & 0xfff00fff;
5195 else
5196 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5197
5198 dsc->cleanup = &cleanup_alu_imm;
5199
5200 return 0;
5201}
5202
5203static int
5204thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5205 uint16_t insn2, struct regcache *regs,
5206 arm_displaced_step_closure *dsc)
5207{
5208 unsigned int op = bits (insn1, 5, 8);
5209 unsigned int rn, rm, rd;
5210 ULONGEST rd_val, rn_val;
5211
5212 rn = bits (insn1, 0, 3); /* Rn */
5213 rm = bits (insn2, 0, 3); /* Rm */
5214 rd = bits (insn2, 8, 11); /* Rd */
5215
5216 /* This routine is only called for instruction MOV. */
5217 gdb_assert (op == 0x2 && rn == 0xf);
5218
5219 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5220 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5221
5222 if (debug_displaced)
5223 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5224 "ALU", insn1, insn2);
5225
5226 /* Instruction is of form:
5227
5228 <op><cond> rd, [rn,] #imm
5229
5230 Rewrite as:
5231
5232 Preparation: tmp1, tmp2 <- r0, r1;
5233 r0, r1 <- rd, rn
5234 Insn: <op><cond> r0, r1, #imm
5235 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5236 */
5237
5238 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5239 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5240 rn_val = displaced_read_reg (regs, dsc, rn);
5241 rd_val = displaced_read_reg (regs, dsc, rd);
5242 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5243 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5244 dsc->rd = rd;
5245
5246 dsc->modinsn[0] = insn1;
5247 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5248 dsc->numinsns = 2;
5249
5250 dsc->cleanup = &cleanup_alu_imm;
5251
5252 return 0;
5253}
5254
5255/* Copy/cleanup arithmetic/logic insns with register RHS. */
5256
5257static void
5258cleanup_alu_reg (struct gdbarch *gdbarch,
5259 struct regcache *regs, arm_displaced_step_closure *dsc)
5260{
5261 ULONGEST rd_val;
5262 int i;
5263
5264 rd_val = displaced_read_reg (regs, dsc, 0);
5265
5266 for (i = 0; i < 3; i++)
5267 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5268
5269 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5270}
5271
5272static void
5273install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5274 arm_displaced_step_closure *dsc,
5275 unsigned int rd, unsigned int rn, unsigned int rm)
5276{
5277 ULONGEST rd_val, rn_val, rm_val;
5278
5279 /* Instruction is of form:
5280
5281 <op><cond> rd, [rn,] rm [, <shift>]
5282
5283 Rewrite as:
5284
5285 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5286 r0, r1, r2 <- rd, rn, rm
5287 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5288 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5289 */
5290
5291 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5292 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5293 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5294 rd_val = displaced_read_reg (regs, dsc, rd);
5295 rn_val = displaced_read_reg (regs, dsc, rn);
5296 rm_val = displaced_read_reg (regs, dsc, rm);
5297 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5298 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5299 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5300 dsc->rd = rd;
5301
5302 dsc->cleanup = &cleanup_alu_reg;
5303}
5304
5305static int
5306arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5307 arm_displaced_step_closure *dsc)
5308{
5309 unsigned int op = bits (insn, 21, 24);
5310 int is_mov = (op == 0xd);
5311
5312 if (!insn_references_pc (insn, 0x000ff00ful))
5313 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5314
5315 if (debug_displaced)
5316 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5317 is_mov ? "move" : "ALU", (unsigned long) insn);
5318
5319 if (is_mov)
5320 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5321 else
5322 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5323
5324 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5325 bits (insn, 0, 3));
5326 return 0;
5327}
5328
5329static int
5330thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5331 struct regcache *regs,
5332 arm_displaced_step_closure *dsc)
5333{
5334 unsigned rm, rd;
5335
5336 rm = bits (insn, 3, 6);
5337 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5338
5339 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5340 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5341
5342 if (debug_displaced)
5343 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5344 (unsigned short) insn);
5345
5346 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5347
5348 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5349
5350 return 0;
5351}
5352
5353/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5354
5355static void
5356cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5357 struct regcache *regs,
5358 arm_displaced_step_closure *dsc)
5359{
5360 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5361 int i;
5362
5363 for (i = 0; i < 4; i++)
5364 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5365
5366 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5367}
5368
5369static void
5370install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5371 arm_displaced_step_closure *dsc,
5372 unsigned int rd, unsigned int rn, unsigned int rm,
5373 unsigned rs)
5374{
5375 int i;
5376 ULONGEST rd_val, rn_val, rm_val, rs_val;
5377
5378 /* Instruction is of form:
5379
5380 <op><cond> rd, [rn,] rm, <shift> rs
5381
5382 Rewrite as:
5383
5384 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5385 r0, r1, r2, r3 <- rd, rn, rm, rs
5386 Insn: <op><cond> r0, r1, r2, <shift> r3
5387 Cleanup: tmp5 <- r0
5388 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5389 rd <- tmp5
5390 */
5391
5392 for (i = 0; i < 4; i++)
5393 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5394
5395 rd_val = displaced_read_reg (regs, dsc, rd);
5396 rn_val = displaced_read_reg (regs, dsc, rn);
5397 rm_val = displaced_read_reg (regs, dsc, rm);
5398 rs_val = displaced_read_reg (regs, dsc, rs);
5399 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5400 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5401 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5402 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5403 dsc->rd = rd;
5404 dsc->cleanup = &cleanup_alu_shifted_reg;
5405}
5406
5407static int
5408arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5409 struct regcache *regs,
5410 arm_displaced_step_closure *dsc)
5411{
5412 unsigned int op = bits (insn, 21, 24);
5413 int is_mov = (op == 0xd);
5414 unsigned int rd, rn, rm, rs;
5415
5416 if (!insn_references_pc (insn, 0x000fff0ful))
5417 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5418
5419 if (debug_displaced)
5420 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5421 "%.8lx\n", is_mov ? "move" : "ALU",
5422 (unsigned long) insn);
5423
5424 rn = bits (insn, 16, 19);
5425 rm = bits (insn, 0, 3);
5426 rs = bits (insn, 8, 11);
5427 rd = bits (insn, 12, 15);
5428
5429 if (is_mov)
5430 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5431 else
5432 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5433
5434 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5435
5436 return 0;
5437}
5438
5439/* Clean up load instructions. */
5440
5441static void
5442cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5443 arm_displaced_step_closure *dsc)
5444{
5445 ULONGEST rt_val, rt_val2 = 0, rn_val;
5446
5447 rt_val = displaced_read_reg (regs, dsc, 0);
5448 if (dsc->u.ldst.xfersize == 8)
5449 rt_val2 = displaced_read_reg (regs, dsc, 1);
5450 rn_val = displaced_read_reg (regs, dsc, 2);
5451
5452 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5453 if (dsc->u.ldst.xfersize > 4)
5454 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5455 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5456 if (!dsc->u.ldst.immed)
5457 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5458
5459 /* Handle register writeback. */
5460 if (dsc->u.ldst.writeback)
5461 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5462 /* Put result in right place. */
5463 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5464 if (dsc->u.ldst.xfersize == 8)
5465 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5466}
5467
5468/* Clean up store instructions. */
5469
5470static void
5471cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5472 arm_displaced_step_closure *dsc)
5473{
5474 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5475
5476 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5477 if (dsc->u.ldst.xfersize > 4)
5478 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5479 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5480 if (!dsc->u.ldst.immed)
5481 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5482 if (!dsc->u.ldst.restore_r4)
5483 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5484
5485 /* Writeback. */
5486 if (dsc->u.ldst.writeback)
5487 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5488}
5489
5490/* Copy "extra" load/store instructions. These are halfword/doubleword
5491 transfers, which have a different encoding to byte/word transfers. */
5492
5493static int
5494arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5495 struct regcache *regs, arm_displaced_step_closure *dsc)
5496{
5497 unsigned int op1 = bits (insn, 20, 24);
5498 unsigned int op2 = bits (insn, 5, 6);
5499 unsigned int rt = bits (insn, 12, 15);
5500 unsigned int rn = bits (insn, 16, 19);
5501 unsigned int rm = bits (insn, 0, 3);
5502 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5503 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5504 int immed = (op1 & 0x4) != 0;
5505 int opcode;
5506 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5507
5508 if (!insn_references_pc (insn, 0x000ff00ful))
5509 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5510
5511 if (debug_displaced)
5512 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5513 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5514 (unsigned long) insn);
5515
5516 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5517
5518 if (opcode < 0)
5519 internal_error (__FILE__, __LINE__,
5520 _("copy_extra_ld_st: instruction decode error"));
5521
5522 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5523 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5524 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5525 if (!immed)
5526 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5527
5528 rt_val = displaced_read_reg (regs, dsc, rt);
5529 if (bytesize[opcode] == 8)
5530 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5531 rn_val = displaced_read_reg (regs, dsc, rn);
5532 if (!immed)
5533 rm_val = displaced_read_reg (regs, dsc, rm);
5534
5535 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5536 if (bytesize[opcode] == 8)
5537 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5538 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5539 if (!immed)
5540 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5541
5542 dsc->rd = rt;
5543 dsc->u.ldst.xfersize = bytesize[opcode];
5544 dsc->u.ldst.rn = rn;
5545 dsc->u.ldst.immed = immed;
5546 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5547 dsc->u.ldst.restore_r4 = 0;
5548
5549 if (immed)
5550 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5551 ->
5552 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5553 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5554 else
5555 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5556 ->
5557 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5558 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5559
5560 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5561
5562 return 0;
5563}
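
/* As an illustration of the OPCODE computation above: LDRH (register) has
   op2 == 0x1, the L bit (op1 & 0x1) set and the immediate bit (op1 & 0x4)
   clear, giving opcode == 1, for which load[1] == 1 and bytesize[1] == 2,
   i.e. a two-byte load.  Similarly LDRD (register) has op2 == 0x2 with the
   L bit clear, giving opcode == 4 and an eight-byte transfer.  */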
5564
5565/* Copy byte/half word/word loads and stores. */
5566
5567static void
5568install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5569 arm_displaced_step_closure *dsc, int load,
5570 int immed, int writeback, int size, int usermode,
5571 int rt, int rm, int rn)
5572{
5573 ULONGEST rt_val, rn_val, rm_val = 0;
5574
5575 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5576 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5577 if (!immed)
5578 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5579 if (!load)
5580 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5581
5582 rt_val = displaced_read_reg (regs, dsc, rt);
5583 rn_val = displaced_read_reg (regs, dsc, rn);
5584 if (!immed)
5585 rm_val = displaced_read_reg (regs, dsc, rm);
5586
5587 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5588 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5589 if (!immed)
5590 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5591 dsc->rd = rt;
5592 dsc->u.ldst.xfersize = size;
5593 dsc->u.ldst.rn = rn;
5594 dsc->u.ldst.immed = immed;
5595 dsc->u.ldst.writeback = writeback;
5596
5597 /* To write PC we can do:
5598
5599 Before this sequence of instructions:
5600 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5601     r2 is the Rn value got from displaced_read_reg.
5602
5603 Insn1: push {pc} Write address of STR instruction + offset on stack
5604 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5605 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5606 = addr(Insn1) + offset - addr(Insn3) - 8
5607 = offset - 16
5608 Insn4: add r4, r4, #8 r4 = offset - 8
5609 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5610 = from + offset
5611 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5612
5613 Otherwise we don't know what value to write for PC, since the offset is
5614 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5615 of this can be found in Section "Saving from r15" in
5616 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5617
5618 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5619}
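
/* A concrete trace of the sequence described above, assuming for the sake of
   illustration that the scratch copy starts at 0x2000, the original STR is
   at FROM == 0x8000, and the CPU stores PC as the instruction address plus
   8: Insn1 pushes 0x2008; Insn2 pops it into r4; Insn3 computes
   0x2008 - (0x2008 + 8) = -8; Insn4 adds 8, leaving 0; and Insn5 leaves
   r0 == FROM + 8 == 0x8008, exactly the value the original STR would have
   stored for PC.  */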
5620
5621
5622static int
5623thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5624 uint16_t insn2, struct regcache *regs,
5625 arm_displaced_step_closure *dsc, int size)
5626{
5627 unsigned int u_bit = bit (insn1, 7);
5628 unsigned int rt = bits (insn2, 12, 15);
5629 int imm12 = bits (insn2, 0, 11);
5630 ULONGEST pc_val;
5631
5632 if (debug_displaced)
5633 fprintf_unfiltered (gdb_stdlog,
5634 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5635 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5636 imm12);
5637
5638 if (!u_bit)
5639 imm12 = -1 * imm12;
5640
5641  /* Rewrite instruction LDR Rt imm12 into:
5642
5643     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5644
5645     LDR R0, [R2, R3]
5646
5647     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */
5648
5649
5650 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5651 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5652 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5653
5654 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5655
5656 pc_val = pc_val & 0xfffffffc;
5657
5658 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5659 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5660
5661 dsc->rd = rt;
5662
5663 dsc->u.ldst.xfersize = size;
5664 dsc->u.ldst.immed = 0;
5665 dsc->u.ldst.writeback = 0;
5666 dsc->u.ldst.restore_r4 = 0;
5667
5668  /* LDR.W R0, [R2, R3] */
5669 dsc->modinsn[0] = 0xf852;
5670 dsc->modinsn[1] = 0x3;
5671 dsc->numinsns = 2;
5672
5673 dsc->cleanup = &cleanup_load;
5674
5675 return 0;
5676}
5677
5678static int
5679thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5680 uint16_t insn2, struct regcache *regs,
5681 arm_displaced_step_closure *dsc,
5682 int writeback, int immed)
5683{
5684 unsigned int rt = bits (insn2, 12, 15);
5685 unsigned int rn = bits (insn1, 0, 3);
5686 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5687  /* In the register form of LDR there is also a register Rm, but that is
5688     not allowed to be PC, so we do not have to check it.  */
5689
5690 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5691 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5692 dsc);
5693
5694 if (debug_displaced)
5695 fprintf_unfiltered (gdb_stdlog,
5696 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5697 rt, rn, insn1, insn2);
5698
5699 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5700 0, rt, rm, rn);
5701
5702 dsc->u.ldst.restore_r4 = 0;
5703
5704 if (immed)
5705 /* ldr[b]<cond> rt, [rn, #imm], etc.
5706 ->
5707 ldr[b]<cond> r0, [r2, #imm]. */
5708 {
5709 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5710 dsc->modinsn[1] = insn2 & 0x0fff;
5711 }
5712 else
5713 /* ldr[b]<cond> rt, [rn, rm], etc.
5714 ->
5715 ldr[b]<cond> r0, [r2, r3]. */
5716 {
5717 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5718 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5719 }
5720
5721 dsc->numinsns = 2;
5722
5723 return 0;
5724}
5725
5726
5727static int
5728arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5729 struct regcache *regs,
5730 arm_displaced_step_closure *dsc,
5731 int load, int size, int usermode)
5732{
5733 int immed = !bit (insn, 25);
5734 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5735 unsigned int rt = bits (insn, 12, 15);
5736 unsigned int rn = bits (insn, 16, 19);
5737 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5738
5739 if (!insn_references_pc (insn, 0x000ff00ful))
5740 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5741
5742 if (debug_displaced)
5743 fprintf_unfiltered (gdb_stdlog,
5744 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5745 load ? (size == 1 ? "ldrb" : "ldr")
5746 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5747 rt, rn,
5748 (unsigned long) insn);
5749
5750 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5751 usermode, rt, rm, rn);
5752
5753 if (load || rt != ARM_PC_REGNUM)
5754 {
5755 dsc->u.ldst.restore_r4 = 0;
5756
5757 if (immed)
5758 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5759 ->
5760 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5761 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5762 else
5763 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5764 ->
5765 {ldr,str}[b]<cond> r0, [r2, r3]. */
5766 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5767 }
5768 else
5769 {
5770 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5771 dsc->u.ldst.restore_r4 = 1;
5772 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5773 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5774 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5775 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5776 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5777
5778 /* As above. */
5779 if (immed)
5780 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5781 else
5782 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5783
5784 dsc->numinsns = 6;
5785 }
5786
5787 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5788
5789 return 0;
5790}
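
/* For example, "ldr r3, [pc, #20]" (0xe59f3014) references PC, so it is not
   copied unmodified; being a load it takes the simple path above: masking
   with 0xfff00fff and ORing in 0x20000 gives 0xe5920014, i.e.
   "ldr r0, [r2, #20]", where install_load_store has already copied the PC
   value (FROM + 8) into r2.  Cleanup then moves the loaded value from r0
   back into r3.  */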
5791
5792/* Cleanup LDM instructions with fully-populated register list. This is an
5793 unfortunate corner case: it's impossible to implement correctly by modifying
5794 the instruction. The issue is as follows: we have an instruction,
5795
5796 ldm rN, {r0-r15}
5797
5798 which we must rewrite to avoid loading PC. A possible solution would be to
5799 do the load in two halves, something like (with suitable cleanup
5800 afterwards):
5801
5802 mov r8, rN
5803 ldm[id][ab] r8!, {r0-r7}
5804 str r7, <temp>
5805 ldm[id][ab] r8, {r7-r14}
5806 <bkpt>
5807
5808 but at present there's no suitable place for <temp>, since the scratch space
5809 is overwritten before the cleanup routine is called. For now, we simply
5810 emulate the instruction. */
5811
5812static void
5813cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5814 arm_displaced_step_closure *dsc)
5815{
5816 int inc = dsc->u.block.increment;
5817 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5818 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5819 uint32_t regmask = dsc->u.block.regmask;
5820 int regno = inc ? 0 : 15;
5821 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5822 int exception_return = dsc->u.block.load && dsc->u.block.user
5823 && (regmask & 0x8000) != 0;
5824 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5825 int do_transfer = condition_true (dsc->u.block.cond, status);
5826 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5827
5828 if (!do_transfer)
5829 return;
5830
5831 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5832 sensible we can do here. Complain loudly. */
5833 if (exception_return)
5834 error (_("Cannot single-step exception return"));
5835
5836 /* We don't handle any stores here for now. */
5837 gdb_assert (dsc->u.block.load != 0);
5838
5839 if (debug_displaced)
5840 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5841 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5842 dsc->u.block.increment ? "inc" : "dec",
5843 dsc->u.block.before ? "before" : "after");
5844
5845 while (regmask)
5846 {
5847 uint32_t memword;
5848
5849 if (inc)
5850 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5851 regno++;
5852 else
5853 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5854 regno--;
5855
5856 xfer_addr += bump_before;
5857
5858 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5859 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5860
5861 xfer_addr += bump_after;
5862
5863 regmask &= ~(1 << regno);
5864 }
5865
5866 if (dsc->u.block.writeback)
5867 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5868 CANNOT_WRITE_PC);
5869}
5870
5871/* Clean up an STM which included the PC in the register list. */
5872
5873static void
5874cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5875 arm_displaced_step_closure *dsc)
5876{
5877 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5878 int store_executed = condition_true (dsc->u.block.cond, status);
5879 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5880 CORE_ADDR stm_insn_addr;
5881 uint32_t pc_val;
5882 long offset;
5883 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5884
5885 /* If condition code fails, there's nothing else to do. */
5886 if (!store_executed)
5887 return;
5888
5889 if (dsc->u.block.increment)
5890 {
5891 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5892
5893 if (dsc->u.block.before)
5894 pc_stored_at += 4;
5895 }
5896 else
5897 {
5898 pc_stored_at = dsc->u.block.xfer_addr;
5899
5900 if (dsc->u.block.before)
5901 pc_stored_at -= 4;
5902 }
5903
5904 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5905 stm_insn_addr = dsc->scratch_base;
5906 offset = pc_val - stm_insn_addr;
5907
5908 if (debug_displaced)
5909 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5910 "STM instruction\n", offset);
5911
5912 /* Rewrite the stored PC to the proper value for the non-displaced original
5913 instruction. */
5914 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5915 dsc->insn_addr + offset);
5916}
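
/* To illustrate with made-up numbers: if the scratch copy of the STM starts
   at 0x2000 and the CPU stores PC as the instruction address plus 8, the
   word read back from PC_STORED_AT is 0x2008, OFFSET comes out as 8, and the
   stored value is rewritten to dsc->insn_addr + 8 -- the value the original,
   non-displaced STM would have stored.  */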
5917
5918/* Clean up an LDM which includes the PC in the register list. We clumped all
5919 the registers in the transferred list into a contiguous range r0...rX (to
5920 avoid loading PC directly and losing control of the debugged program), so we
5921 must undo that here. */
5922
5923static void
5924cleanup_block_load_pc (struct gdbarch *gdbarch,
5925 struct regcache *regs,
5926 arm_displaced_step_closure *dsc)
5927{
5928 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5929 int load_executed = condition_true (dsc->u.block.cond, status);
5930 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5931 unsigned int regs_loaded = bitcount (mask);
5932 unsigned int num_to_shuffle = regs_loaded, clobbered;
5933
5934 /* The method employed here will fail if the register list is fully populated
5935 (we need to avoid loading PC directly). */
5936 gdb_assert (num_to_shuffle < 16);
5937
5938 if (!load_executed)
5939 return;
5940
5941 clobbered = (1 << num_to_shuffle) - 1;
5942
5943 while (num_to_shuffle > 0)
5944 {
5945 if ((mask & (1 << write_reg)) != 0)
5946 {
5947 unsigned int read_reg = num_to_shuffle - 1;
5948
5949 if (read_reg != write_reg)
5950 {
5951 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5952 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5953 if (debug_displaced)
5954 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5955 "loaded register r%d to r%d\n"), read_reg,
5956 write_reg);
5957 }
5958 else if (debug_displaced)
5959 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5960 "r%d already in the right place\n"),
5961 write_reg);
5962
5963 clobbered &= ~(1 << write_reg);
5964
5965 num_to_shuffle--;
5966 }
5967
5968 write_reg--;
5969 }
5970
5971 /* Restore any registers we scribbled over. */
5972 for (write_reg = 0; clobbered != 0; write_reg++)
5973 {
5974 if ((clobbered & (1 << write_reg)) != 0)
5975 {
5976 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5977 CANNOT_WRITE_PC);
5978 if (debug_displaced)
5979 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5980 "clobbered register r%d\n"), write_reg);
5981 clobbered &= ~(1 << write_reg);
5982 }
5983 }
5984
5985 /* Perform register writeback manually. */
5986 if (dsc->u.block.writeback)
5987 {
5988 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5989
5990 if (dsc->u.block.increment)
5991 new_rn_val += regs_loaded * 4;
5992 else
5993 new_rn_val -= regs_loaded * 4;
5994
5995 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5996 CANNOT_WRITE_PC);
5997 }
5998}
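
/* A short illustrative trace: for an original "ldm r6, {r1, r3, pc}",
   arm_copy_block_xfer below rewrites the register list to {r0, r1, r2}, so
   the out-of-line copy loads the three memory words into r0-r2.  This
   cleanup then walks down from r15: PC receives r2's value (via
   LOAD_WRITE_PC), r3 receives r1's value, r1 receives r0's value, and
   finally r0 and r2 are restored from dsc->tmp[].  */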
5999
6000/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6001 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6002
6003static int
6004arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6005 struct regcache *regs,
6006 arm_displaced_step_closure *dsc)
6007{
6008 int load = bit (insn, 20);
6009 int user = bit (insn, 22);
6010 int increment = bit (insn, 23);
6011 int before = bit (insn, 24);
6012 int writeback = bit (insn, 21);
6013 int rn = bits (insn, 16, 19);
6014
6015 /* Block transfers which don't mention PC can be run directly
6016 out-of-line. */
6017 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6018 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6019
6020 if (rn == ARM_PC_REGNUM)
6021 {
6022 warning (_("displaced: Unpredictable LDM or STM with "
6023 "base register r15"));
6024 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6025 }
6026
6027 if (debug_displaced)
6028 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6029 "%.8lx\n", (unsigned long) insn);
6030
6031 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6032 dsc->u.block.rn = rn;
6033
6034 dsc->u.block.load = load;
6035 dsc->u.block.user = user;
6036 dsc->u.block.increment = increment;
6037 dsc->u.block.before = before;
6038 dsc->u.block.writeback = writeback;
6039 dsc->u.block.cond = bits (insn, 28, 31);
6040
6041 dsc->u.block.regmask = insn & 0xffff;
6042
6043 if (load)
6044 {
6045 if ((insn & 0xffff) == 0xffff)
6046 {
6047 /* LDM with a fully-populated register list. This case is
6048 particularly tricky. Implement for now by fully emulating the
6049 instruction (which might not behave perfectly in all cases, but
6050 these instructions should be rare enough for that not to matter
6051 too much). */
6052 dsc->modinsn[0] = ARM_NOP;
6053
6054 dsc->cleanup = &cleanup_block_load_all;
6055 }
6056 else
6057 {
6058 /* LDM of a list of registers which includes PC. Implement by
6059 rewriting the list of registers to be transferred into a
6060 contiguous chunk r0...rX before doing the transfer, then shuffling
6061 registers into the correct places in the cleanup routine. */
6062 unsigned int regmask = insn & 0xffff;
6063 unsigned int num_in_list = bitcount (regmask), new_regmask;
6064 unsigned int i;
6065
6066 for (i = 0; i < num_in_list; i++)
6067 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6068
6069 /* Writeback makes things complicated. We need to avoid clobbering
6070 the base register with one of the registers in our modified
6071 register list, but just using a different register can't work in
6072 all cases, e.g.:
6073
6074 ldm r14!, {r0-r13,pc}
6075
6076 which would need to be rewritten as:
6077
6078 ldm rN!, {r0-r14}
6079
6080 but that can't work, because there's no free register for N.
6081
6082 Solve this by turning off the writeback bit, and emulating
6083 writeback manually in the cleanup routine. */
6084
6085 if (writeback)
6086 insn &= ~(1 << 21);
6087
6088 new_regmask = (1 << num_in_list) - 1;
6089
6090 if (debug_displaced)
6091 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6092 "{..., pc}: original reg list %.4x, modified "
6093 "list %.4x\n"), rn, writeback ? "!" : "",
6094 (int) insn & 0xffff, new_regmask);
6095
6096 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6097
6098 dsc->cleanup = &cleanup_block_load_pc;
6099 }
6100 }
6101 else
6102 {
6103 /* STM of a list of registers which includes PC. Run the instruction
6104 as-is, but out of line: this will store the wrong value for the PC,
6105 so we must manually fix up the memory in the cleanup routine.
6106 Doing things this way has the advantage that we can auto-detect
6107 the offset of the PC write (which is architecture-dependent) in
6108 the cleanup routine. */
6109 dsc->modinsn[0] = insn;
6110
6111 dsc->cleanup = &cleanup_block_store_pc;
6112 }
6113
6114 return 0;
6115}
6116
6117static int
6118thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6119 struct regcache *regs,
6120 arm_displaced_step_closure *dsc)
6121{
6122 int rn = bits (insn1, 0, 3);
6123 int load = bit (insn1, 4);
6124 int writeback = bit (insn1, 5);
6125
6126 /* Block transfers which don't mention PC can be run directly
6127 out-of-line. */
6128 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6129 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6130
6131 if (rn == ARM_PC_REGNUM)
6132 {
6133 warning (_("displaced: Unpredictable LDM or STM with "
6134 "base register r15"));
6135 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6136 "unpredictable ldm/stm", dsc);
6137 }
6138
6139 if (debug_displaced)
6140 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6141 "%.4x%.4x\n", insn1, insn2);
6142
6143  /* Clear bit 13, since it should always be zero.  */
6144 dsc->u.block.regmask = (insn2 & 0xdfff);
6145 dsc->u.block.rn = rn;
6146
6147 dsc->u.block.load = load;
6148 dsc->u.block.user = 0;
6149 dsc->u.block.increment = bit (insn1, 7);
6150 dsc->u.block.before = bit (insn1, 8);
6151 dsc->u.block.writeback = writeback;
6152 dsc->u.block.cond = INST_AL;
6153 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6154
6155 if (load)
6156 {
6157 if (dsc->u.block.regmask == 0xffff)
6158 {
6159	  /* This cannot happen: bit 13 was cleared above, so the register list can never be 0xffff.  */
6160 gdb_assert (0);
6161 }
6162 else
6163 {
6164 unsigned int regmask = dsc->u.block.regmask;
6165 unsigned int num_in_list = bitcount (regmask), new_regmask;
6166 unsigned int i;
6167
6168 for (i = 0; i < num_in_list; i++)
6169 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6170
6171 if (writeback)
6172 insn1 &= ~(1 << 5);
6173
6174 new_regmask = (1 << num_in_list) - 1;
6175
6176 if (debug_displaced)
6177 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6178 "{..., pc}: original reg list %.4x, modified "
6179 "list %.4x\n"), rn, writeback ? "!" : "",
6180 (int) dsc->u.block.regmask, new_regmask);
6181
6182 dsc->modinsn[0] = insn1;
6183 dsc->modinsn[1] = (new_regmask & 0xffff);
6184 dsc->numinsns = 2;
6185
6186 dsc->cleanup = &cleanup_block_load_pc;
6187 }
6188 }
6189 else
6190 {
6191 dsc->modinsn[0] = insn1;
6192 dsc->modinsn[1] = insn2;
6193 dsc->numinsns = 2;
6194 dsc->cleanup = &cleanup_block_store_pc;
6195 }
6196 return 0;
6197}
6198
6199/* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6200 This is used to avoid a dependency on BFD's bfd_endian enum. */
6201
6202ULONGEST
6203arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6204 int byte_order)
6205{
6206 return read_memory_unsigned_integer (memaddr, len,
6207 (enum bfd_endian) byte_order);
6208}
6209
6210/* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6211
6212CORE_ADDR
6213arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6214 CORE_ADDR val)
6215{
6216 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6217}
6218
6219/* Wrapper over syscall_next_pc for use in get_next_pcs. */
6220
6221static CORE_ADDR
6222arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6223{
6224 return 0;
6225}
6226
6227/* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6228
6229int
6230arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6231{
6232 return arm_is_thumb (self->regcache);
6233}
6234
6235/* arm_software_single_step is called just before we want to resume the
6236   inferior, if we want to single-step it but there is no hardware or
6237   kernel single-step support.  We find the possible targets of the coming
6238   instruction(s) and set breakpoints at them.  */
6239
6240std::vector<CORE_ADDR>
6241arm_software_single_step (struct regcache *regcache)
6242{
6243 struct gdbarch *gdbarch = regcache->arch ();
6244 struct arm_get_next_pcs next_pcs_ctx;
6245
6246 arm_get_next_pcs_ctor (&next_pcs_ctx,
6247 &arm_get_next_pcs_ops,
6248 gdbarch_byte_order (gdbarch),
6249 gdbarch_byte_order_for_code (gdbarch),
6250 0,
6251 regcache);
6252
6253 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6254
6255 for (CORE_ADDR &pc_ref : next_pcs)
6256 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6257
6258 return next_pcs;
6259}
6260
6261/* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6262 for Linux, where some SVC instructions must be treated specially. */
6263
6264static void
6265cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6266 arm_displaced_step_closure *dsc)
6267{
6268 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6269
6270 if (debug_displaced)
6271 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6272 "%.8lx\n", (unsigned long) resume_addr);
6273
6274 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6275}
6276
6277
6278/* Common copy routine for svc instruction.  */
6279
6280static int
6281install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6282 arm_displaced_step_closure *dsc)
6283{
6284 /* Preparation: none.
6285 Insn: unmodified svc.
6286 Cleanup: pc <- insn_addr + insn_size. */
6287
6288 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6289 instruction. */
6290 dsc->wrote_to_pc = 1;
6291
6292 /* Allow OS-specific code to override SVC handling. */
6293 if (dsc->u.svc.copy_svc_os)
6294 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6295 else
6296 {
6297 dsc->cleanup = &cleanup_svc;
6298 return 0;
6299 }
6300}
6301
6302static int
6303arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6304 struct regcache *regs, arm_displaced_step_closure *dsc)
6305{
6306
6307 if (debug_displaced)
6308 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6309 (unsigned long) insn);
6310
6311 dsc->modinsn[0] = insn;
6312
6313 return install_svc (gdbarch, regs, dsc);
6314}
6315
6316static int
6317thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6318 struct regcache *regs, arm_displaced_step_closure *dsc)
6319{
6320
6321 if (debug_displaced)
6322 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6323 insn);
6324
6325 dsc->modinsn[0] = insn;
6326
6327 return install_svc (gdbarch, regs, dsc);
6328}
6329
6330/* Copy undefined instructions. */
6331
6332static int
6333arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6334 arm_displaced_step_closure *dsc)
6335{
6336 if (debug_displaced)
6337 fprintf_unfiltered (gdb_stdlog,
6338 "displaced: copying undefined insn %.8lx\n",
6339 (unsigned long) insn);
6340
6341 dsc->modinsn[0] = insn;
6342
6343 return 0;
6344}
6345
6346static int
6347thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6348 arm_displaced_step_closure *dsc)
6349{
6350
6351 if (debug_displaced)
6352 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6353 "%.4x %.4x\n", (unsigned short) insn1,
6354 (unsigned short) insn2);
6355
6356 dsc->modinsn[0] = insn1;
6357 dsc->modinsn[1] = insn2;
6358 dsc->numinsns = 2;
6359
6360 return 0;
6361}
6362
6363/* Copy unpredictable instructions. */
6364
6365static int
6366arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6367 arm_displaced_step_closure *dsc)
6368{
6369 if (debug_displaced)
6370 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6371 "%.8lx\n", (unsigned long) insn);
6372
6373 dsc->modinsn[0] = insn;
6374
6375 return 0;
6376}
6377
6378/* The decode_* functions are instruction decoding helpers. They mostly follow
6379 the presentation in the ARM ARM. */
6380
6381static int
6382arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6383 struct regcache *regs,
6384 arm_displaced_step_closure *dsc)
6385{
6386 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6387 unsigned int rn = bits (insn, 16, 19);
6388
6389 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6390 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6391 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6392 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6393 else if ((op1 & 0x60) == 0x20)
6394 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6395 else if ((op1 & 0x71) == 0x40)
6396 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6397 dsc);
6398 else if ((op1 & 0x77) == 0x41)
6399 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6400 else if ((op1 & 0x77) == 0x45)
6401 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6402 else if ((op1 & 0x77) == 0x51)
6403 {
6404 if (rn != 0xf)
6405 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6406 else
6407 return arm_copy_unpred (gdbarch, insn, dsc);
6408 }
6409 else if ((op1 & 0x77) == 0x55)
6410 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6411 else if (op1 == 0x57)
6412 switch (op2)
6413 {
6414 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6415 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6416 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6417 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6418 default: return arm_copy_unpred (gdbarch, insn, dsc);
6419 }
6420 else if ((op1 & 0x63) == 0x43)
6421 return arm_copy_unpred (gdbarch, insn, dsc);
6422 else if ((op2 & 0x1) == 0x0)
6423 switch (op1 & ~0x80)
6424 {
6425 case 0x61:
6426 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6427 case 0x65:
6428 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6429 case 0x71: case 0x75:
6430 /* pld/pldw reg. */
6431 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6432 case 0x63: case 0x67: case 0x73: case 0x77:
6433 return arm_copy_unpred (gdbarch, insn, dsc);
6434 default:
6435 return arm_copy_undef (gdbarch, insn, dsc);
6436 }
6437 else
6438 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6439}
6440
6441static int
6442arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6443 struct regcache *regs,
6444 arm_displaced_step_closure *dsc)
6445{
6446 if (bit (insn, 27) == 0)
6447 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6448 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6449 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6450 {
6451 case 0x0: case 0x2:
6452 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6453
6454 case 0x1: case 0x3:
6455 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6456
6457 case 0x4: case 0x5: case 0x6: case 0x7:
6458 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6459
6460 case 0x8:
6461 switch ((insn & 0xe00000) >> 21)
6462 {
6463 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6464 /* stc/stc2. */
6465 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6466
6467 case 0x2:
6468 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6469
6470 default:
6471 return arm_copy_undef (gdbarch, insn, dsc);
6472 }
6473
6474 case 0x9:
6475 {
6476 int rn_f = (bits (insn, 16, 19) == 0xf);
6477 switch ((insn & 0xe00000) >> 21)
6478 {
6479 case 0x1: case 0x3:
6480 /* ldc/ldc2 imm (undefined for rn == pc). */
6481 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6482 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6483
6484 case 0x2:
6485 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6486
6487 case 0x4: case 0x5: case 0x6: case 0x7:
6488 /* ldc/ldc2 lit (undefined for rn != pc). */
6489 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6490 : arm_copy_undef (gdbarch, insn, dsc);
6491
6492 default:
6493 return arm_copy_undef (gdbarch, insn, dsc);
6494 }
6495 }
6496
6497 case 0xa:
6498 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6499
6500 case 0xb:
6501 if (bits (insn, 16, 19) == 0xf)
6502 /* ldc/ldc2 lit. */
6503 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6504 else
6505 return arm_copy_undef (gdbarch, insn, dsc);
6506
6507 case 0xc:
6508 if (bit (insn, 4))
6509 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6510 else
6511 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6512
6513 case 0xd:
6514 if (bit (insn, 4))
6515 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6516 else
6517 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6518
6519 default:
6520 return arm_copy_undef (gdbarch, insn, dsc);
6521 }
6522}
6523
6524/* Decode miscellaneous instructions in dp/misc encoding space. */
6525
6526static int
6527arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6528 struct regcache *regs,
6529 arm_displaced_step_closure *dsc)
6530{
6531 unsigned int op2 = bits (insn, 4, 6);
6532 unsigned int op = bits (insn, 21, 22);
6533
6534 switch (op2)
6535 {
6536 case 0x0:
6537 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6538
6539 case 0x1:
6540 if (op == 0x1) /* bx. */
6541 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6542 else if (op == 0x3)
6543 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6544 else
6545 return arm_copy_undef (gdbarch, insn, dsc);
6546
6547 case 0x2:
6548 if (op == 0x1)
6549 /* Not really supported. */
6550 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6551 else
6552 return arm_copy_undef (gdbarch, insn, dsc);
6553
6554 case 0x3:
6555 if (op == 0x1)
6556 return arm_copy_bx_blx_reg (gdbarch, insn,
6557 regs, dsc); /* blx register. */
6558 else
6559 return arm_copy_undef (gdbarch, insn, dsc);
6560
6561 case 0x5:
6562 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6563
6564 case 0x7:
6565 if (op == 0x1)
6566 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6567 else if (op == 0x3)
6568 /* Not really supported. */
6569 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6570 /* Fall through. */
6571
6572 default:
6573 return arm_copy_undef (gdbarch, insn, dsc);
6574 }
6575}
6576
6577static int
6578arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6579 struct regcache *regs,
6580 arm_displaced_step_closure *dsc)
6581{
6582 if (bit (insn, 25))
6583 switch (bits (insn, 20, 24))
6584 {
6585 case 0x10:
6586 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6587
6588 case 0x14:
6589 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6590
6591 case 0x12: case 0x16:
6592 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6593
6594 default:
6595 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6596 }
6597 else
6598 {
6599 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6600
6601 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6602 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6603 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6604 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6605 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6606 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6607 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6608 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6609 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6610 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6611 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6612 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6613 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6614	  /* The (op1 & 0x12) == 0x02 argument means "unprivileged".  */
6615 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6616 dsc);
6617 }
6618
6619 /* Should be unreachable. */
6620 return 1;
6621}
6622
6623static int
6624arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6625 struct regcache *regs,
6626 arm_displaced_step_closure *dsc)
6627{
6628 int a = bit (insn, 25), b = bit (insn, 4);
6629 uint32_t op1 = bits (insn, 20, 24);
6630
6631 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6632 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6633 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6634 else if ((!a && (op1 & 0x17) == 0x02)
6635 || (a && (op1 & 0x17) == 0x02 && !b))
6636 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6637 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6638 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6639 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6640 else if ((!a && (op1 & 0x17) == 0x03)
6641 || (a && (op1 & 0x17) == 0x03 && !b))
6642 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6643 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6644 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6645 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6646 else if ((!a && (op1 & 0x17) == 0x06)
6647 || (a && (op1 & 0x17) == 0x06 && !b))
6648 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6649 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6650 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6651 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6652 else if ((!a && (op1 & 0x17) == 0x07)
6653 || (a && (op1 & 0x17) == 0x07 && !b))
6654 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6655
6656 /* Should be unreachable. */
6657 return 1;
6658}
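
/* For instance, a post-indexed "ldrb r3, [r7], #1" has A == 0 and
   op1 == 0x0d, so (op1 & 0x05) == 0x05 and (op1 & 0x17) == 0x05 != 0x07,
   which selects the LDRB case above (load == 1, size == 1, usermode == 0);
   LDRBT would instead have (op1 & 0x17) == 0x07 and take the usermode
   variant.  */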
6659
6660static int
6661arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6662 arm_displaced_step_closure *dsc)
6663{
6664 switch (bits (insn, 20, 24))
6665 {
6666 case 0x00: case 0x01: case 0x02: case 0x03:
6667 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6668
6669 case 0x04: case 0x05: case 0x06: case 0x07:
6670 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6671
6672 case 0x08: case 0x09: case 0x0a: case 0x0b:
6673 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6674 return arm_copy_unmodified (gdbarch, insn,
6675 "decode/pack/unpack/saturate/reverse", dsc);
6676
6677 case 0x18:
6678 if (bits (insn, 5, 7) == 0) /* op2. */
6679 {
6680 if (bits (insn, 12, 15) == 0xf)
6681 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6682 else
6683 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6684 }
6685 else
6686 return arm_copy_undef (gdbarch, insn, dsc);
6687
6688 case 0x1a: case 0x1b:
6689 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6690 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6691 else
6692 return arm_copy_undef (gdbarch, insn, dsc);
6693
6694 case 0x1c: case 0x1d:
6695 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6696 {
6697 if (bits (insn, 0, 3) == 0xf)
6698 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6699 else
6700 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6701 }
6702 else
6703 return arm_copy_undef (gdbarch, insn, dsc);
6704
6705 case 0x1e: case 0x1f:
6706 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6707 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6708 else
6709 return arm_copy_undef (gdbarch, insn, dsc);
6710 }
6711
6712 /* Should be unreachable. */
6713 return 1;
6714}
6715
6716static int
6717arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6718 struct regcache *regs,
6719 arm_displaced_step_closure *dsc)
6720{
6721 if (bit (insn, 25))
6722 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6723 else
6724 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6725}
6726
6727static int
6728arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6729 struct regcache *regs,
6730 arm_displaced_step_closure *dsc)
6731{
6732 unsigned int opcode = bits (insn, 20, 24);
6733
6734 switch (opcode)
6735 {
6736 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6737 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6738
6739 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6740 case 0x12: case 0x16:
6741 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6742
6743 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6744 case 0x13: case 0x17:
6745 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6746
6747 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6748 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6749 /* Note: no writeback for these instructions. Bit 25 will always be
6750 zero though (via caller), so the following works OK. */
6751 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6752 }
6753
6754 /* Should be unreachable. */
6755 return 1;
6756}
6757
6758/* Decode shifted register instructions. */
6759
6760static int
6761thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6762 uint16_t insn2, struct regcache *regs,
6763 arm_displaced_step_closure *dsc)
6764{
6765 /* PC is only allowed to be used in instruction MOV. */
6766
6767 unsigned int op = bits (insn1, 5, 8);
6768 unsigned int rn = bits (insn1, 0, 3);
6769
6770 if (op == 0x2 && rn == 0xf) /* MOV */
6771 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6772 else
6773 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6774 "dp (shift reg)", dsc);
6775}
6776
6777
6778/* Decode extension register load/store. Exactly the same as
6779 arm_decode_ext_reg_ld_st. */
6780
6781static int
6782thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6783 uint16_t insn2, struct regcache *regs,
6784 arm_displaced_step_closure *dsc)
6785{
6786 unsigned int opcode = bits (insn1, 4, 8);
6787
6788 switch (opcode)
6789 {
6790 case 0x04: case 0x05:
6791 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6792 "vfp/neon vmov", dsc);
6793
6794 case 0x08: case 0x0c: /* 01x00 */
6795 case 0x0a: case 0x0e: /* 01x10 */
6796 case 0x12: case 0x16: /* 10x10 */
6797 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6798 "vfp/neon vstm/vpush", dsc);
6799
6800 case 0x09: case 0x0d: /* 01x01 */
6801 case 0x0b: case 0x0f: /* 01x11 */
6802 case 0x13: case 0x17: /* 10x11 */
6803 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6804 "vfp/neon vldm/vpop", dsc);
6805
6806 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6807 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6808 "vstr", dsc);
6809 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6810 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6811 }
6812
6813 /* Should be unreachable. */
6814 return 1;
6815}
6816
6817static int
6818arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6819 struct regcache *regs, arm_displaced_step_closure *dsc)
6820{
6821 unsigned int op1 = bits (insn, 20, 25);
6822 int op = bit (insn, 4);
6823 unsigned int coproc = bits (insn, 8, 11);
6824
6825 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6826 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6827 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6828 && (coproc & 0xe) != 0xa)
6829 /* stc/stc2. */
6830 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6831 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6832 && (coproc & 0xe) != 0xa)
6833 /* ldc/ldc2 imm/lit. */
6834 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6835 else if ((op1 & 0x3e) == 0x00)
6836 return arm_copy_undef (gdbarch, insn, dsc);
6837 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6838 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6839 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6840 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6841 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6842 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6843 else if ((op1 & 0x30) == 0x20 && !op)
6844 {
6845 if ((coproc & 0xe) == 0xa)
6846 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6847 else
6848 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6849 }
6850 else if ((op1 & 0x30) == 0x20 && op)
6851 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6852 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6853 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6854 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6855 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6856 else if ((op1 & 0x30) == 0x30)
6857 return arm_copy_svc (gdbarch, insn, regs, dsc);
6858 else
6859 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6860}
6861
6862static int
6863thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6864 uint16_t insn2, struct regcache *regs,
6865 arm_displaced_step_closure *dsc)
6866{
6867 unsigned int coproc = bits (insn2, 8, 11);
6868 unsigned int bit_5_8 = bits (insn1, 5, 8);
6869 unsigned int bit_9 = bit (insn1, 9);
6870 unsigned int bit_4 = bit (insn1, 4);
6871
6872 if (bit_9 == 0)
6873 {
6874 if (bit_5_8 == 2)
6875 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6876 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6877 dsc);
6878 else if (bit_5_8 == 0) /* UNDEFINED. */
6879 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6880 else
6881 {
6882	  /* coproc is 101x: SIMD/VFP, extension registers load/store.  */
6883 if ((coproc & 0xe) == 0xa)
6884 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6885 dsc);
6886 else /* coproc is not 101x. */
6887 {
6888 if (bit_4 == 0) /* STC/STC2. */
6889 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6890 "stc/stc2", dsc);
6891	    else /* LDC/LDC2 {literal, immediate}.  */
6892 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6893 regs, dsc);
6894 }
6895 }
6896 }
6897 else
6898 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6899
6900 return 0;
6901}
6902
6903static void
6904install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6905 arm_displaced_step_closure *dsc, int rd)
6906{
6907 /* ADR Rd, #imm
6908
6909 Rewrite as:
6910
6911 Preparation: Rd <- PC
6912 Insn: ADD Rd, #imm
6913 Cleanup: Null.
6914 */
6915
6916 /* Rd <- PC */
6917 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6918 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6919}
6920
6921static int
6922thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6923 arm_displaced_step_closure *dsc,
6924 int rd, unsigned int imm)
6925{
6926
6927 /* Encoding T2: ADDS Rd, #imm */
6928 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6929
6930 install_pc_relative (gdbarch, regs, dsc, rd);
6931
6932 return 0;
6933}
6934
6935static int
6936thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6937 struct regcache *regs,
6938 arm_displaced_step_closure *dsc)
6939{
6940 unsigned int rd = bits (insn, 8, 10);
6941 unsigned int imm8 = bits (insn, 0, 7);
6942
6943 if (debug_displaced)
6944 fprintf_unfiltered (gdb_stdlog,
6945 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6946 rd, imm8, insn);
6947
6948 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6949}
6950
6951static int
6952thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6953 uint16_t insn2, struct regcache *regs,
6954 arm_displaced_step_closure *dsc)
6955{
6956 unsigned int rd = bits (insn2, 8, 11);
6957  /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6958     extract the raw immediate fields rather than computing the immediate
6959     value.  When generating the ADD or SUB instruction, the immediate can
6960     then simply be ORed into the encoding.  */
6961 unsigned int imm_3_8 = insn2 & 0x70ff;
6962 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6963
6964 if (debug_displaced)
6965 fprintf_unfiltered (gdb_stdlog,
6966 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6967 rd, imm_i, imm_3_8, insn1, insn2);
6968
6969  if (bit (insn1, 7)) /* Encoding T2 of ADR (the SUB form).  */
6970    {
6971      /* Generate SUB Rd, Rd, #imm (encoding T3 of SUB immediate).  */
6972 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6973 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6974 }
6975  else /* Encoding T3 of ADR (the ADD form).  */
6976    {
6977      /* Generate ADD Rd, Rd, #imm (encoding T3 of ADD immediate).  */
6978 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6979 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6980 }
6981 dsc->numinsns = 2;
6982
6983 install_pc_relative (gdbarch, regs, dsc, rd);
6984
6985 return 0;
6986}
6987
6988static int
6989thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6990 struct regcache *regs,
6991 arm_displaced_step_closure *dsc)
6992{
6993 unsigned int rt = bits (insn1, 8, 10);
6994 unsigned int pc;
6995 int imm8 = (bits (insn1, 0, 7) << 2);
6996
6997  /* LDR Rt, [PC, #imm8]
6998
6999     Rewrite as:
7000
7001     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7002
7003     Insn: LDR R0, [R2, R3];
7004     Cleanup: R2 <- tmp2, R3 <- tmp3, Rt <- R0, R0 <- tmp0.  */
7005
7006 if (debug_displaced)
7007 fprintf_unfiltered (gdb_stdlog,
7008 "displaced: copying thumb ldr r%d [pc #%d]\n"
7009 , rt, imm8);
7010
7011 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7012 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7013 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7014 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7015 /* The assembler calculates the required value of the offset from the
7016 Align(PC,4) value of this instruction to the label. */
7017 pc = pc & 0xfffffffc;
7018
7019 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7020 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7021
7022 dsc->rd = rt;
7023 dsc->u.ldst.xfersize = 4;
7024 dsc->u.ldst.rn = 0;
7025 dsc->u.ldst.immed = 0;
7026 dsc->u.ldst.writeback = 0;
7027 dsc->u.ldst.restore_r4 = 0;
7028
7029  dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7030
7031 dsc->cleanup = &cleanup_load;
7032
7033 return 0;
7034}
7035
7036/* Copy Thumb cbnz/cbz instructions.  */
7037
7038static int
7039thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7040 struct regcache *regs,
7041 arm_displaced_step_closure *dsc)
7042{
7043 int non_zero = bit (insn1, 11);
7044 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7045 CORE_ADDR from = dsc->insn_addr;
7046 int rn = bits (insn1, 0, 2);
7047 int rn_val = displaced_read_reg (regs, dsc, rn);
7048
7049 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7050  /* CBNZ and CBZ do not affect the condition flags.  If the condition is
7051     true, set it to INST_AL so that cleanup_branch knows the branch is
7052     taken; otherwise leave it alone and cleanup_branch will do nothing.  */
7053 if (dsc->u.branch.cond)
7054 {
7055 dsc->u.branch.cond = INST_AL;
7056 dsc->u.branch.dest = from + 4 + imm5;
7057 }
7058 else
7059 dsc->u.branch.dest = from + 2;
7060
7061 dsc->u.branch.link = 0;
7062 dsc->u.branch.exchange = 0;
7063
7064 if (debug_displaced)
7065 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7066 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7067 rn, rn_val, insn1, dsc->u.branch.dest);
7068
7069 dsc->modinsn[0] = THUMB_NOP;
7070
7071 dsc->cleanup = &cleanup_branch;
7072 return 0;
7073}
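
/* For instance, for "cbnz r2, label" with r2 == 0 the condition above is
   false, so the destination is simply from + 2 (the following Thumb
   instruction); if r2 were non-zero the destination would be
   from + 4 + imm5, matching the CBNZ branch-offset rule.  Either way the
   out-of-line copy is just a Thumb NOP and cleanup_branch writes the chosen
   destination into PC.  */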
7074
7075/* Copy Table Branch Byte/Halfword.  */
7076static int
7077thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7078 uint16_t insn2, struct regcache *regs,
7079 arm_displaced_step_closure *dsc)
7080{
7081 ULONGEST rn_val, rm_val;
7082 int is_tbh = bit (insn2, 4);
7083 CORE_ADDR halfwords = 0;
7084 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7085
7086 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7087 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7088
7089 if (is_tbh)
7090 {
7091 gdb_byte buf[2];
7092
7093 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7094 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7095 }
7096 else
7097 {
7098 gdb_byte buf[1];
7099
7100 target_read_memory (rn_val + rm_val, buf, 1);
7101 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7102 }
7103
7104 if (debug_displaced)
7105    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7106			" table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7107 (unsigned int) rn_val, (unsigned int) rm_val,
7108 (unsigned int) halfwords);
7109
7110 dsc->u.branch.cond = INST_AL;
7111 dsc->u.branch.link = 0;
7112 dsc->u.branch.exchange = 0;
7113 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7114
7115 dsc->cleanup = &cleanup_branch;
7116
7117 return 0;
7118}
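
/* For example, for "tbb [r0, r1]" with r0 pointing at the branch table and
   the byte at table[r1] equal to 5, the code above reads HALFWORDS == 5 and
   sets the destination to insn_addr + 4 + 10, which is where the hardware
   TBB would have branched; cleanup_branch then writes that address into
   PC.  */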
7119
7120static void
7121cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7122 arm_displaced_step_closure *dsc)
7123{
7124 /* PC <- r7 */
7125 int val = displaced_read_reg (regs, dsc, 7);
7126 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7127
7128 /* r7 <- r8 */
7129 val = displaced_read_reg (regs, dsc, 8);
7130 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7131
7132 /* r8 <- tmp[0] */
7133 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7134
7135}
7136
7137static int
7138thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7139 struct regcache *regs,
7140 arm_displaced_step_closure *dsc)
7141{
7142 dsc->u.block.regmask = insn1 & 0x00ff;
7143
7144  /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
7145     to:
7146
7147     (1) If the register list is full, that is, r0-r7 are all used:
7148     Prepare: tmp[0] <- r8
7149
7150     POP {r0, r1, ..., r6, r7}; remove PC from reglist
7151     MOV r8, r7; Move value of r7 to r8;
7152     POP {r7}; Store PC value into r7.
7153
7154     Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7155
7156     (2) If the register list is not full, suppose there are N registers in
7157     the list (excluding PC, 0 <= N <= 7).
7158     Prepare: for each i in 0..N, tmp[i] <- ri.
7159
7160     POP {r0, r1, ..., rN};
7161
7162     Cleanup: Set the registers in the original reglist from r0 - rN, then
7163     restore r0 - rN from tmp[] properly.
7164  */
7165 if (debug_displaced)
7166 fprintf_unfiltered (gdb_stdlog,
7167 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7168 dsc->u.block.regmask, insn1);
7169
7170 if (dsc->u.block.regmask == 0xff)
7171 {
7172 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7173
7174 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7175 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7176 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7177
7178 dsc->numinsns = 3;
7179 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7180 }
7181 else
7182 {
7183 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7184 unsigned int i;
7185 unsigned int new_regmask;
7186
7187 for (i = 0; i < num_in_list + 1; i++)
7188 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7189
7190 new_regmask = (1 << (num_in_list + 1)) - 1;
7191
7192 if (debug_displaced)
7193 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7194 "{..., pc}: original reg list %.4x,"
7195 " modified list %.4x\n"),
7196 (int) dsc->u.block.regmask, new_regmask);
7197
7198 dsc->u.block.regmask |= 0x8000;
7199 dsc->u.block.writeback = 0;
7200 dsc->u.block.cond = INST_AL;
7201
7202 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7203
7204 dsc->cleanup = &cleanup_block_load_pc;
7205 }
7206
7207 return 0;
7208}
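
/* As a concrete check of case (1) above: the full-list "pop {r0-r7, pc}"
   encodes as 0xbdff; clearing bit 8 gives 0xbcff, i.e. "pop {r0-r7}", and
   the two fixed instructions that follow are 0x46b8 ("mov r8, r7") and
   0xbc80 ("pop {r7}"), so the word intended for PC ends up in r7 for
   cleanup_pop_pc_16bit_all to write into PC.  */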
7209
7210static void
7211thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7212 struct regcache *regs,
7213 arm_displaced_step_closure *dsc)
7214{
7215 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7216 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7217 int err = 0;
7218
7219 /* 16-bit thumb instructions. */
7220 switch (op_bit_12_15)
7221 {
7222     /* Shift (immediate), add, subtract, move and compare.  */
7223 case 0: case 1: case 2: case 3:
7224 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7225 "shift/add/sub/mov/cmp",
7226 dsc);
7227 break;
7228 case 4:
7229 switch (op_bit_10_11)
7230 {
7231 case 0: /* Data-processing */
7232 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7233 "data-processing",
7234 dsc);
7235 break;
7236 case 1: /* Special data instructions and branch and exchange. */
7237 {
7238 unsigned short op = bits (insn1, 7, 9);
7239 if (op == 6 || op == 7) /* BX or BLX */
7240 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7241 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7242 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7243 else
7244 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7245 dsc);
7246 }
7247 break;
7248 default: /* LDR (literal) */
7249 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7250 }
7251 break;
7252 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7253 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7254 break;
7255 case 10:
7256 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7257 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7258 else /* Generate SP-relative address */
7259 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7260 break;
7261 case 11: /* Misc 16-bit instructions */
7262 {
7263 switch (bits (insn1, 8, 11))
7264 {
7265 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7266 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7267 break;
7268 case 12: case 13: /* POP */
7269 if (bit (insn1, 8)) /* PC is in register list. */
7270 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7271 else
7272 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7273 break;
7274 case 15: /* If-Then, and hints */
7275 if (bits (insn1, 0, 3))
7276 /* If-Then makes up to four following instructions conditional.
7277 The IT instruction itself is not conditional, so handle it as
7278 an ordinary unmodified instruction. */
7279 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7280 dsc);
7281 else
7282 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7283 break;
7284 default:
7285 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7286 }
7287 }
7288 break;
7289 case 12:
7290 if (op_bit_10_11 < 2) /* Store multiple registers */
7291 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7292 else /* Load multiple registers */
7293 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7294 break;
7295 case 13: /* Conditional branch and supervisor call */
7296 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7297 err = thumb_copy_b (gdbarch, insn1, dsc);
7298 else
7299 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7300 break;
7301 case 14: /* Unconditional branch */
7302 err = thumb_copy_b (gdbarch, insn1, dsc);
7303 break;
7304 default:
7305 err = 1;
7306 }
7307
7308 if (err)
7309 internal_error (__FILE__, __LINE__,
7310 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7311}
7312
7313static int
7314decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7315 uint16_t insn1, uint16_t insn2,
7316 struct regcache *regs,
7317 arm_displaced_step_closure *dsc)
7318{
7319 int rt = bits (insn2, 12, 15);
7320 int rn = bits (insn1, 0, 3);
7321 int op1 = bits (insn1, 7, 8);
7322
7323 switch (bits (insn1, 5, 6))
7324 {
7325 case 0: /* Load byte and memory hints */
7326 if (rt == 0xf) /* PLD/PLI */
7327 {
7328 if (rn == 0xf)
7329 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7330 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7331 else
7332 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7333 "pli/pld", dsc);
7334 }
7335 else
7336 {
7337 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7338 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7339 1);
7340 else
7341 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7342 "ldrb{reg, immediate}/ldrbt",
7343 dsc);
7344 }
7345
7346 break;
7347 case 1: /* Load halfword and memory hints. */
7348 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7349 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7350 "pld/unalloc memhint", dsc);
7351 else
7352 {
7353 if (rn == 0xf)
7354 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7355 2);
7356 else
7357 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7358 "ldrh/ldrht", dsc);
7359 }
7360 break;
7361 case 2: /* Load word */
7362 {
7363 int insn2_bit_8_11 = bits (insn2, 8, 11);
7364
7365 if (rn == 0xf)
7366 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7367 else if (op1 == 0x1) /* Encoding T3 */
7368 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7369 0, 1);
7370 else /* op1 == 0x0 */
7371 {
7372 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7373 /* LDR (immediate) */
7374 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7375 dsc, bit (insn2, 8), 1);
7376 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7377 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7378 "ldrt", dsc);
7379 else
7380 /* LDR (register) */
7381 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7382 dsc, 0, 0);
7383 }
7384 break;
7385 }
7386 default:
7387 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7388 break;
7389 }
7390 return 0;
7391}
7392
7393static void
7394thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7395 uint16_t insn2, struct regcache *regs,
7396 arm_displaced_step_closure *dsc)
7397{
7398 int err = 0;
7399 unsigned short op = bit (insn2, 15);
7400 unsigned int op1 = bits (insn1, 11, 12);
7401
7402 switch (op1)
7403 {
7404 case 1:
7405 {
7406 switch (bits (insn1, 9, 10))
7407 {
7408 case 0:
7409 if (bit (insn1, 6))
7410 {
7411 /* Load/store {dual, exclusive}, table branch. */
7412 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7413 && bits (insn2, 5, 7) == 0)
7414 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7415 dsc);
7416 else
7417 /* PC is not allowed to be used in load/store {dual, exclusive}
7418 instructions. */
7419 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7420 "load/store dual/ex", dsc);
7421 }
7422 else /* load/store multiple */
7423 {
7424 switch (bits (insn1, 7, 8))
7425 {
7426 case 0: case 3: /* SRS, RFE */
7427 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7428 "srs/rfe", dsc);
7429 break;
7430 case 1: case 2: /* LDM/STM/PUSH/POP */
7431 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7432 break;
7433 }
7434 }
7435 break;
7436
7437 case 1:
7438 /* Data-processing (shift register). */
7439 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7440 dsc);
7441 break;
7442 default: /* Coprocessor instructions. */
7443 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7444 break;
7445 }
7446 break;
7447 }
7448 case 2: /* op1 = 2 */
7449 if (op) /* Branch and misc control. */
7450 {
7451 if (bit (insn2, 14) /* BLX/BL */
7452 || bit (insn2, 12) /* Unconditional branch */
7453 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7454 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7455 else
7456 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7457 "misc ctrl", dsc);
7458 }
7459 else
7460 {
7461 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7462 {
7463 int op = bits (insn1, 4, 8);
7464 int rn = bits (insn1, 0, 3);
7465 if ((op == 0 || op == 0xa) && rn == 0xf)
7466 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7467 regs, dsc);
7468 else
7469 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7470 "dp/pb", dsc);
7471 }
7472 else /* Data processing (modified immediate) */
7473 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7474 "dp/mi", dsc);
7475 }
7476 break;
7477 case 3: /* op1 = 3 */
7478 switch (bits (insn1, 9, 10))
7479 {
7480 case 0:
7481 if (bit (insn1, 4))
7482 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7483 regs, dsc);
7484 else /* NEON Load/Store and Store single data item */
7485 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7486 "neon elt/struct load/store",
7487 dsc);
7488 break;
7489 case 1: /* op1 = 3, bits (9, 10) == 1 */
7490 switch (bits (insn1, 7, 8))
7491 {
7492 case 0: case 1: /* Data processing (register) */
7493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7494 "dp(reg)", dsc);
7495 break;
7496 case 2: /* Multiply and absolute difference */
7497 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7498 "mul/mua/diff", dsc);
7499 break;
7500 case 3: /* Long multiply and divide */
7501 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7502 "lmul/lmua", dsc);
7503 break;
7504 }
7505 break;
7506 default: /* Coprocessor instructions */
7507 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7508 break;
7509 }
7510 break;
7511 default:
7512 err = 1;
7513 }
7514
7515 if (err)
7516 internal_error (__FILE__, __LINE__,
7517 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7518
7519}
7520
7521static void
7522thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7523 struct regcache *regs,
7524 arm_displaced_step_closure *dsc)
7525{
7526 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7527 uint16_t insn1
7528 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7529
7530 if (debug_displaced)
7531 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7532 "at %.8lx\n", insn1, (unsigned long) from);
7533
7534 dsc->is_thumb = 1;
7535 dsc->insn_size = thumb_insn_size (insn1);
7536 if (thumb_insn_size (insn1) == 4)
7537 {
7538 uint16_t insn2
7539 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7540 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7541 }
7542 else
7543 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7544}
7545
7546void
7547arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7548 CORE_ADDR to, struct regcache *regs,
7549 arm_displaced_step_closure *dsc)
7550{
7551 int err = 0;
7552 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7553 uint32_t insn;
7554
7555 /* Most displaced instructions use a 1-instruction scratch space, so set this
7556 here and override below if/when necessary. */
7557 dsc->numinsns = 1;
7558 dsc->insn_addr = from;
7559 dsc->scratch_base = to;
7560 dsc->cleanup = NULL;
7561 dsc->wrote_to_pc = 0;
7562
7563 if (!displaced_in_arm_mode (regs))
7564 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7565
7566 dsc->is_thumb = 0;
7567 dsc->insn_size = 4;
7568 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7569 if (debug_displaced)
7570 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7571 "at %.8lx\n", (unsigned long) insn,
7572 (unsigned long) from);
7573
7574 if ((insn & 0xf0000000) == 0xf0000000)
7575 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7576 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7577 {
7578 case 0x0: case 0x1: case 0x2: case 0x3:
7579 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7580 break;
7581
7582 case 0x4: case 0x5: case 0x6:
7583 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7584 break;
7585
7586 case 0x7:
7587 err = arm_decode_media (gdbarch, insn, dsc);
7588 break;
7589
7590 case 0x8: case 0x9: case 0xa: case 0xb:
7591 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7592 break;
7593
7594 case 0xc: case 0xd: case 0xe: case 0xf:
7595 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7596 break;
7597 }
7598
7599 if (err)
7600 internal_error (__FILE__, __LINE__,
7601 _("arm_process_displaced_insn: Instruction decode error"));
7602}
7603
7604/* Actually set up the scratch space for a displaced instruction. */
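/* As an illustration of the layout produced below: a single displaced
   ARM instruction occupies 4 bytes of modified code at TO followed by
   the 4-byte ARM breakpoint, while a Thumb sequence such as the three
   16-bit instructions built by thumb_copy_pop_pc_16bit occupies 6 bytes
   of code followed by the Thumb breakpoint.  */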
7605
7606void
7607arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7608 CORE_ADDR to, arm_displaced_step_closure *dsc)
7609{
7610 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7611 unsigned int i, len, offset;
7612 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7613 int size = dsc->is_thumb? 2 : 4;
7614 const gdb_byte *bkp_insn;
7615
7616 offset = 0;
7617 /* Poke modified instruction(s). */
7618 for (i = 0; i < dsc->numinsns; i++)
7619 {
7620 if (debug_displaced)
7621 {
7622 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7623 if (size == 4)
7624 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7625 dsc->modinsn[i]);
7626 else if (size == 2)
7627 fprintf_unfiltered (gdb_stdlog, "%.4x",
7628 (unsigned short)dsc->modinsn[i]);
7629
7630 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7631 (unsigned long) to + offset);
7632
7633 }
7634 write_memory_unsigned_integer (to + offset, size,
7635 byte_order_for_code,
7636 dsc->modinsn[i]);
7637 offset += size;
7638 }
7639
7640 /* Choose the correct breakpoint instruction. */
7641 if (dsc->is_thumb)
7642 {
7643 bkp_insn = tdep->thumb_breakpoint;
7644 len = tdep->thumb_breakpoint_size;
7645 }
7646 else
7647 {
7648 bkp_insn = tdep->arm_breakpoint;
7649 len = tdep->arm_breakpoint_size;
7650 }
7651
7652 /* Put breakpoint afterwards. */
7653 write_memory (to + offset, bkp_insn, len);
7654
7655 if (debug_displaced)
7656 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7657 paddress (gdbarch, from), paddress (gdbarch, to));
7658}
7659
7660/* Entry point for cleaning things up after a displaced instruction has been
7661 single-stepped. */
7662
7663void
7664arm_displaced_step_fixup (struct gdbarch *gdbarch,
7665 struct displaced_step_closure *dsc_,
7666 CORE_ADDR from, CORE_ADDR to,
7667 struct regcache *regs)
7668{
7669 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7670
7671 if (dsc->cleanup)
7672 dsc->cleanup (gdbarch, regs, dsc);
7673
7674 if (!dsc->wrote_to_pc)
7675 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7676 dsc->insn_addr + dsc->insn_size);
7677
7678}
7679
7680#include "bfd-in2.h"
7681#include "libcoff.h"
7682
7683static int
7684gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7685{
7686 gdb_disassembler *di
7687 = static_cast<gdb_disassembler *>(info->application_data);
7688 struct gdbarch *gdbarch = di->arch ();
7689
7690 if (arm_pc_is_thumb (gdbarch, memaddr))
7691 {
7692 static asymbol *asym;
7693 static combined_entry_type ce;
7694 static struct coff_symbol_struct csym;
7695 static struct bfd fake_bfd;
7696 static bfd_target fake_target;
7697
7698 if (csym.native == NULL)
7699 {
7700 /* Create a fake symbol vector containing a Thumb symbol.
7701 This is solely so that the code in print_insn_little_arm()
7702 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7703 the presence of a Thumb symbol and switch to decoding
7704 Thumb instructions. */
7705
7706 fake_target.flavour = bfd_target_coff_flavour;
7707 fake_bfd.xvec = &fake_target;
7708 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7709 csym.native = &ce;
7710 csym.symbol.the_bfd = &fake_bfd;
7711 csym.symbol.name = "fake";
7712 asym = (asymbol *) & csym;
7713 }
7714
7715 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7716 info->symbols = &asym;
7717 }
7718 else
7719 info->symbols = NULL;
7720
7721 /* GDB is able to get bfd_mach from exec_bfd, so info->mach is
7722 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7723 opcodes/arm-dis.c:print_insn resets info->mach, and that will trigger
7724 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7725 in default_print_insn. */
7726 if (exec_bfd != NULL)
7727 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7728
7729 return default_print_insn (memaddr, info);
7730}
7731
7732 /* The following define instruction sequences that will cause ARM
7733 CPUs to take an undefined instruction trap. These are used to
7734 signal a breakpoint to GDB.
7735
7736 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7737 modes. A different instruction is required for each mode. The ARM
7738 CPUs can also be big or little endian. Thus four different
7739 instructions are needed to support all cases.
7740
7741 Note: ARMv4 defines several new instructions that will take the
7742 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7743 not in fact add the new instructions. The new undefined
7744 instructions in ARMv4 are all instructions that had no defined
7745 behaviour in earlier chips. There is no guarantee that they will
7746 raise an exception; they may instead be treated as NOPs. In practice, it
7747 may only be safe to rely on instructions matching:
7748
7749 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7750 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7751 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7752
7753 Even this may only be true if the condition predicate is true. The
7754 following use a condition predicate of ALWAYS so it is always TRUE.
7755
7756 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7757 and NetBSD all use a software interrupt rather than an undefined
7758 instruction to force a trap. This can be handled by the
7759 abi-specific code during establishment of the gdbarch vector. */
7760
7761#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7762#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7763#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7764#define THUMB_BE_BREAKPOINT {0xbe,0xbe}
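
/* As a quick, illustrative check of the pattern above: the little-endian
   byte sequence {0xFE,0xDE,0xFF,0xE7} is the word 0xE7FFDEFE, whose
   condition field (bits 31-28) is 0xE (ALWAYS), whose bits 27-25 are
   011, and whose bit 4 is 1, so it falls within the instruction space
   described by the table above.  */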
7765
7766static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7767static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7768static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7769static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7770
7771/* Implement the breakpoint_kind_from_pc gdbarch method. */
7772
7773static int
7774arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7775{
7776 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7777 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7778
7779 if (arm_pc_is_thumb (gdbarch, *pcptr))
7780 {
7781 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7782
7783 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7784 check whether we are replacing a 32-bit instruction. */
7785 if (tdep->thumb2_breakpoint != NULL)
7786 {
7787 gdb_byte buf[2];
7788
7789 if (target_read_memory (*pcptr, buf, 2) == 0)
7790 {
7791 unsigned short inst1;
7792
7793 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7794 if (thumb_insn_size (inst1) == 4)
7795 return ARM_BP_KIND_THUMB2;
7796 }
7797 }
7798
7799 return ARM_BP_KIND_THUMB;
7800 }
7801 else
7802 return ARM_BP_KIND_ARM;
7803
7804}
7805
7806/* Implement the sw_breakpoint_from_kind gdbarch method. */
7807
7808static const gdb_byte *
7809arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7810{
7811 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7812
7813 switch (kind)
7814 {
7815 case ARM_BP_KIND_ARM:
7816 *size = tdep->arm_breakpoint_size;
7817 return tdep->arm_breakpoint;
7818 case ARM_BP_KIND_THUMB:
7819 *size = tdep->thumb_breakpoint_size;
7820 return tdep->thumb_breakpoint;
7821 case ARM_BP_KIND_THUMB2:
7822 *size = tdep->thumb2_breakpoint_size;
7823 return tdep->thumb2_breakpoint;
7824 default:
7825 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7826 }
7827}
7828
7829/* Implement the breakpoint_kind_from_current_state gdbarch method. */
7830
7831static int
7832arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7833 struct regcache *regcache,
7834 CORE_ADDR *pcptr)
7835{
7836 gdb_byte buf[4];
7837
7838 /* Check that the memory pointed to by PC is readable. */
7839 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7840 {
7841 struct arm_get_next_pcs next_pcs_ctx;
7842
7843 arm_get_next_pcs_ctor (&next_pcs_ctx,
7844 &arm_get_next_pcs_ops,
7845 gdbarch_byte_order (gdbarch),
7846 gdbarch_byte_order_for_code (gdbarch),
7847 0,
7848 regcache);
7849
7850 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7851
7852 /* If one of the addresses computed by software single-step matches
7853 *PCPTR, use the Thumb bit of that destination address to decide
7854 which breakpoint kind to use. */
7855 for (CORE_ADDR pc : next_pcs)
7856 {
7857 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7858 {
7859 if (IS_THUMB_ADDR (pc))
7860 {
7861 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7862 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7863 }
7864 else
7865 return ARM_BP_KIND_ARM;
7866 }
7867 }
7868 }
7869
7870 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7871}
7872
7873 /* Extract from REGS, the register cache containing the (raw) register
7874 state, a function return value of type TYPE, and copy that, in
7875 virtual format, into VALBUF. */
7876
7877static void
7878arm_extract_return_value (struct type *type, struct regcache *regs,
7879 gdb_byte *valbuf)
7880{
7881 struct gdbarch *gdbarch = regs->arch ();
7882 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7883
7884 if (TYPE_CODE_FLT == TYPE_CODE (type))
7885 {
7886 switch (gdbarch_tdep (gdbarch)->fp_model)
7887 {
7888 case ARM_FLOAT_FPA:
7889 {
7890 /* The value is in register F0 in internal format. We need to
7891 extract the raw value and then convert it to the desired
7892 internal type. */
7893 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7894
7895 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7896 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7897 valbuf, type);
7898 }
7899 break;
7900
7901 case ARM_FLOAT_SOFT_FPA:
7902 case ARM_FLOAT_SOFT_VFP:
7903 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7904 not using the VFP ABI code. */
7905 case ARM_FLOAT_VFP:
7906 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7907 if (TYPE_LENGTH (type) > 4)
7908 regs->cooked_read (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
7909 break;
7910
7911 default:
7912 internal_error (__FILE__, __LINE__,
7913 _("arm_extract_return_value: "
7914 "Floating point model not supported"));
7915 break;
7916 }
7917 }
7918 else if (TYPE_CODE (type) == TYPE_CODE_INT
7919 || TYPE_CODE (type) == TYPE_CODE_CHAR
7920 || TYPE_CODE (type) == TYPE_CODE_BOOL
7921 || TYPE_CODE (type) == TYPE_CODE_PTR
7922 || TYPE_IS_REFERENCE (type)
7923 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7924 {
7925 /* If the type is a plain integer, then the access is
7926 straightforward. Otherwise we have to play around a bit
7927 more. */
7928 int len = TYPE_LENGTH (type);
7929 int regno = ARM_A1_REGNUM;
7930 ULONGEST tmp;
7931
7932 while (len > 0)
7933 {
7934 /* By using store_unsigned_integer we avoid having to do
7935 anything special for small big-endian values. */
7936 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7937 store_unsigned_integer (valbuf,
7938 (len > INT_REGISTER_SIZE
7939 ? INT_REGISTER_SIZE : len),
7940 byte_order, tmp);
7941 len -= INT_REGISTER_SIZE;
7942 valbuf += INT_REGISTER_SIZE;
7943 }
7944 }
7945 else
7946 {
7947 /* For a structure or union the behaviour is as if the value had
7948 been stored to word-aligned memory and then loaded into
7949 registers with 32-bit load instruction(s). */
7950 int len = TYPE_LENGTH (type);
7951 int regno = ARM_A1_REGNUM;
7952 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7953
7954 while (len > 0)
7955 {
7956 regs->cooked_read (regno++, tmpbuf);
7957 memcpy (valbuf, tmpbuf,
7958 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7959 len -= INT_REGISTER_SIZE;
7960 valbuf += INT_REGISTER_SIZE;
7961 }
7962 }
7963}
7964
7965
7966/* Will a function return an aggregate type in memory or in a
7967 register? Return 0 if an aggregate type can be returned in a
7968 register, 1 if it must be returned in memory. */
7969
7970static int
7971arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7972{
7973 enum type_code code;
7974
7975 type = check_typedef (type);
7976
7977 /* Simple, non-aggregate types (ie not including vectors and
7978 complex) are always returned in a register (or registers). */
7979 code = TYPE_CODE (type);
7980 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7981 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7982 return 0;
7983
7984 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7985 {
7986 /* Vector values should be returned using ARM registers if they
7987 are not over 16 bytes. */
7988 return (TYPE_LENGTH (type) > 16);
7989 }
7990
7991 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7992 {
7993 /* The AAPCS says all aggregates not larger than a word are returned
7994 in a register. */
7995 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7996 return 0;
7997
7998 return 1;
7999 }
8000 else
8001 {
8002 int nRc;
8003
8004 /* All aggregate types that won't fit in a register must be returned
8005 in memory. */
8006 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8007 return 1;
8008
8009 /* In the ARM ABI, "integer" like aggregate types are returned in
8010 registers. For an aggregate type to be integer like, its size
8011 must be less than or equal to INT_REGISTER_SIZE and the
8012 offset of each addressable subfield must be zero. Note that bit
8013 fields are not addressable, and all addressable subfields of
8014 unions always start at offset zero.
8015
8016 This function is based on the behaviour of GCC 2.95.1.
8017 See: gcc/arm.c: arm_return_in_memory() for details.
8018
8019 Note: No version of GCC before GCC 2.95.2 sets up the
8020 parameters correctly for a function returning the following
8021 structure: struct { float f; }; this should be returned in memory,
8022 not a register. Richard Earnshaw sent me a patch, but I do not
8023 know of any way to detect if a function like the above has been
8024 compiled with the correct calling convention. */
8025
8026 /* Assume all other aggregate types can be returned in a register.
8027 Run a check for structures, unions and arrays. */
8028 nRc = 0;
8029
8030 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8031 {
8032 int i;
8033 /* Need to check if this struct/union is "integer" like. For
8034 this to be true, its size must be less than or equal to
8035 INT_REGISTER_SIZE and the offset of each addressable
8036 subfield must be zero. Note that bit fields are not
8037 addressable, and unions always start at offset zero. If any
8038 of the subfields is a floating point type, the struct/union
8039 cannot be an integer type. */
8040
8041 /* For each field in the object, check:
8042 1) Is it FP? --> yes, nRc = 1;
8043 2) Is it addressable (bitpos != 0) and
8044 not packed (bitsize == 0)?
8045 --> yes, nRc = 1
8046 */
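	  /* Illustrative example of these rules: struct { int i; } has its
	     single field at bit offset 0 and so stays integer-like (returned
	     in r0), whereas struct { short a; short b; } has B at bit offset
	     16 with a field bitsize of 0, so nRc becomes 1 and the value is
	     returned in memory.  */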
8047
8048 for (i = 0; i < TYPE_NFIELDS (type); i++)
8049 {
8050 enum type_code field_type_code;
8051
8052 field_type_code
8053 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8054 i)));
8055
8056 /* Is it a floating point type field? */
8057 if (field_type_code == TYPE_CODE_FLT)
8058 {
8059 nRc = 1;
8060 break;
8061 }
8062
8063 /* If bitpos != 0, then we have to care about it. */
8064 if (TYPE_FIELD_BITPOS (type, i) != 0)
8065 {
8066 /* Bitfields are not addressable. If the field bitsize is
8067 zero, then the field is not packed. Hence it cannot be
8068 a bitfield or any other packed type. */
8069 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8070 {
8071 nRc = 1;
8072 break;
8073 }
8074 }
8075 }
8076 }
8077
8078 return nRc;
8079 }
8080}
8081
8082/* Write into appropriate registers a function return value of type
8083 TYPE, given in virtual format. */
8084
8085static void
8086arm_store_return_value (struct type *type, struct regcache *regs,
8087 const gdb_byte *valbuf)
8088{
8089 struct gdbarch *gdbarch = regs->arch ();
8090 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8091
8092 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8093 {
8094 gdb_byte buf[FP_REGISTER_SIZE];
8095
8096 switch (gdbarch_tdep (gdbarch)->fp_model)
8097 {
8098 case ARM_FLOAT_FPA:
8099
8100 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8101 regs->cooked_write (ARM_F0_REGNUM, buf);
8102 break;
8103
8104 case ARM_FLOAT_SOFT_FPA:
8105 case ARM_FLOAT_SOFT_VFP:
8106 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8107 not using the VFP ABI code. */
8108 case ARM_FLOAT_VFP:
8109 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8110 if (TYPE_LENGTH (type) > 4)
8111 regs->cooked_write (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
8112 break;
8113
8114 default:
8115 internal_error (__FILE__, __LINE__,
8116 _("arm_store_return_value: Floating "
8117 "point model not supported"));
8118 break;
8119 }
8120 }
8121 else if (TYPE_CODE (type) == TYPE_CODE_INT
8122 || TYPE_CODE (type) == TYPE_CODE_CHAR
8123 || TYPE_CODE (type) == TYPE_CODE_BOOL
8124 || TYPE_CODE (type) == TYPE_CODE_PTR
8125 || TYPE_IS_REFERENCE (type)
8126 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8127 {
8128 if (TYPE_LENGTH (type) <= 4)
8129 {
8130 /* Values of one word or less are zero/sign-extended and
8131 returned in r0. */
8132 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8133 LONGEST val = unpack_long (type, valbuf);
8134
8135 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8136 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8137 }
8138 else
8139 {
8140 /* Integral values greater than one word are stored in consecutive
8141 registers starting with r0. This will always be a multiple of
8142 the register size. */
8143 int len = TYPE_LENGTH (type);
8144 int regno = ARM_A1_REGNUM;
8145
8146 while (len > 0)
8147 {
8148 regs->cooked_write (regno++, valbuf);
8149 len -= INT_REGISTER_SIZE;
8150 valbuf += INT_REGISTER_SIZE;
8151 }
8152 }
8153 }
8154 else
8155 {
8156 /* For a structure or union the behaviour is as if the value had
8157 been stored to word-aligned memory and then loaded into
8158 registers with 32-bit load instruction(s). */
8159 int len = TYPE_LENGTH (type);
8160 int regno = ARM_A1_REGNUM;
8161 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8162
8163 while (len > 0)
8164 {
8165 memcpy (tmpbuf, valbuf,
8166 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8167 regs->cooked_write (regno++, tmpbuf);
8168 len -= INT_REGISTER_SIZE;
8169 valbuf += INT_REGISTER_SIZE;
8170 }
8171 }
8172}
8173
8174
8175/* Handle function return values. */
8176
8177static enum return_value_convention
8178arm_return_value (struct gdbarch *gdbarch, struct value *function,
8179 struct type *valtype, struct regcache *regcache,
8180 gdb_byte *readbuf, const gdb_byte *writebuf)
8181{
8182 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8183 struct type *func_type = function ? value_type (function) : NULL;
8184 enum arm_vfp_cprc_base_type vfp_base_type;
8185 int vfp_base_count;
8186
8187 if (arm_vfp_abi_for_function (gdbarch, func_type)
8188 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8189 {
8190 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8191 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8192 int i;
8193 for (i = 0; i < vfp_base_count; i++)
8194 {
8195 if (reg_char == 'q')
8196 {
8197 if (writebuf)
8198 arm_neon_quad_write (gdbarch, regcache, i,
8199 writebuf + i * unit_length);
8200
8201 if (readbuf)
8202 arm_neon_quad_read (gdbarch, regcache, i,
8203 readbuf + i * unit_length);
8204 }
8205 else
8206 {
8207 char name_buf[4];
8208 int regnum;
8209
8210 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8211 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8212 strlen (name_buf));
8213 if (writebuf)
8214 regcache->cooked_write (regnum, writebuf + i * unit_length);
8215 if (readbuf)
8216 regcache->cooked_read (regnum, readbuf + i * unit_length);
8217 }
8218 }
8219 return RETURN_VALUE_REGISTER_CONVENTION;
8220 }
8221
8222 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8223 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8224 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8225 {
8226 if (tdep->struct_return == pcc_struct_return
8227 || arm_return_in_memory (gdbarch, valtype))
8228 return RETURN_VALUE_STRUCT_CONVENTION;
8229 }
8230 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8231 {
8232 if (arm_return_in_memory (gdbarch, valtype))
8233 return RETURN_VALUE_STRUCT_CONVENTION;
8234 }
8235
8236 if (writebuf)
8237 arm_store_return_value (valtype, regcache, writebuf);
8238
8239 if (readbuf)
8240 arm_extract_return_value (valtype, regcache, readbuf);
8241
8242 return RETURN_VALUE_REGISTER_CONVENTION;
8243}
8244
8245
8246static int
8247arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8248{
8249 struct gdbarch *gdbarch = get_frame_arch (frame);
8250 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8251 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8252 CORE_ADDR jb_addr;
8253 gdb_byte buf[INT_REGISTER_SIZE];
8254
8255 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8256
8257 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8258 INT_REGISTER_SIZE))
8259 return 0;
8260
8261 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8262 return 1;
8263}
8264
8265/* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8266 return the target PC. Otherwise return 0. */
8267
8268CORE_ADDR
8269arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8270{
8271 const char *name;
8272 int namelen;
8273 CORE_ADDR start_addr;
8274
8275 /* Find the starting address and name of the function containing the PC. */
8276 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8277 {
8278 /* A 'bx reg' trampoline doesn't belong to any function. Do the
8279 check here. */
8280 start_addr = arm_skip_bx_reg (frame, pc);
8281 if (start_addr != 0)
8282 return start_addr;
8283
8284 return 0;
8285 }
8286
8287 /* If PC is in a Thumb call or return stub, return the address of the
8288 target PC, which is in a register. The thunk functions are called
8289 _call_via_xx, where x is the register name. The possible names
8290 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8291 functions, named __ARM_call_via_r[0-7]. */
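  /* For instance (illustrative): a stub named "_call_via_r3" ends in
     the suffix "r3", which matches table[3] below, so the branch target
     is simply the current value of r3 in FRAME.  */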
8292 if (startswith (name, "_call_via_")
8293 || startswith (name, "__ARM_call_via_"))
8294 {
8295 /* Use the name suffix to determine which register contains the
8296 target PC. */
8297 static const char *table[15] =
8298 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8299 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8300 };
8301 int regno;
8302 int offset = strlen (name) - 2;
8303
8304 for (regno = 0; regno <= 14; regno++)
8305 if (strcmp (&name[offset], table[regno]) == 0)
8306 return get_frame_register_unsigned (frame, regno);
8307 }
8308
8309 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8310 non-interworking calls to foo. We could decode the stubs
8311 to find the target but it's easier to use the symbol table. */
8312 namelen = strlen (name);
8313 if (name[0] == '_' && name[1] == '_'
8314 && ((namelen > 2 + strlen ("_from_thumb")
8315 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8316 || (namelen > 2 + strlen ("_from_arm")
8317 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8318 {
8319 char *target_name;
8320 int target_len = namelen - 2;
8321 struct bound_minimal_symbol minsym;
8322 struct objfile *objfile;
8323 struct obj_section *sec;
8324
8325 if (name[namelen - 1] == 'b')
8326 target_len -= strlen ("_from_thumb");
8327 else
8328 target_len -= strlen ("_from_arm");
8329
8330 target_name = (char *) alloca (target_len + 1);
8331 memcpy (target_name, name + 2, target_len);
8332 target_name[target_len] = '\0';
8333
8334 sec = find_pc_section (pc);
8335 objfile = (sec == NULL) ? NULL : sec->objfile;
8336 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8337 if (minsym.minsym != NULL)
8338 return BMSYMBOL_VALUE_ADDRESS (minsym);
8339 else
8340 return 0;
8341 }
8342
8343 return 0; /* not a stub */
8344}
8345
8346static void
8347set_arm_command (const char *args, int from_tty)
8348{
8349 printf_unfiltered (_("\
8350\"set arm\" must be followed by an apporpriate subcommand.\n"));
8351 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8352}
8353
8354static void
8355show_arm_command (const char *args, int from_tty)
8356{
8357 cmd_show_list (showarmcmdlist, from_tty, "");
8358}
8359
8360static void
8361arm_update_current_architecture (void)
8362{
8363 struct gdbarch_info info;
8364
8365 /* If the current architecture is not ARM, we have nothing to do. */
8366 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8367 return;
8368
8369 /* Update the architecture. */
8370 gdbarch_info_init (&info);
8371
8372 if (!gdbarch_update_p (info))
8373 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8374}
8375
8376static void
8377set_fp_model_sfunc (const char *args, int from_tty,
8378 struct cmd_list_element *c)
8379{
8380 int fp_model;
8381
8382 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8383 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8384 {
8385 arm_fp_model = (enum arm_float_model) fp_model;
8386 break;
8387 }
8388
8389 if (fp_model == ARM_FLOAT_LAST)
8390 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8391 current_fp_model);
8392
8393 arm_update_current_architecture ();
8394}
8395
8396static void
8397show_fp_model (struct ui_file *file, int from_tty,
8398 struct cmd_list_element *c, const char *value)
8399{
8400 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8401
8402 if (arm_fp_model == ARM_FLOAT_AUTO
8403 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8404 fprintf_filtered (file, _("\
8405The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8406 fp_model_strings[tdep->fp_model]);
8407 else
8408 fprintf_filtered (file, _("\
8409The current ARM floating point model is \"%s\".\n"),
8410 fp_model_strings[arm_fp_model]);
8411}
8412
8413static void
8414arm_set_abi (const char *args, int from_tty,
8415 struct cmd_list_element *c)
8416{
8417 int arm_abi;
8418
8419 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8420 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8421 {
8422 arm_abi_global = (enum arm_abi_kind) arm_abi;
8423 break;
8424 }
8425
8426 if (arm_abi == ARM_ABI_LAST)
8427 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8428 arm_abi_string);
8429
8430 arm_update_current_architecture ();
8431}
8432
8433static void
8434arm_show_abi (struct ui_file *file, int from_tty,
8435 struct cmd_list_element *c, const char *value)
8436{
8437 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8438
8439 if (arm_abi_global == ARM_ABI_AUTO
8440 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8441 fprintf_filtered (file, _("\
8442The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8443 arm_abi_strings[tdep->arm_abi]);
8444 else
8445 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8446 arm_abi_string);
8447}
8448
8449static void
8450arm_show_fallback_mode (struct ui_file *file, int from_tty,
8451 struct cmd_list_element *c, const char *value)
8452{
8453 fprintf_filtered (file,
8454 _("The current execution mode assumed "
8455 "(when symbols are unavailable) is \"%s\".\n"),
8456 arm_fallback_mode_string);
8457}
8458
8459static void
8460arm_show_force_mode (struct ui_file *file, int from_tty,
8461 struct cmd_list_element *c, const char *value)
8462{
8463 fprintf_filtered (file,
8464 _("The current execution mode assumed "
8465 "(even when symbols are available) is \"%s\".\n"),
8466 arm_force_mode_string);
8467}
8468
8469/* If the user changes the register disassembly style used for info
8470 register and other commands, we have to also switch the style used
8471 in opcodes for disassembly output. This function is run in the "set
8472 arm disassembly" command, and does that. */
8473
8474static void
8475set_disassembly_style_sfunc (const char *args, int from_tty,
8476 struct cmd_list_element *c)
8477{
8478 /* Convert the short style name into the long style name (e.g., reg-names-*)
8479 before calling the generic set_disassembler_options() function. */
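  /* For example (illustrative): the short style name "apcs" becomes the
     disassembler option "reg-names-apcs".  */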
8480 std::string long_name = std::string ("reg-names-") + disassembly_style;
8481 set_disassembler_options (&long_name[0]);
8482}
8483
8484static void
8485show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8486 struct cmd_list_element *c, const char *value)
8487{
8488 struct gdbarch *gdbarch = get_current_arch ();
8489 char *options = get_disassembler_options (gdbarch);
8490 const char *style = "";
8491 int len = 0;
8492 const char *opt;
8493
8494 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8495 if (CONST_STRNEQ (opt, "reg-names-"))
8496 {
8497 style = &opt[strlen ("reg-names-")];
8498 len = strcspn (style, ",");
8499 }
8500
8501 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8502}
8503\f
8504/* Return the ARM register name corresponding to register I. */
8505static const char *
8506arm_register_name (struct gdbarch *gdbarch, int i)
8507{
8508 const int num_regs = gdbarch_num_regs (gdbarch);
8509
8510 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8511 && i >= num_regs && i < num_regs + 32)
8512 {
8513 static const char *const vfp_pseudo_names[] = {
8514 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8515 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8516 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8517 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8518 };
8519
8520 return vfp_pseudo_names[i - num_regs];
8521 }
8522
8523 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8524 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8525 {
8526 static const char *const neon_pseudo_names[] = {
8527 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8528 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8529 };
8530
8531 return neon_pseudo_names[i - num_regs - 32];
8532 }
8533
8534 if (i >= ARRAY_SIZE (arm_register_names))
8535 /* These registers are only supported on targets which supply
8536 an XML description. */
8537 return "";
8538
8539 return arm_register_names[i];
8540}
8541
8542/* Test whether the coff symbol specific value corresponds to a Thumb
8543 function. */
8544
8545static int
8546coff_sym_is_thumb (int val)
8547{
8548 return (val == C_THUMBEXT
8549 || val == C_THUMBSTAT
8550 || val == C_THUMBEXTFUNC
8551 || val == C_THUMBSTATFUNC
8552 || val == C_THUMBLABEL);
8553}
8554
8555/* arm_coff_make_msymbol_special()
8556 arm_elf_make_msymbol_special()
8557
8558 These functions test whether the COFF or ELF symbol corresponds to
8559 an address in thumb code, and set a "special" bit in a minimal
8560 symbol to indicate that it does. */
8561
8562static void
8563arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8564{
8565 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8566
8567 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8568 == ST_BRANCH_TO_THUMB)
8569 MSYMBOL_SET_SPECIAL (msym);
8570}
8571
8572static void
8573arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8574{
8575 if (coff_sym_is_thumb (val))
8576 MSYMBOL_SET_SPECIAL (msym);
8577}
8578
8579static void
8580arm_objfile_data_free (struct objfile *objfile, void *arg)
8581{
8582 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8583 unsigned int i;
8584
8585 for (i = 0; i < objfile->obfd->section_count; i++)
8586 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8587}
8588
8589static void
8590arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8591 asymbol *sym)
8592{
8593 const char *name = bfd_asymbol_name (sym);
8594 struct arm_per_objfile *data;
8595 VEC(arm_mapping_symbol_s) **map_p;
8596 struct arm_mapping_symbol new_map_sym;
8597
8598 gdb_assert (name[0] == '$');
8599 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8600 return;
8601
8602 data = (struct arm_per_objfile *) objfile_data (objfile,
8603 arm_objfile_data_key);
8604 if (data == NULL)
8605 {
8606 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8607 struct arm_per_objfile);
8608 set_objfile_data (objfile, arm_objfile_data_key, data);
8609 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8610 objfile->obfd->section_count,
8611 VEC(arm_mapping_symbol_s) *);
8612 }
8613 map_p = &data->section_maps[bfd_get_section (sym)->index];
8614
8615 new_map_sym.value = sym->value;
8616 new_map_sym.type = name[1];
8617
8618 /* Assume that most mapping symbols appear in order of increasing
8619 value. If they were randomly distributed, it would be faster to
8620 always push here and then sort at first use. */
8621 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8622 {
8623 struct arm_mapping_symbol *prev_map_sym;
8624
8625 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8626 if (prev_map_sym->value >= sym->value)
8627 {
8628 unsigned int idx;
8629 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8630 arm_compare_mapping_symbols);
8631 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8632 return;
8633 }
8634 }
8635
8636 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8637}
8638
8639static void
8640arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8641{
8642 struct gdbarch *gdbarch = regcache->arch ();
8643 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8644
8645 /* If necessary, set the T bit. */
8646 if (arm_apcs_32)
8647 {
8648 ULONGEST val, t_bit;
8649 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8650 t_bit = arm_psr_thumb_bit (gdbarch);
8651 if (arm_pc_is_thumb (gdbarch, pc))
8652 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8653 val | t_bit);
8654 else
8655 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8656 val & ~t_bit);
8657 }
8658}
8659
8660/* Read the contents of a NEON quad register, by reading from two
8661 double registers. This is used to implement the quad pseudo
8662 registers, and for argument passing in case the quad registers are
8663 missing; vectors are passed in quad registers when using the VFP
8664 ABI, even if a NEON unit is not present. REGNUM is the index of
8665 the quad register, in [0, 15]. */
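
/* For example (illustrative): reading the pseudo register q1 reads the
   raw double registers d2 and d3; d2 is the least significant half, so
   its eight bytes land at offset 0 of BUF on a little-endian target and
   at offset 8 on a big-endian one.  */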
8666
8667static enum register_status
8668arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8669 int regnum, gdb_byte *buf)
8670{
8671 char name_buf[4];
8672 gdb_byte reg_buf[8];
8673 int offset, double_regnum;
8674 enum register_status status;
8675
8676 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8677 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8678 strlen (name_buf));
8679
8680 /* d0 is always the least significant half of q0. */
8681 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8682 offset = 8;
8683 else
8684 offset = 0;
8685
8686 status = regcache->raw_read (double_regnum, reg_buf);
8687 if (status != REG_VALID)
8688 return status;
8689 memcpy (buf + offset, reg_buf, 8);
8690
8691 offset = 8 - offset;
8692 status = regcache->raw_read (double_regnum + 1, reg_buf);
8693 if (status != REG_VALID)
8694 return status;
8695 memcpy (buf + offset, reg_buf, 8);
8696
8697 return REG_VALID;
8698}
8699
8700static enum register_status
8701arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8702 int regnum, gdb_byte *buf)
8703{
8704 const int num_regs = gdbarch_num_regs (gdbarch);
8705 char name_buf[4];
8706 gdb_byte reg_buf[8];
8707 int offset, double_regnum;
8708
8709 gdb_assert (regnum >= num_regs);
8710 regnum -= num_regs;
8711
8712 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8713 /* Quad-precision register. */
8714 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8715 else
8716 {
8717 enum register_status status;
8718
8719 /* Single-precision register. */
8720 gdb_assert (regnum < 32);
8721
8722 /* s0 is always the least significant half of d0. */
8723 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8724 offset = (regnum & 1) ? 0 : 4;
8725 else
8726 offset = (regnum & 1) ? 4 : 0;
8727
8728 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8729 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8730 strlen (name_buf));
8731
8732 status = regcache->raw_read (double_regnum, reg_buf);
8733 if (status == REG_VALID)
8734 memcpy (buf, reg_buf + offset, 4);
8735 return status;
8736 }
8737}
8738
8739/* Store the contents of BUF to a NEON quad register, by writing to
8740 two double registers. This is used to implement the quad pseudo
8741 registers, and for argument passing in case the quad registers are
8742 missing; vectors are passed in quad registers when using the VFP
8743 ABI, even if a NEON unit is not present. REGNUM is the index
8744 of the quad register, in [0, 15]. */
8745
8746static void
8747arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8748 int regnum, const gdb_byte *buf)
8749{
8750 char name_buf[4];
8751 int offset, double_regnum;
8752
8753 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8754 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8755 strlen (name_buf));
8756
8757 /* d0 is always the least significant half of q0. */
8758 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8759 offset = 8;
8760 else
8761 offset = 0;
8762
8763 regcache->raw_write (double_regnum, buf + offset);
8764 offset = 8 - offset;
8765 regcache->raw_write (double_regnum + 1, buf + offset);
8766}
8767
8768static void
8769arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8770 int regnum, const gdb_byte *buf)
8771{
8772 const int num_regs = gdbarch_num_regs (gdbarch);
8773 char name_buf[4];
8774 gdb_byte reg_buf[8];
8775 int offset, double_regnum;
8776
8777 gdb_assert (regnum >= num_regs);
8778 regnum -= num_regs;
8779
8780 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8781 /* Quad-precision register. */
8782 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8783 else
8784 {
8785 /* Single-precision register. */
8786 gdb_assert (regnum < 32);
8787
8788 /* s0 is always the least significant half of d0. */
8789 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8790 offset = (regnum & 1) ? 0 : 4;
8791 else
8792 offset = (regnum & 1) ? 4 : 0;
8793
8794 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8795 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8796 strlen (name_buf));
8797
8798 regcache->raw_read (double_regnum, reg_buf);
8799 memcpy (reg_buf + offset, buf, 4);
8800 regcache->raw_write (double_regnum, reg_buf);
8801 }
8802}
8803
8804static struct value *
8805value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8806{
8807 const int *reg_p = (const int *) baton;
8808 return value_of_register (*reg_p, frame);
8809}
8810\f
8811static enum gdb_osabi
8812arm_elf_osabi_sniffer (bfd *abfd)
8813{
8814 unsigned int elfosabi;
8815 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8816
8817 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8818
8819 if (elfosabi == ELFOSABI_ARM)
8820 /* GNU tools use this value. Check note sections in this case,
8821 as well. */
8822 bfd_map_over_sections (abfd,
8823 generic_elf_osabi_sniff_abi_tag_sections,
8824 &osabi);
8825
8826 /* Anything else will be handled by the generic ELF sniffer. */
8827 return osabi;
8828}
8829
8830static int
8831arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8832 struct reggroup *group)
8833{
8834 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8835 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8836 all_reggroup, of course. */
8837 if (regnum == ARM_FPS_REGNUM)
8838 return (group == float_reggroup
8839 || group == save_reggroup
8840 || group == restore_reggroup
8841 || group == all_reggroup);
8842 else
8843 return default_register_reggroup_p (gdbarch, regnum, group);
8844}
8845
8846\f
8847/* For backward-compatibility we allow two 'g' packet lengths with
8848 the remote protocol depending on whether FPA registers are
8849 supplied. M-profile targets do not have FPA registers, but some
8850 stubs already exist in the wild which use a 'g' packet that
8851 supplies them, albeit with dummy values. The packet format which
8852 includes FPA registers should be considered deprecated for
8853 M-profile targets. */
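
/* For reference (assuming the usual 4-byte core registers, 12-byte FPA
   registers and 8-byte VFP double registers), the three guesses
   registered below correspond to 'g' packets of 168, 68 and 200 bytes
   respectively.  */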
8854
8855static void
8856arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8857{
8858 if (gdbarch_tdep (gdbarch)->is_m)
8859 {
8860 /* If we know from the executable this is an M-profile target,
8861 cater for remote targets whose register set layout is the
8862 same as the FPA layout. */
8863 register_remote_g_packet_guess (gdbarch,
8864 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8865 (16 * INT_REGISTER_SIZE)
8866 + (8 * FP_REGISTER_SIZE)
8867 + (2 * INT_REGISTER_SIZE),
8868 tdesc_arm_with_m_fpa_layout);
8869
8870 /* The regular M-profile layout. */
8871 register_remote_g_packet_guess (gdbarch,
8872 /* r0-r12,sp,lr,pc; xpsr */
8873 (16 * INT_REGISTER_SIZE)
8874 + INT_REGISTER_SIZE,
8875 tdesc_arm_with_m);
8876
8877 /* M-profile plus M4F VFP. */
8878 register_remote_g_packet_guess (gdbarch,
8879 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8880 (16 * INT_REGISTER_SIZE)
8881 + (16 * VFP_REGISTER_SIZE)
8882 + (2 * INT_REGISTER_SIZE),
8883 tdesc_arm_with_m_vfp_d16);
8884 }
8885
8886 /* Otherwise we don't have a useful guess. */
8887}
8888
8889/* Implement the code_of_frame_writable gdbarch method. */
8890
8891static int
8892arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8893{
8894 if (gdbarch_tdep (gdbarch)->is_m
8895 && get_frame_type (frame) == SIGTRAMP_FRAME)
8896 {
8897 /* M-profile exception frames return to some magic PCs, which
8898 aren't writable at all. */
8899 return 0;
8900 }
8901 else
8902 return 1;
8903}
8904
8905\f
8906/* Initialize the current architecture based on INFO. If possible,
8907 re-use an architecture from ARCHES, which is a list of
8908 architectures already created during this debugging session.
8909
8910 Called e.g. at program startup, when reading a core file, and when
8911 reading a binary file. */
8912
8913static struct gdbarch *
8914arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8915{
8916 struct gdbarch_tdep *tdep;
8917 struct gdbarch *gdbarch;
8918 struct gdbarch_list *best_arch;
8919 enum arm_abi_kind arm_abi = arm_abi_global;
8920 enum arm_float_model fp_model = arm_fp_model;
8921 struct tdesc_arch_data *tdesc_data = NULL;
8922 int i, is_m = 0;
8923 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8924 int have_wmmx_registers = 0;
8925 int have_neon = 0;
8926 int have_fpa_registers = 1;
8927 const struct target_desc *tdesc = info.target_desc;
8928
8929 /* If we have an object to base this architecture on, try to determine
8930 its ABI. */
8931
8932 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8933 {
8934 int ei_osabi, e_flags;
8935
8936 switch (bfd_get_flavour (info.abfd))
8937 {
8938 case bfd_target_coff_flavour:
8939 /* Assume it's an old APCS-style ABI. */
8940 /* XXX WinCE? */
8941 arm_abi = ARM_ABI_APCS;
8942 break;
8943
8944 case bfd_target_elf_flavour:
8945 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8946 e_flags = elf_elfheader (info.abfd)->e_flags;
8947
8948 if (ei_osabi == ELFOSABI_ARM)
8949 {
8950 /* GNU tools used to use this value, but do not for EABI
8951 objects. There's nowhere to tag an EABI version
8952 anyway, so assume APCS. */
8953 arm_abi = ARM_ABI_APCS;
8954 }
8955 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8956 {
8957 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8958 int attr_arch, attr_profile;
8959
8960 switch (eabi_ver)
8961 {
8962 case EF_ARM_EABI_UNKNOWN:
8963 /* Assume GNU tools. */
8964 arm_abi = ARM_ABI_APCS;
8965 break;
8966
8967 case EF_ARM_EABI_VER4:
8968 case EF_ARM_EABI_VER5:
8969 arm_abi = ARM_ABI_AAPCS;
8970 /* EABI binaries default to VFP float ordering.
8971 They may also contain build attributes that can
8972 be used to identify if the VFP argument-passing
8973 ABI is in use. */
8974 if (fp_model == ARM_FLOAT_AUTO)
8975 {
8976#ifdef HAVE_ELF
8977 switch (bfd_elf_get_obj_attr_int (info.abfd,
8978 OBJ_ATTR_PROC,
8979 Tag_ABI_VFP_args))
8980 {
8981 case AEABI_VFP_args_base:
8982 /* "The user intended FP parameter/result
8983 passing to conform to AAPCS, base
8984 variant". */
8985 fp_model = ARM_FLOAT_SOFT_VFP;
8986 break;
8987 case AEABI_VFP_args_vfp:
8988 /* "The user intended FP parameter/result
8989 passing to conform to AAPCS, VFP
8990 variant". */
8991 fp_model = ARM_FLOAT_VFP;
8992 break;
8993 case AEABI_VFP_args_toolchain:
8994 /* "The user intended FP parameter/result
8995 passing to conform to tool chain-specific
8996 conventions" - we don't know any such
8997 conventions, so leave it as "auto". */
8998 break;
8999 case AEABI_VFP_args_compatible:
9000 /* "Code is compatible with both the base
9001 and VFP variants; the user did not permit
9002 non-variadic functions to pass FP
9003 parameters/results" - leave it as
9004 "auto". */
9005 break;
9006 default:
9007 /* Attribute value not mentioned in the
9008 November 2012 ABI, so leave it as
9009 "auto". */
9010 break;
9011 }
9012#else
9013 fp_model = ARM_FLOAT_SOFT_VFP;
9014#endif
9015 }
9016 break;
9017
9018 default:
9019 /* Leave it as "auto". */
9020 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9021 break;
9022 }
9023
9024#ifdef HAVE_ELF
9025 /* Detect M-profile programs. This only works if the
9026 executable file includes build attributes; GCC does
9027 copy them to the executable, but e.g. RealView does
9028 not. */
9029 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9030 Tag_CPU_arch);
9031 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9032 OBJ_ATTR_PROC,
9033 Tag_CPU_arch_profile);
9034 /* GCC specifies the profile for v6-M; RealView only
9035 specifies the profile for architectures starting with
9036 V7 (as opposed to architectures with a tag
9037 numerically greater than TAG_CPU_ARCH_V7). */
9038 if (!tdesc_has_registers (tdesc)
9039 && (attr_arch == TAG_CPU_ARCH_V6_M
9040 || attr_arch == TAG_CPU_ARCH_V6S_M
9041 || attr_profile == 'M'))
9042 is_m = 1;
9043#endif
9044 }
9045
9046 if (fp_model == ARM_FLOAT_AUTO)
9047 {
9048 int e_flags = elf_elfheader (info.abfd)->e_flags;
9049
9050 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9051 {
9052 case 0:
9053 /* Leave it as "auto". Strictly speaking this case
9054 means FPA, but almost nobody uses that now, and
9055 many toolchains fail to set the appropriate bits
9056 for the floating-point model they use. */
9057 break;
9058 case EF_ARM_SOFT_FLOAT:
9059 fp_model = ARM_FLOAT_SOFT_FPA;
9060 break;
9061 case EF_ARM_VFP_FLOAT:
9062 fp_model = ARM_FLOAT_VFP;
9063 break;
9064 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9065 fp_model = ARM_FLOAT_SOFT_VFP;
9066 break;
9067 }
9068 }
9069
9070 if (e_flags & EF_ARM_BE8)
9071 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9072
9073 break;
9074
9075 default:
9076 /* Leave it as "auto". */
9077 break;
9078 }
9079 }
9080
9081 /* Check any target description for validity. */
9082 if (tdesc_has_registers (tdesc))
9083 {
9084 /* For most registers we require GDB's default names; but also allow
9085 the numeric names for sp / lr / pc, as a convenience. */
9086 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9087 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9088 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9089
9090 const struct tdesc_feature *feature;
9091 int valid_p;
9092
9093 feature = tdesc_find_feature (tdesc,
9094 "org.gnu.gdb.arm.core");
9095 if (feature == NULL)
9096 {
9097 feature = tdesc_find_feature (tdesc,
9098 "org.gnu.gdb.arm.m-profile");
9099 if (feature == NULL)
9100 return NULL;
9101 else
9102 is_m = 1;
9103 }
9104
9105 tdesc_data = tdesc_data_alloc ();
9106
9107 valid_p = 1;
9108 for (i = 0; i < ARM_SP_REGNUM; i++)
9109 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9110 arm_register_names[i]);
9111 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9112 ARM_SP_REGNUM,
9113 arm_sp_names);
9114 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9115 ARM_LR_REGNUM,
9116 arm_lr_names);
9117 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9118 ARM_PC_REGNUM,
9119 arm_pc_names);
9120 if (is_m)
9121 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9122 ARM_PS_REGNUM, "xpsr");
9123 else
9124 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9125 ARM_PS_REGNUM, "cpsr");
9126
9127 if (!valid_p)
9128 {
9129 tdesc_data_cleanup (tdesc_data);
9130 return NULL;
9131 }
9132
9133 feature = tdesc_find_feature (tdesc,
9134 "org.gnu.gdb.arm.fpa");
9135 if (feature != NULL)
9136 {
9137 valid_p = 1;
9138 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9139 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9140 arm_register_names[i]);
9141 if (!valid_p)
9142 {
9143 tdesc_data_cleanup (tdesc_data);
9144 return NULL;
9145 }
9146 }
9147 else
9148 have_fpa_registers = 0;
9149
9150 feature = tdesc_find_feature (tdesc,
9151 "org.gnu.gdb.xscale.iwmmxt");
9152 if (feature != NULL)
9153 {
9154 static const char *const iwmmxt_names[] = {
9155 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9156 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9157 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9158 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9159 };
9160
9161 valid_p = 1;
9162 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9163 valid_p
9164 &= tdesc_numbered_register (feature, tdesc_data, i,
9165 iwmmxt_names[i - ARM_WR0_REGNUM]);
9166
9167 /* Check for the control registers, but do not fail if they
9168 are missing. */
9169 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9170 tdesc_numbered_register (feature, tdesc_data, i,
9171 iwmmxt_names[i - ARM_WR0_REGNUM]);
9172
9173 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9174 valid_p
9175 &= tdesc_numbered_register (feature, tdesc_data, i,
9176 iwmmxt_names[i - ARM_WR0_REGNUM]);
9177
9178 if (!valid_p)
9179 {
9180 tdesc_data_cleanup (tdesc_data);
9181 return NULL;
9182 }
9183
9184 have_wmmx_registers = 1;
9185 }
9186
9187 /* If we have a VFP unit, check whether the single precision registers
9188 are present. If not, then we will synthesize them as pseudo
9189 registers. */
9190 feature = tdesc_find_feature (tdesc,
9191 "org.gnu.gdb.arm.vfp");
9192 if (feature != NULL)
9193 {
9194 static const char *const vfp_double_names[] = {
9195 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9196 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9197 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9198 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9199 };
9200
9201 /* Require the double precision registers. There must be either
9202 16 or 32. */
9203 valid_p = 1;
9204 for (i = 0; i < 32; i++)
9205 {
9206 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9207 ARM_D0_REGNUM + i,
9208 vfp_double_names[i]);
9209 if (!valid_p)
9210 break;
9211 }
9212 if (!valid_p && i == 16)
9213 valid_p = 1;
9214
9215 /* Also require FPSCR. */
9216 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9217 ARM_FPSCR_REGNUM, "fpscr");
9218 if (!valid_p)
9219 {
9220 tdesc_data_cleanup (tdesc_data);
9221 return NULL;
9222 }
9223
9224 if (tdesc_unnumbered_register (feature, "s0") == 0)
9225 have_vfp_pseudos = 1;
9226
9227 vfp_register_count = i;
9228
9229 /* If we have VFP, also check for NEON. The architecture allows
9230 NEON without VFP (integer vector operations only), but GDB
9231 does not support that. */
9232 feature = tdesc_find_feature (tdesc,
9233 "org.gnu.gdb.arm.neon");
9234 if (feature != NULL)
9235 {
9236 /* NEON requires 32 double-precision registers. */
9237 if (i != 32)
9238 {
9239 tdesc_data_cleanup (tdesc_data);
9240 return NULL;
9241 }
9242
9243 /* If there are quad registers defined by the stub, use
9244 their type; otherwise (normally) provide them with
9245 the default type. */
9246 if (tdesc_unnumbered_register (feature, "q0") == 0)
9247 have_neon_pseudos = 1;
9248
9249 have_neon = 1;
9250 }
9251 }
9252 }
9253
9254 /* If there is already a candidate, use it. */
9255 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9256 best_arch != NULL;
9257 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9258 {
9259 if (arm_abi != ARM_ABI_AUTO
9260 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9261 continue;
9262
9263 if (fp_model != ARM_FLOAT_AUTO
9264 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9265 continue;
9266
9267 /* There are various other properties in tdep that we do not
9268 need to check here: those derived from a target description,
9269 since gdbarches with a different target description are
9270 automatically disqualified. */
9271
9272 /* Do check is_m, though, since it might come from the binary. */
9273 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9274 continue;
9275
9276 /* Found a match. */
9277 break;
9278 }
9279
9280 if (best_arch != NULL)
9281 {
9282 if (tdesc_data != NULL)
9283 tdesc_data_cleanup (tdesc_data);
9284 return best_arch->gdbarch;
9285 }
9286
9287 tdep = XCNEW (struct gdbarch_tdep);
9288 gdbarch = gdbarch_alloc (&info, tdep);
9289
9290 /* Record additional information about the architecture we are defining.
9291 These are gdbarch discriminators, like the OSABI. */
9292 tdep->arm_abi = arm_abi;
9293 tdep->fp_model = fp_model;
9294 tdep->is_m = is_m;
9295 tdep->have_fpa_registers = have_fpa_registers;
9296 tdep->have_wmmx_registers = have_wmmx_registers;
9297 gdb_assert (vfp_register_count == 0
9298 || vfp_register_count == 16
9299 || vfp_register_count == 32);
9300 tdep->vfp_register_count = vfp_register_count;
9301 tdep->have_vfp_pseudos = have_vfp_pseudos;
9302 tdep->have_neon_pseudos = have_neon_pseudos;
9303 tdep->have_neon = have_neon;
9304
9305 arm_register_g_packet_guesses (gdbarch);
9306
9307 /* Breakpoints. */
9308 switch (info.byte_order_for_code)
9309 {
9310 case BFD_ENDIAN_BIG:
9311 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9312 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9313 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9314 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9315
9316 break;
9317
9318 case BFD_ENDIAN_LITTLE:
9319 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9320 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9321 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9322 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9323
9324 break;
9325
9326 default:
9327 internal_error (__FILE__, __LINE__,
9328 _("arm_gdbarch_init: bad byte order for float format"));
9329 }
9330
9331 /* On ARM targets char defaults to unsigned. */
9332 set_gdbarch_char_signed (gdbarch, 0);
9333
9334 /* wchar_t is unsigned under the AAPCS. */
9335 if (tdep->arm_abi == ARM_ABI_AAPCS)
9336 set_gdbarch_wchar_signed (gdbarch, 0);
9337 else
9338 set_gdbarch_wchar_signed (gdbarch, 1);
9339
9340 /* Note: for displaced stepping, this includes the breakpoint, and one word
9341 of additional scratch space. This setting isn't used for anything besides
9342 displaced stepping at present. */
9343 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9344
9345 /* This should be low enough for everything. */
9346 tdep->lowest_pc = 0x20;
9347 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9348
9349 /* The default, for both APCS and AAPCS, is to return small
9350 structures in registers. */
9351 tdep->struct_return = reg_struct_return;
9352
9353 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9354 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9355
9356 if (is_m)
9357 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9358
9359 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9360
9361 /* Frame handling. */
9362 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9363 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9364 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9365
9366 frame_base_set_default (gdbarch, &arm_normal_base);
9367
9368 /* Address manipulation. */
9369 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9370
9371 /* Advance PC across function entry code. */
9372 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9373
9374 /* Detect whether PC is at a point where the stack has been destroyed. */
9375 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9376
9377 /* Skip trampolines. */
9378 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9379
9380 /* The stack grows downward. */
9381 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9382
9383 /* Breakpoint manipulation. */
9384 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9385 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9386 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9387 arm_breakpoint_kind_from_current_state);
9388
9389 /* Information about registers, etc. */
9390 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9391 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9392 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9393 set_gdbarch_register_type (gdbarch, arm_register_type);
9394 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9395
9396 /* This "info float" is FPA-specific. Use the generic version if we
9397 do not have FPA. */
9398 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9399 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9400
9401 /* Internal <-> external register number maps. */
9402 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9403 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9404
9405 set_gdbarch_register_name (gdbarch, arm_register_name);
9406
9407 /* Returning results. */
9408 set_gdbarch_return_value (gdbarch, arm_return_value);
9409
9410 /* Disassembly. */
9411 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9412
9413 /* Minsymbol frobbing. */
9414 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9415 set_gdbarch_coff_make_msymbol_special (gdbarch,
9416 arm_coff_make_msymbol_special);
9417 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9418
9419 /* Thumb-2 IT block support. */
9420 set_gdbarch_adjust_breakpoint_address (gdbarch,
9421 arm_adjust_breakpoint_address);
9422
9423 /* Virtual tables. */
9424 set_gdbarch_vbit_in_delta (gdbarch, 1);
9425
9426 /* Hook in the ABI-specific overrides, if they have been registered. */
9427 gdbarch_init_osabi (info, gdbarch);
9428
9429 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9430
9431 /* Add some default predicates. */
9432 if (is_m)
9433 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9434 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9435 dwarf2_append_unwinders (gdbarch);
9436 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9437 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9438 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9439
9440 /* Now we have tuned the configuration, set a few final things,
9441 based on what the OS ABI has told us. */
9442
9443 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9444 binaries are always marked. */
9445 if (tdep->arm_abi == ARM_ABI_AUTO)
9446 tdep->arm_abi = ARM_ABI_APCS;
9447
9448 /* Watchpoints are not steppable. */
9449 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9450
9451 /* We used to default to FPA for generic ARM, but almost nobody
9452 uses that now, and we now provide a way for the user to force
9453 the model. So default to the most useful variant. */
9454 if (tdep->fp_model == ARM_FLOAT_AUTO)
9455 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9456
9457 if (tdep->jb_pc >= 0)
9458 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9459
9460 /* Floating point sizes and format. */
9461 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9462 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9463 {
9464 set_gdbarch_double_format
9465 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9466 set_gdbarch_long_double_format
9467 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9468 }
9469 else
9470 {
9471 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9472 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9473 }
9474
9475 if (have_vfp_pseudos)
9476 {
9477 /* NOTE: These are the only pseudo registers used by
9478 the ARM target at the moment. If more are added, a
9479 little more care in numbering will be needed. */
9480
9481 int num_pseudos = 32;
9482 if (have_neon_pseudos)
9483 num_pseudos += 16;
9484 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9485 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9486 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9487 }
9488
9489 if (tdesc_data)
9490 {
9491 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9492
9493 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9494
9495 /* Override tdesc_register_type to adjust the types of VFP
9496 registers for NEON. */
9497 set_gdbarch_register_type (gdbarch, arm_register_type);
9498 }
9499
9500 /* Add standard register aliases. We add aliases even for those
9501 names which are used by the current architecture - it's simpler,
9502 and does no harm, since nothing ever lists user registers. */
9503 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9504 user_reg_add (gdbarch, arm_register_aliases[i].name,
9505 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9506
9507 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9508 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9509
9510 return gdbarch;
9511}
9512
9513static void
9514arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9515{
9516 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9517
9518 if (tdep == NULL)
9519 return;
9520
9521 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9522 (unsigned long) tdep->lowest_pc);
9523}
9524
9525#if GDB_SELF_TEST
9526namespace selftests
9527{
9528static void arm_record_test (void);
9529}
9530#endif
9531
9532void
9533_initialize_arm_tdep (void)
9534{
9535 long length;
9536 int i, j;
9537 char regdesc[1024], *rdptr = regdesc;
9538 size_t rest = sizeof (regdesc);
9539
9540 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9541
9542 arm_objfile_data_key
9543 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9544
9545 /* Add ourselves to objfile event chain. */
9546 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9547 arm_exidx_data_key
9548 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9549
9550 /* Register an ELF OS ABI sniffer for ARM binaries. */
9551 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9552 bfd_target_elf_flavour,
9553 arm_elf_osabi_sniffer);
9554
9555 /* Initialize the standard target descriptions. */
9556 initialize_tdesc_arm_with_m ();
9557 initialize_tdesc_arm_with_m_fpa_layout ();
9558 initialize_tdesc_arm_with_m_vfp_d16 ();
9559 initialize_tdesc_arm_with_iwmmxt ();
9560 initialize_tdesc_arm_with_vfpv2 ();
9561 initialize_tdesc_arm_with_vfpv3 ();
9562 initialize_tdesc_arm_with_neon ();
9563
9564 /* Add root prefix command for all "set arm"/"show arm" commands. */
9565 add_prefix_cmd ("arm", no_class, set_arm_command,
9566 _("Various ARM-specific commands."),
9567 &setarmcmdlist, "set arm ", 0, &setlist);
9568
9569 add_prefix_cmd ("arm", no_class, show_arm_command,
9570 _("Various ARM-specific commands."),
9571 &showarmcmdlist, "show arm ", 0, &showlist);
9572
9573
9574 arm_disassembler_options = xstrdup ("reg-names-std");
9575 const disasm_options_t *disasm_options
9576 = &disassembler_options_arm ()->options;
9577 int num_disassembly_styles = 0;
9578 for (i = 0; disasm_options->name[i] != NULL; i++)
9579 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9580 num_disassembly_styles++;
9581
9582 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9583 valid_disassembly_styles = XNEWVEC (const char *,
9584 num_disassembly_styles + 1);
9585 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9586 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9587 {
9588 size_t offset = strlen ("reg-names-");
9589 const char *style = disasm_options->name[i];
9590 valid_disassembly_styles[j++] = &style[offset];
9591 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9592 disasm_options->description[i]);
9593 rdptr += length;
9594 rest -= length;
9595 }
9596 /* Mark the end of valid options. */
9597 valid_disassembly_styles[num_disassembly_styles] = NULL;
9598
9599 /* Create the help text. */
9600 std::string helptext = string_printf ("%s%s%s",
9601 _("The valid values are:\n"),
9602 regdesc,
9603 _("The default is \"std\"."));
9604
9605 add_setshow_enum_cmd ("disassembler", no_class,
9606 valid_disassembly_styles, &disassembly_style,
9607 _("Set the disassembly style."),
9608 _("Show the disassembly style."),
9609 helptext.c_str (),
9610 set_disassembly_style_sfunc,
9611 show_disassembly_style_sfunc,
9612 &setarmcmdlist, &showarmcmdlist);
9613
9614 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9615 _("Set usage of ARM 32-bit mode."),
9616 _("Show usage of ARM 32-bit mode."),
9617 _("When off, a 26-bit PC will be used."),
9618 NULL,
9619 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9620 mode is %s. */
9621 &setarmcmdlist, &showarmcmdlist);
9622
9623 /* Add a command to allow the user to force the FPU model. */
9624 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9625 _("Set the floating point type."),
9626 _("Show the floating point type."),
9627 _("auto - Determine the FP typefrom the OS-ABI.\n\
9628softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9629fpa - FPA co-processor (GCC compiled).\n\
9630softvfp - Software FP with pure-endian doubles.\n\
9631vfp - VFP co-processor."),
9632 set_fp_model_sfunc, show_fp_model,
9633 &setarmcmdlist, &showarmcmdlist);
9634
9635 /* Add a command to allow the user to force the ABI. */
9636 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9637 _("Set the ABI."),
9638 _("Show the ABI."),
9639 NULL, arm_set_abi, arm_show_abi,
9640 &setarmcmdlist, &showarmcmdlist);
9641
9642 /* Add two commands to allow the user to force the assumed
9643 execution mode. */
9644 add_setshow_enum_cmd ("fallback-mode", class_support,
9645 arm_mode_strings, &arm_fallback_mode_string,
9646 _("Set the mode assumed when symbols are unavailable."),
9647 _("Show the mode assumed when symbols are unavailable."),
9648 NULL, NULL, arm_show_fallback_mode,
9649 &setarmcmdlist, &showarmcmdlist);
9650 add_setshow_enum_cmd ("force-mode", class_support,
9651 arm_mode_strings, &arm_force_mode_string,
9652 _("Set the mode assumed even when symbols are available."),
9653 _("Show the mode assumed even when symbols are available."),
9654 NULL, NULL, arm_show_force_mode,
9655 &setarmcmdlist, &showarmcmdlist);
9656
9657 /* Debugging flag. */
9658 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9659 _("Set ARM debugging."),
9660 _("Show ARM debugging."),
9661 _("When on, arm-specific debugging is enabled."),
9662 NULL,
9663 NULL, /* FIXME: i18n: ARM debugging is %s. */
9664 &setdebuglist, &showdebuglist);
9665
9666#if GDB_SELF_TEST
9667 selftests::register_test ("arm-record", selftests::arm_record_test);
9668#endif
9669
9670}
9671
9672/* ARM-reversible process record data structures. */
9673
9674#define ARM_INSN_SIZE_BYTES 4
9675#define THUMB_INSN_SIZE_BYTES 2
9676#define THUMB2_INSN_SIZE_BYTES 4
9677
9678
9679/* Position of the bit within a 32-bit ARM instruction
9680 that defines whether the instruction is a load or store. */
9681#define INSN_S_L_BIT_NUM 20
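
/* For example, in the single data transfer and extra load/store
   encodings, bit (insn, INSN_S_L_BIT_NUM) is 1 for a load and 0 for a
   store; the record functions below test this bit to pick the record
   path. */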
9682
9683#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9684 do \
9685 { \
9686 unsigned int reg_len = LENGTH; \
9687 if (reg_len) \
9688 { \
9689 REGS = XNEWVEC (uint32_t, reg_len); \
9690 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9691 } \
9692 } \
9693 while (0)
9694
9695#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9696 do \
9697 { \
9698 unsigned int mem_len = LENGTH; \
9699 if (mem_len) \
9700 { \
9701 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9702 memcpy(&MEMS->len, &RECORD_BUF[0], \
9703 sizeof(struct arm_mem_r) * LENGTH); \
9704 } \
9705 } \
9706 while (0)
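
/* Illustrative usage of the two macros above (a sketch only; the real
   calls are in the per-opcode record functions below). After decoding
   an insn that modifies r0 and CPSR and stores 4 bytes at 0x1000, a
   recorder would do roughly:

     uint32_t record_buf[2] = { 0, ARM_PS_REGNUM };
     uint32_t record_buf_mem[2] = { 4, 0x1000 };

     arm_insn_r->reg_rec_count = 2;
     arm_insn_r->mem_rec_count = 1;
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                record_buf_mem);

   record_buf_mem is filled with {length, address} pairs, which MEM_ALLOC
   copies into the arm_mem_r array. */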
9707
9708 /* Checks whether the insn has already been recorded or is yet to be decoded (boolean expression). */
9709#define INSN_RECORDED(ARM_RECORD) \
9710 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9711
9712/* ARM memory record structure. */
9713struct arm_mem_r
9714{
9715 uint32_t len; /* Record length. */
9716 uint32_t addr; /* Memory address. */
9717};
9718
9719/* ARM instruction record contains opcode of current insn
9720 and execution state (before entry to decode_insn()),
9721 contains list of to-be-modified registers and
9722 memory blocks (on return from decode_insn()). */
9723
9724typedef struct insn_decode_record_t
9725{
9726 struct gdbarch *gdbarch;
9727 struct regcache *regcache;
9728 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9729 uint32_t arm_insn; /* Should accommodate thumb. */
9730 uint32_t cond; /* Condition code. */
9731 uint32_t opcode; /* Insn opcode. */
9732 uint32_t decode; /* Insn decode bits. */
9733 uint32_t mem_rec_count; /* No of mem records. */
9734 uint32_t reg_rec_count; /* No of reg records. */
9735 uint32_t *arm_regs; /* Registers to be saved for this record. */
9736 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9737} insn_decode_record;
9738
9739
9740/* Checks ARM SBZ and SBO mandatory fields. */
9741
9742static int
9743sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9744{
9745 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9746
9747 if (!len)
9748 return 1;
9749
9750 if (!sbo)
9751 ones = ~ones;
9752
9753 while (ones)
9754 {
9755 if (!(ones & sbo))
9756 {
9757 return 0;
9758 }
9759 ones = ones >> 1;
9760 }
9761 return 1;
9762}
9763
9764enum arm_record_result
9765{
9766 ARM_RECORD_SUCCESS = 0,
9767 ARM_RECORD_FAILURE = 1
9768};
9769
9770typedef enum
9771{
9772 ARM_RECORD_STRH=1,
9773 ARM_RECORD_STRD
9774} arm_record_strx_t;
9775
9776typedef enum
9777{
9778 ARM_RECORD=1,
9779 THUMB_RECORD,
9780 THUMB2_RECORD
9781} record_type_t;
9782
9783
9784static int
9785arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9786 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9787{
9788
9789 struct regcache *reg_cache = arm_insn_r->regcache;
9790 ULONGEST u_regval[2] = {0};
9791
9792 uint32_t reg_src1 = 0, reg_src2 = 0;
9793 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9794
9795 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9796 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9797
9798 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9799 {
9800 /* 1) Handle misc store, immediate offset. */
9801 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9802 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9803 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9804 regcache_raw_read_unsigned (reg_cache, reg_src1,
9805 &u_regval[0]);
9806 if (ARM_PC_REGNUM == reg_src1)
9807 {
9808 /* If R15 was used as Rn, the value is the current PC+8. */
9809 u_regval[0] = u_regval[0] + 8;
9810 }
9811 offset_8 = (immed_high << 4) | immed_low;
9812 /* Calculate target store address. */
9813 if (14 == arm_insn_r->opcode)
9814 {
9815 tgt_mem_addr = u_regval[0] + offset_8;
9816 }
9817 else
9818 {
9819 tgt_mem_addr = u_regval[0] - offset_8;
9820 }
9821 if (ARM_RECORD_STRH == str_type)
9822 {
9823 record_buf_mem[0] = 2;
9824 record_buf_mem[1] = tgt_mem_addr;
9825 arm_insn_r->mem_rec_count = 1;
9826 }
9827 else if (ARM_RECORD_STRD == str_type)
9828 {
9829 record_buf_mem[0] = 4;
9830 record_buf_mem[1] = tgt_mem_addr;
9831 record_buf_mem[2] = 4;
9832 record_buf_mem[3] = tgt_mem_addr + 4;
9833 arm_insn_r->mem_rec_count = 2;
9834 }
9835 }
9836 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9837 {
9838 /* 2) Store, register offset. */
9839 /* Get Rm. */
9840 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9841 /* Get Rn. */
9842 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9843 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9844 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9845 if (15 == reg_src2)
9846 {
9847 /* If R15 was used as Rn, the value is the current PC+8. */
9848 u_regval[0] = u_regval[0] + 8;
9849 }
9850 /* Calculate target store address, Rn +/- Rm, register offset. */
9851 if (12 == arm_insn_r->opcode)
9852 {
9853 tgt_mem_addr = u_regval[0] + u_regval[1];
9854 }
9855 else
9856 {
9857 tgt_mem_addr = u_regval[1] - u_regval[0];
9858 }
9859 if (ARM_RECORD_STRH == str_type)
9860 {
9861 record_buf_mem[0] = 2;
9862 record_buf_mem[1] = tgt_mem_addr;
9863 arm_insn_r->mem_rec_count = 1;
9864 }
9865 else if (ARM_RECORD_STRD == str_type)
9866 {
9867 record_buf_mem[0] = 4;
9868 record_buf_mem[1] = tgt_mem_addr;
9869 record_buf_mem[2] = 4;
9870 record_buf_mem[3] = tgt_mem_addr + 4;
9871 arm_insn_r->mem_rec_count = 2;
9872 }
9873 }
9874 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9875 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9876 {
9877 /* 3) Store, immediate pre-indexed. */
9878 /* 5) Store, immediate post-indexed. */
9879 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9880 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9881 offset_8 = (immed_high << 4) | immed_low;
9882 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9883 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9884 /* Calculate target store address, Rn +/- Rm, register offset. */
9885 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9886 {
9887 tgt_mem_addr = u_regval[0] + offset_8;
9888 }
9889 else
9890 {
9891 tgt_mem_addr = u_regval[0] - offset_8;
9892 }
9893 if (ARM_RECORD_STRH == str_type)
9894 {
9895 record_buf_mem[0] = 2;
9896 record_buf_mem[1] = tgt_mem_addr;
9897 arm_insn_r->mem_rec_count = 1;
9898 }
9899 else if (ARM_RECORD_STRD == str_type)
9900 {
9901 record_buf_mem[0] = 4;
9902 record_buf_mem[1] = tgt_mem_addr;
9903 record_buf_mem[2] = 4;
9904 record_buf_mem[3] = tgt_mem_addr + 4;
9905 arm_insn_r->mem_rec_count = 2;
9906 }
9907 /* Record Rn also as it changes. */
9908 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9909 arm_insn_r->reg_rec_count = 1;
9910 }
9911 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9912 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9913 {
9914 /* 4) Store, register pre-indexed. */
9915 /* 6) Store, register post-indexed. */
9916 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9917 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9918 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9919 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9920 /* Calculate target store address, Rn +/- Rm, register offset. */
9921 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9922 {
9923 tgt_mem_addr = u_regval[0] + u_regval[1];
9924 }
9925 else
9926 {
9927 tgt_mem_addr = u_regval[1] - u_regval[0];
9928 }
9929 if (ARM_RECORD_STRH == str_type)
9930 {
9931 record_buf_mem[0] = 2;
9932 record_buf_mem[1] = tgt_mem_addr;
9933 arm_insn_r->mem_rec_count = 1;
9934 }
9935 else if (ARM_RECORD_STRD == str_type)
9936 {
9937 record_buf_mem[0] = 4;
9938 record_buf_mem[1] = tgt_mem_addr;
9939 record_buf_mem[2] = 4;
9940 record_buf_mem[3] = tgt_mem_addr + 4;
9941 arm_insn_r->mem_rec_count = 2;
9942 }
9943 /* Record Rn also as it changes. */
9944 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9945 arm_insn_r->reg_rec_count = 1;
9946 }
9947 return 0;
9948}
9949
9950/* Handling ARM extension space insns. */
9951
9952static int
9953arm_record_extension_space (insn_decode_record *arm_insn_r)
9954{
9955 int ret = 0; /* Return value: -1: record failure; 0: success. */
9956 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9957 uint32_t record_buf[8], record_buf_mem[8];
9958 uint32_t reg_src1 = 0;
9959 struct regcache *reg_cache = arm_insn_r->regcache;
9960 ULONGEST u_regval = 0;
9961
9962 gdb_assert (!INSN_RECORDED(arm_insn_r));
9963 /* Handle unconditional insn extension space. */
9964
9965 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9966 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9967 if (arm_insn_r->cond)
9968 {
9969 /* PLD has no effect on architectural state; it just affects
9970 the caches. */
9971 if (5 == ((opcode1 & 0xE0) >> 5))
9972 {
9973 /* BLX(1) */
9974 record_buf[0] = ARM_PS_REGNUM;
9975 record_buf[1] = ARM_LR_REGNUM;
9976 arm_insn_r->reg_rec_count = 2;
9977 }
9978 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9979 }
9980
9981
9982 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9983 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9984 {
9985 ret = -1;
9986 /* Undefined instruction on ARM V5; need to handle if later
9987 versions define it. */
9988 }
9989
9990 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9991 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9992 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9993
9994 /* Handle arithmetic insn extension space. */
9995 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9996 && !INSN_RECORDED(arm_insn_r))
9997 {
9998 /* Handle MLA(S) and MUL(S). */
9999 if (in_inclusive_range (insn_op1, 0U, 3U))
10000 {
10001 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10002 record_buf[1] = ARM_PS_REGNUM;
10003 arm_insn_r->reg_rec_count = 2;
10004 }
10005 else if (in_inclusive_range (insn_op1, 4U, 15U))
10006 {
10007 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10008 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10009 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10010 record_buf[2] = ARM_PS_REGNUM;
10011 arm_insn_r->reg_rec_count = 3;
10012 }
10013 }
10014
10015 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10016 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10017 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10018
10019 /* Handle control insn extension space. */
10020
10021 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10022 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10023 {
10024 if (!bit (arm_insn_r->arm_insn, 25))
10025 {
10026 if (!bits (arm_insn_r->arm_insn, 4, 7))
10027 {
10028 if ((0 == insn_op1) || (2 == insn_op1))
10029 {
10030 /* MRS. */
10031 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10032 arm_insn_r->reg_rec_count = 1;
10033 }
10034 else if (1 == insn_op1)
10035 {
10036 /* CPSR is going to be changed. */
10037 record_buf[0] = ARM_PS_REGNUM;
10038 arm_insn_r->reg_rec_count = 1;
10039 }
10040 else if (3 == insn_op1)
10041 {
10042 /* SPSR is going to be changed. */
10043 /* We need to get SPSR value, which is yet to be done. */
10044 return -1;
10045 }
10046 }
10047 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10048 {
10049 if (1 == insn_op1)
10050 {
10051 /* BX. */
10052 record_buf[0] = ARM_PS_REGNUM;
10053 arm_insn_r->reg_rec_count = 1;
10054 }
10055 else if (3 == insn_op1)
10056 {
10057 /* CLZ. */
10058 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10059 arm_insn_r->reg_rec_count = 1;
10060 }
10061 }
10062 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10063 {
10064 /* BLX. */
10065 record_buf[0] = ARM_PS_REGNUM;
10066 record_buf[1] = ARM_LR_REGNUM;
10067 arm_insn_r->reg_rec_count = 2;
10068 }
10069 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10070 {
10071 /* QADD, QSUB, QDADD, QDSUB */
10072 record_buf[0] = ARM_PS_REGNUM;
10073 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10074 arm_insn_r->reg_rec_count = 2;
10075 }
10076 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10077 {
10078 /* BKPT. */
10079 record_buf[0] = ARM_PS_REGNUM;
10080 record_buf[1] = ARM_LR_REGNUM;
10081 arm_insn_r->reg_rec_count = 2;
10082
10083 /* Save SPSR also; how? */
10084 return -1;
10085 }
10086 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10087 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10088 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10089 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10090 )
10091 {
10092 if (0 == insn_op1 || 1 == insn_op1)
10093 {
10094 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10095 /* We don't do optimization for SMULW<y> where we
10096 need only Rd. */
10097 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10098 record_buf[1] = ARM_PS_REGNUM;
10099 arm_insn_r->reg_rec_count = 2;
10100 }
10101 else if (2 == insn_op1)
10102 {
10103 /* SMLAL<x><y>. */
10104 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10105 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10106 arm_insn_r->reg_rec_count = 2;
10107 }
10108 else if (3 == insn_op1)
10109 {
10110 /* SMUL<x><y>. */
10111 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10112 arm_insn_r->reg_rec_count = 1;
10113 }
10114 }
10115 }
10116 else
10117 {
10118 /* MSR : immediate form. */
10119 if (1 == insn_op1)
10120 {
10121 /* CPSR is going to be changed. */
10122 record_buf[0] = ARM_PS_REGNUM;
10123 arm_insn_r->reg_rec_count = 1;
10124 }
10125 else if (3 == insn_op1)
10126 {
10127 /* SPSR is going to be changed. */
10128 /* We need to get SPSR value, which is yet to be done. */
10129 return -1;
10130 }
10131 }
10132 }
10133
10134 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10135 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10136 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10137
10138 /* Handle load/store insn extension space. */
10139
10140 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10141 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10142 && !INSN_RECORDED(arm_insn_r))
10143 {
10144 /* SWP/SWPB. */
10145 if (0 == insn_op1)
10146 {
10147 /* These insns change both registers and memory. */
10148 /* SWP or SWPB insn. */
10149 /* Get memory address given by Rn. */
10150 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10151 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10152 /* SWP insn swaps a word. */
10153 if (8 == arm_insn_r->opcode)
10154 {
10155 record_buf_mem[0] = 4;
10156 }
10157 else
10158 {
10159 /* SWPB insn swaps only a byte. */
10160 record_buf_mem[0] = 1;
10161 }
10162 record_buf_mem[1] = u_regval;
10163 arm_insn_r->mem_rec_count = 1;
10164 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10165 arm_insn_r->reg_rec_count = 1;
10166 }
10167 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10168 {
10169 /* STRH. */
10170 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10171 ARM_RECORD_STRH);
10172 }
10173 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10174 {
10175 /* LDRD. */
10176 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10177 record_buf[1] = record_buf[0] + 1;
10178 arm_insn_r->reg_rec_count = 2;
10179 }
10180 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10181 {
10182 /* STRD. */
10183 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10184 ARM_RECORD_STRD);
10185 }
10186 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10187 {
10188 /* LDRH, LDRSB, LDRSH. */
10189 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10190 arm_insn_r->reg_rec_count = 1;
10191 }
10192
10193 }
10194
10195 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10196 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10197 && !INSN_RECORDED(arm_insn_r))
10198 {
10199 ret = -1;
10200 /* Handle coprocessor insn extension space. */
10201 }
10202
10203 /* To be done for ARMv5 and later; as of now we return -1. */
10204 if (-1 == ret)
10205 return ret;
10206
10207 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10208 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10209
10210 return ret;
10211}
10212
10213/* Handling opcode 000 insns. */
10214
10215static int
10216arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10217{
10218 struct regcache *reg_cache = arm_insn_r->regcache;
10219 uint32_t record_buf[8], record_buf_mem[8];
10220 ULONGEST u_regval[2] = {0};
10221
10222 uint32_t reg_src1 = 0, reg_dest = 0;
10223 uint32_t opcode1 = 0;
10224
10225 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10226 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10227 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10228
10229 if (!((opcode1 & 0x19) == 0x10))
10230 {
10231 /* Data-processing (register) and data-processing (register-shifted
10232 register). */
10233 /* In all 11 shifter operand modes, the insn modifies the destination
10234 register, which is specified by bits 12-15 of the insn. */
10235 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10236 record_buf[1] = ARM_PS_REGNUM;
10237 arm_insn_r->reg_rec_count = 2;
10238 }
10239 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10240 {
10241 /* Miscellaneous instructions */
10242
10243 if (3 == arm_insn_r->decode && 0x12 == opcode1
10244 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10245 {
10246 /* Handle BLX, branch and link/exchange. */
10247 if (9 == arm_insn_r->opcode)
10248 {
10249 /* The branch target state is chosen by setting the T bit of CPSR
10250 from bit[0] of Rm, and R14 stores the return address. */
10251 record_buf[0] = ARM_PS_REGNUM;
10252 record_buf[1] = ARM_LR_REGNUM;
10253 arm_insn_r->reg_rec_count = 2;
10254 }
10255 }
10256 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10257 {
10258 /* Handle enhanced software breakpoint insn, BKPT. */
10259 /* CPSR is changed to execute in ARM state, disabling normal
10260 interrupts and entering abort mode. */
10261 /* PC is set according to the high vector configuration. */
10262 /* If the user hit a breakpoint and typed reverse, we need
10263 to go back with the previous CPSR and
10264 Program Counter. */
10265 record_buf[0] = ARM_PS_REGNUM;
10266 record_buf[1] = ARM_LR_REGNUM;
10267 arm_insn_r->reg_rec_count = 2;
10268
10269 /* Save SPSR also; how? */
10270 return -1;
10271 }
10272 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10273 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10274 {
10275 /* Handle BX, branch and exchange. */
10276 /* The branch target state is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10277 record_buf[0] = ARM_PS_REGNUM;
10278 arm_insn_r->reg_rec_count = 1;
10279 }
10280 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10281 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10282 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10283 {
10284 /* Count leading zeros: CLZ. */
10285 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10286 arm_insn_r->reg_rec_count = 1;
10287 }
10288 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10289 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10290 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10291 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10292 {
10293 /* Handle MRS insn. */
10294 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10295 arm_insn_r->reg_rec_count = 1;
10296 }
10297 }
10298 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10299 {
10300 /* Multiply and multiply-accumulate */
10301
10302 /* Handle multiply instructions. */
10303 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10304 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10305 {
10306 /* Handle MLA and MUL. */
10307 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10308 record_buf[1] = ARM_PS_REGNUM;
10309 arm_insn_r->reg_rec_count = 2;
10310 }
10311 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10312 {
10313 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10314 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10315 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10316 record_buf[2] = ARM_PS_REGNUM;
10317 arm_insn_r->reg_rec_count = 3;
10318 }
10319 }
10320 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10321 {
10322 /* Synchronization primitives */
10323
10324 /* Handling SWP, SWPB. */
10325 /* These insns change both registers and memory. */
10326 /* SWP or SWPB insn. */
10327
10328 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10329 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10330 /* SWP insn swaps a word. */
10331 if (8 == arm_insn_r->opcode)
10332 {
10333 record_buf_mem[0] = 4;
10334 }
10335 else
10336 {
10337 /* SWPB insn swaps only a byte. */
10338 record_buf_mem[0] = 1;
10339 }
10340 record_buf_mem[1] = u_regval[0];
10341 arm_insn_r->mem_rec_count = 1;
10342 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10343 arm_insn_r->reg_rec_count = 1;
10344 }
10345 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10346 || 15 == arm_insn_r->decode)
10347 {
10348 if ((opcode1 & 0x12) == 2)
10349 {
10350 /* Extra load/store (unprivileged) */
10351 return -1;
10352 }
10353 else
10354 {
10355 /* Extra load/store */
10356 switch (bits (arm_insn_r->arm_insn, 5, 6))
10357 {
10358 case 1:
10359 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10360 {
10361 /* STRH (register), STRH (immediate) */
10362 arm_record_strx (arm_insn_r, &record_buf[0],
10363 &record_buf_mem[0], ARM_RECORD_STRH);
10364 }
10365 else if ((opcode1 & 0x05) == 0x1)
10366 {
10367 /* LDRH (register) */
10368 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10369 arm_insn_r->reg_rec_count = 1;
10370
10371 if (bit (arm_insn_r->arm_insn, 21))
10372 {
10373 /* Write back to Rn. */
10374 record_buf[arm_insn_r->reg_rec_count++]
10375 = bits (arm_insn_r->arm_insn, 16, 19);
10376 }
10377 }
10378 else if ((opcode1 & 0x05) == 0x5)
10379 {
10380 /* LDRH (immediate), LDRH (literal) */
10381 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10382
10383 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10384 arm_insn_r->reg_rec_count = 1;
10385
10386 if (rn != 15)
10387 {
10388 /* LDRH (immediate). */
10389 if (bit (arm_insn_r->arm_insn, 21))
10390 {
10391 /* Write back to Rn. */
10392 record_buf[arm_insn_r->reg_rec_count++] = rn;
10393 }
10394 }
10395 }
10396 else
10397 return -1;
10398 break;
10399 case 2:
10400 if ((opcode1 & 0x05) == 0x0)
10401 {
10402 /* LDRD (register) */
10403 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10404 record_buf[1] = record_buf[0] + 1;
10405 arm_insn_r->reg_rec_count = 2;
10406
10407 if (bit (arm_insn_r->arm_insn, 21))
10408 {
10409 /* Write back to Rn. */
10410 record_buf[arm_insn_r->reg_rec_count++]
10411 = bits (arm_insn_r->arm_insn, 16, 19);
10412 }
10413 }
10414 else if ((opcode1 & 0x05) == 0x1)
10415 {
10416 /* LDRSB (register) */
10417 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10418 arm_insn_r->reg_rec_count = 1;
10419
10420 if (bit (arm_insn_r->arm_insn, 21))
10421 {
10422 /* Write back to Rn. */
10423 record_buf[arm_insn_r->reg_rec_count++]
10424 = bits (arm_insn_r->arm_insn, 16, 19);
10425 }
10426 }
10427 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10428 {
10429 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10430 LDRSB (literal) */
10431 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10432
10433 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10434 arm_insn_r->reg_rec_count = 1;
10435
10436 if (rn != 15)
10437 {
10438 /* LDRD (immediate), LDRSB (immediate). */
10439 if (bit (arm_insn_r->arm_insn, 21))
10440 {
10441 /* Write back to Rn. */
10442 record_buf[arm_insn_r->reg_rec_count++] = rn;
10443 }
10444 }
10445 }
10446 else
10447 return -1;
10448 break;
10449 case 3:
10450 if ((opcode1 & 0x05) == 0x0)
10451 {
10452 /* STRD (register) */
10453 arm_record_strx (arm_insn_r, &record_buf[0],
10454 &record_buf_mem[0], ARM_RECORD_STRD);
10455 }
10456 else if ((opcode1 & 0x05) == 0x1)
10457 {
10458 /* LDRSH (register) */
10459 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10460 arm_insn_r->reg_rec_count = 1;
10461
10462 if (bit (arm_insn_r->arm_insn, 21))
10463 {
10464 /* Write back to Rn. */
10465 record_buf[arm_insn_r->reg_rec_count++]
10466 = bits (arm_insn_r->arm_insn, 16, 19);
10467 }
10468 }
10469 else if ((opcode1 & 0x05) == 0x4)
10470 {
10471 /* STRD (immediate) */
10472 arm_record_strx (arm_insn_r, &record_buf[0],
10473 &record_buf_mem[0], ARM_RECORD_STRD);
10474 }
10475 else if ((opcode1 & 0x05) == 0x5)
10476 {
10477 /* LDRSH (immediate), LDRSH (literal) */
10478 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10479 arm_insn_r->reg_rec_count = 1;
10480
10481 if (bit (arm_insn_r->arm_insn, 21))
10482 {
10483 /* Write back to Rn. */
10484 record_buf[arm_insn_r->reg_rec_count++]
10485 = bits (arm_insn_r->arm_insn, 16, 19);
10486 }
10487 }
10488 else
10489 return -1;
10490 break;
10491 default:
10492 return -1;
10493 }
10494 }
10495 }
10496 else
10497 {
10498 return -1;
10499 }
10500
10501 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10502 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10503 return 0;
10504}
10505
10506/* Handling opcode 001 insns. */
10507
10508static int
10509arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10510{
10511 uint32_t record_buf[8], record_buf_mem[8];
10512
10513 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10514 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10515
10516 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10517 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10518 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10519 )
10520 {
10521 /* Handle MSR insn. */
10522 if (9 == arm_insn_r->opcode)
10523 {
10524 /* CPSR is going to be changed. */
10525 record_buf[0] = ARM_PS_REGNUM;
10526 arm_insn_r->reg_rec_count = 1;
10527 }
10528 else
10529 {
10530 /* SPSR is going to be changed. */
10531 }
10532 }
10533 else if (arm_insn_r->opcode <= 15)
10534 {
10535 /* Normal data processing insns. */
10536 /* In all 11 shifter operand modes, the insn modifies the destination
10537 register, which is specified by bits 12-15 of the insn. */
10538 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10539 record_buf[1] = ARM_PS_REGNUM;
10540 arm_insn_r->reg_rec_count = 2;
10541 }
10542 else
10543 {
10544 return -1;
10545 }
10546
10547 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10548 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10549 return 0;
10550}
10551
10552static int
10553arm_record_media (insn_decode_record *arm_insn_r)
10554{
10555 uint32_t record_buf[8];
10556
10557 switch (bits (arm_insn_r->arm_insn, 22, 24))
10558 {
10559 case 0:
10560 /* Parallel addition and subtraction, signed */
10561 case 1:
10562 /* Parallel addition and subtraction, unsigned */
10563 case 2:
10564 case 3:
10565 /* Packing, unpacking, saturation and reversal */
10566 {
10567 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10568
10569 record_buf[arm_insn_r->reg_rec_count++] = rd;
10570 }
10571 break;
10572
10573 case 4:
10574 case 5:
10575 /* Signed multiplies */
10576 {
10577 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10578 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10579
10580 record_buf[arm_insn_r->reg_rec_count++] = rd;
10581 if (op1 == 0x0)
10582 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10583 else if (op1 == 0x4)
10584 record_buf[arm_insn_r->reg_rec_count++]
10585 = bits (arm_insn_r->arm_insn, 12, 15);
10586 }
10587 break;
10588
10589 case 6:
10590 {
10591 if (bit (arm_insn_r->arm_insn, 21)
10592 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10593 {
10594 /* SBFX */
10595 record_buf[arm_insn_r->reg_rec_count++]
10596 = bits (arm_insn_r->arm_insn, 12, 15);
10597 }
10598 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10599 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10600 {
10601 /* USAD8 and USADA8 */
10602 record_buf[arm_insn_r->reg_rec_count++]
10603 = bits (arm_insn_r->arm_insn, 16, 19);
10604 }
10605 }
10606 break;
10607
10608 case 7:
10609 {
10610 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10611 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10612 {
10613 /* Permanently UNDEFINED */
10614 return -1;
10615 }
10616 else
10617 {
10618 /* BFC, BFI and UBFX */
10619 record_buf[arm_insn_r->reg_rec_count++]
10620 = bits (arm_insn_r->arm_insn, 12, 15);
10621 }
10622 }
10623 break;
10624
10625 default:
10626 return -1;
10627 }
10628
10629 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10630
10631 return 0;
10632}
10633
10634/* Handle ARM mode instructions with opcode 010. */
10635
10636static int
10637arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10638{
10639 struct regcache *reg_cache = arm_insn_r->regcache;
10640
10641 uint32_t reg_base, reg_dest;
10642 uint32_t offset_12, tgt_mem_addr;
10643 uint32_t record_buf[8], record_buf_mem[8];
10644 unsigned char wback;
10645 ULONGEST u_regval;
10646
10647 /* Calculate wback. */
10648 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10649 || (bit (arm_insn_r->arm_insn, 21) == 1);
10650
10651 arm_insn_r->reg_rec_count = 0;
10652 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10653
10654 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10655 {
10656 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10657 and LDRT. */
10658
10659 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10660 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10661
10662 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10663 precedes an LDR instruction that loads R15, it
10664 emulates a branch and link instruction, and hence we need to save
10665 CPSR and PC as well. */
10666 if (ARM_PC_REGNUM == reg_dest)
10667 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10668
10669 /* If wback is true, also save the base register, which is going to be
10670 written to. */
10671 if (wback)
10672 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10673 }
10674 else
10675 {
10676 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10677
10678 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10679 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10680
10681 /* Handle bit U. */
10682 if (bit (arm_insn_r->arm_insn, 23))
10683 {
10684 /* U == 1: Add the offset. */
10685 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10686 }
10687 else
10688 {
10689 /* U == 0: subtract the offset. */
10690 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10691 }
10692
10693 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10694 bytes. */
10695 if (bit (arm_insn_r->arm_insn, 22))
10696 {
10697 /* STRB and STRBT: 1 byte. */
10698 record_buf_mem[0] = 1;
10699 }
10700 else
10701 {
10702 /* STR and STRT: 4 bytes. */
10703 record_buf_mem[0] = 4;
10704 }
10705
10706 /* Handle bit P. */
10707 if (bit (arm_insn_r->arm_insn, 24))
10708 record_buf_mem[1] = tgt_mem_addr;
10709 else
10710 record_buf_mem[1] = (uint32_t) u_regval;
10711
10712 arm_insn_r->mem_rec_count = 1;
10713
10714 /* If wback is true, also save the base register, which is going to be
10715 written to. */
10716 if (wback)
10717 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10718 }
10719
10720 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10721 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10722 return 0;
10723}
10724
10725/* Handling opcode 011 insns. */
10726
10727static int
10728arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10729{
10730 struct regcache *reg_cache = arm_insn_r->regcache;
10731
10732 uint32_t shift_imm = 0;
10733 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10734 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10735 uint32_t record_buf[8], record_buf_mem[8];
10736
10737 LONGEST s_word;
10738 ULONGEST u_regval[2];
10739
10740 if (bit (arm_insn_r->arm_insn, 4))
10741 return arm_record_media (arm_insn_r);
10742
10743 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10744 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10745
10746 /* Handle enhanced store insns and LDRD DSP insn,
10747 order begins according to addressing modes for store insns
10748 STRH insn. */
10749
10750 /* LDR or STR? */
10751 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10752 {
10753 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10754 /* The LDR insn is capable of doing branching. If MOV LR, PC
10755 precedes an LDR insn that loads R15,
10756 it emulates a branch and link insn, and hence we
10757 need to save CPSR and PC as well. */
10758 if (15 != reg_dest)
10759 {
10760 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10761 arm_insn_r->reg_rec_count = 1;
10762 }
10763 else
10764 {
10765 record_buf[0] = reg_dest;
10766 record_buf[1] = ARM_PS_REGNUM;
10767 arm_insn_r->reg_rec_count = 2;
10768 }
10769 }
10770 else
10771 {
10772 if (! bits (arm_insn_r->arm_insn, 4, 11))
10773 {
10774 /* Store insn, register offset and register pre-indexed,
10775 register post-indexed. */
10776 /* Get Rm. */
10777 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10778 /* Get Rn. */
10779 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10780 regcache_raw_read_unsigned (reg_cache, reg_src1,
10781 &u_regval[0]);
10782 regcache_raw_read_unsigned (reg_cache, reg_src2,
10783 &u_regval[1]);
10784 if (15 == reg_src2)
10785 {
10786 /* If R15 was used as Rn, the value is the current PC+8. */
10787 /* Pre-indexed mode doesn't reach here; illegal insn. */
10788 u_regval[0] = u_regval[0] + 8;
10789 }
10790 /* Calculate target store address, Rn +/- Rm, register offset. */
10791 /* U == 1. */
10792 if (bit (arm_insn_r->arm_insn, 23))
10793 {
10794 tgt_mem_addr = u_regval[0] + u_regval[1];
10795 }
10796 else
10797 {
10798 tgt_mem_addr = u_regval[1] - u_regval[0];
10799 }
10800
10801 switch (arm_insn_r->opcode)
10802 {
10803 /* STR. */
10804 case 8:
10805 case 12:
10806 /* STR. */
10807 case 9:
10808 case 13:
10809 /* STRT. */
10810 case 1:
10811 case 5:
10812 /* STR. */
10813 case 0:
10814 case 4:
10815 record_buf_mem[0] = 4;
10816 break;
10817
10818 /* STRB. */
10819 case 10:
10820 case 14:
10821 /* STRB. */
10822 case 11:
10823 case 15:
10824 /* STRBT. */
10825 case 3:
10826 case 7:
10827 /* STRB. */
10828 case 2:
10829 case 6:
10830 record_buf_mem[0] = 1;
10831 break;
10832
10833 default:
10834 gdb_assert_not_reached ("no decoding pattern found");
10835 break;
10836 }
10837 record_buf_mem[1] = tgt_mem_addr;
10838 arm_insn_r->mem_rec_count = 1;
10839
10840 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10841 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10842 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10843 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10844 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10845 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10846 )
10847 {
10848 /* Rn is going to be changed in pre-indexed mode and
10849 post-indexed mode as well. */
10850 record_buf[0] = reg_src2;
10851 arm_insn_r->reg_rec_count = 1;
10852 }
10853 }
10854 else
10855 {
10856 /* Store insn, scaled register offset; scaled pre-indexed. */
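/* For example, "str r0, [r1, r2, lsl #2]" stores the word at
r1 + (r2 << 2), so that address is recorded below; r1 itself is
recorded as well when the insn uses a write-back addressing mode.  */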
10857 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10858 /* Get Rm. */
10859 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10860 /* Get Rn. */
10861 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10862 /* Get shift_imm. */
10863 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10864 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10865 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10866 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10867 /* OFFSET_12 currently holds the shift type (bits 5 and 6); each case below replaces it with the computed offset. */
10868 switch (offset_12)
10869 {
10870 case 0:
10871 /* LSL: logical shift left. */
10872 offset_12 = u_regval[0] << shift_imm;
10873 break;
10874
10875 case 1:
10876 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10877 break;
10878
10879 case 2:
10880 if (!shift_imm)
10881 {
10882 if (bit (u_regval[0], 31))
10883 {
10884 offset_12 = 0xFFFFFFFF;
10885 }
10886 else
10887 {
10888 offset_12 = 0;
10889 }
10890 }
10891 else
10892 {
10893 /* This is arithmetic shift. */
10894 offset_12 = s_word >> shift_imm;
10895 }
10896 break;
10897
10898 case 3:
10899 if (!shift_imm)
10900 {
10901 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10902 &u_regval[1]);
10903 /* RRX: rotate the C flag (CPSR bit 29) into bit 31. */
10904 offset_12 = ((bit (u_regval[1], 29) << 31)
10905 | (u_regval[0] >> 1));
10906 }
10907 else
10908 {
10909 /* ROR: rotate right by SHIFT_IMM bits. */
10910 offset_12 = ((u_regval[0] >> shift_imm)
10911 | (u_regval[0] << (32 - shift_imm)));
10912 }
10913 break;
10914
10915 default:
10916 gdb_assert_not_reached ("no decoding pattern found");
10917 break;
10918 }
10919
10920 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10921 /* bit U set. */
10922 if (bit (arm_insn_r->arm_insn, 23))
10923 {
10924 tgt_mem_addr = u_regval[1] + offset_12;
10925 }
10926 else
10927 {
10928 tgt_mem_addr = u_regval[1] - offset_12;
10929 }
10930
10931 switch (arm_insn_r->opcode)
10932 {
10933 /* STR. */
10934 case 8:
10935 case 12:
10936 /* STR. */
10937 case 9:
10938 case 13:
10939 /* STRT. */
10940 case 1:
10941 case 5:
10942 /* STR. */
10943 case 0:
10944 case 4:
10945 record_buf_mem[0] = 4;
10946 break;
10947
10948 /* STRB. */
10949 case 10:
10950 case 14:
10951 /* STRB. */
10952 case 11:
10953 case 15:
10954 /* STRBT. */
10955 case 3:
10956 case 7:
10957 /* STRB. */
10958 case 2:
10959 case 6:
10960 record_buf_mem[0] = 1;
10961 break;
10962
10963 default:
10964 gdb_assert_not_reached ("no decoding pattern found");
10965 break;
10966 }
10967 record_buf_mem[1] = tgt_mem_addr;
10968 arm_insn_r->mem_rec_count = 1;
10969
10970 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10971 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10972 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10973 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10974 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10975 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10976 )
10977 {
10978 /* Rn is going to be changed in scaled register pre-indexed
10979 mode and scaled register post-indexed mode as well. */
10980 record_buf[0] = reg_src2;
10981 arm_insn_r->reg_rec_count = 1;
10982 }
10983 }
10984 }
10985
10986 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10987 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10988 return 0;
10989}
10990
10991/* Handle ARM mode instructions with opcode 100. */
10992
10993static int
10994arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10995{
10996 struct regcache *reg_cache = arm_insn_r->regcache;
10997 uint32_t register_count = 0, register_bits;
10998 uint32_t reg_base, addr_mode;
10999 uint32_t record_buf[24], record_buf_mem[48];
11000 uint32_t wback;
11001 ULONGEST u_regval;
11002
11003 /* Fetch the list of registers. */
11004 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11005 arm_insn_r->reg_rec_count = 0;
11006
11007 /* Fetch the base register that contains the address we are loading
11008 data from, or storing data to. */
11009 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11010
11011 /* Calculate wback. */
11012 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11013
11014 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11015 {
11016 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
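/* For example, "ldmia r0!, {r1, r2, r3}" loads r1, r2 and r3 and writes
the base back to r0, so those registers plus the CPSR (recorded
unconditionally here) are saved.  */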
11017
11018 /* Find out which registers are going to be loaded from memory. */
11019 while (register_bits)
11020 {
11021 if (register_bits & 0x00000001)
11022 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11023 register_bits = register_bits >> 1;
11024 register_count++;
11025 }
11026
11027
11028 /* If wback is true, also save the base register, which is going to be
11029 written to. */
11030 if (wback)
11031 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11032
11033 /* Save the CPSR register. */
11034 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11035 }
11036 else
11037 {
11038 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11039
11040 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11041
11042 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11043
11044 /* Find out how many registers are going to be stored to memory. */
11045 while (register_bits)
11046 {
11047 if (register_bits & 0x00000001)
11048 register_count++;
11049 register_bits = register_bits >> 1;
11050 }
11051
11052 switch (addr_mode)
11053 {
11054 /* STMDA (STMED): Decrement after. */
11055 case 0:
11056 record_buf_mem[1] = (uint32_t) u_regval
11057 - register_count * INT_REGISTER_SIZE + 4;
11058 break;
11059 /* STM (STMIA, STMEA): Increment after. */
11060 case 1:
11061 record_buf_mem[1] = (uint32_t) u_regval;
11062 break;
11063 /* STMDB (STMFD): Decrement before. */
11064 case 2:
11065 record_buf_mem[1] = (uint32_t) u_regval
11066 - register_count * INT_REGISTER_SIZE;
11067 break;
11068 /* STMIB (STMFA): Increment before. */
11069 case 3:
11070 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11071 break;
11072 default:
11073 gdb_assert_not_reached ("no decoding pattern found");
11074 break;
11075 }
11076
11077 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11078 arm_insn_r->mem_rec_count = 1;
11079
11080 /* If wback is true, also save the base register, which is going to be
11081 written to. */
11082 if (wback)
11083 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11084 }
11085
11086 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11087 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11088 return 0;
11089}
11090
11091/* Handling opcode 101 insns. */
11092
11093static int
11094arm_record_b_bl (insn_decode_record *arm_insn_r)
11095{
11096 uint32_t record_buf[8];
11097
11098 /* Handle B, BL, BLX(1) insns. */
11099 /* B simply branches so we do nothing here. */
11100 /* Note: BLX(1) doesn't fall here but instead it falls into
11101 the extension space. */
11102 if (bit (arm_insn_r->arm_insn, 24))
11103 {
11104 record_buf[0] = ARM_LR_REGNUM;
11105 arm_insn_r->reg_rec_count = 1;
11106 }
11107
11108 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11109
11110 return 0;
11111}
11112
11113static int
11114arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11115{
11116 printf_unfiltered (_("Process record does not support instruction "
11117 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11118 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11119
11120 return -1;
11121}
11122
11123/* Record handler for vector data transfer instructions. */
11124
11125static int
11126arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11127{
11128 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11129 uint32_t record_buf[4];
11130
11131 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11132 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11133 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11134 bit_l = bit (arm_insn_r->arm_insn, 20);
11135 bit_c = bit (arm_insn_r->arm_insn, 8);
11136
11137 /* Handle VMOV instruction. */
11138 if (bit_l && bit_c)
11139 {
11140 record_buf[0] = reg_t;
11141 arm_insn_r->reg_rec_count = 1;
11142 }
11143 else if (bit_l && !bit_c)
11144 {
11145 /* Handle VMOV instruction. */
11146 if (bits_a == 0x00)
11147 {
11148 record_buf[0] = reg_t;
11149 arm_insn_r->reg_rec_count = 1;
11150 }
11151 /* Handle VMRS instruction. */
11152 else if (bits_a == 0x07)
11153 {
11154 if (reg_t == 15)
11155 reg_t = ARM_PS_REGNUM;
11156
11157 record_buf[0] = reg_t;
11158 arm_insn_r->reg_rec_count = 1;
11159 }
11160 }
11161 else if (!bit_l && !bit_c)
11162 {
11163 /* Handle VMOV instruction. */
11164 if (bits_a == 0x00)
11165 {
11166 record_buf[0] = ARM_D0_REGNUM + reg_v;
11167
11168 arm_insn_r->reg_rec_count = 1;
11169 }
11170 /* Handle VMSR instruction. */
11171 else if (bits_a == 0x07)
11172 {
11173 record_buf[0] = ARM_FPSCR_REGNUM;
11174 arm_insn_r->reg_rec_count = 1;
11175 }
11176 }
11177 else if (!bit_l && bit_c)
11178 {
11179 /* Handle VMOV instruction. */
11180 if (!(bits_a & 0x04))
11181 {
11182 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11183 + ARM_D0_REGNUM;
11184 arm_insn_r->reg_rec_count = 1;
11185 }
11186 /* Handle VDUP instruction. */
11187 else
11188 {
11189 if (bit (arm_insn_r->arm_insn, 21))
11190 {
11191 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11192 record_buf[0] = reg_v + ARM_D0_REGNUM;
11193 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11194 arm_insn_r->reg_rec_count = 2;
11195 }
11196 else
11197 {
11198 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11199 record_buf[0] = reg_v + ARM_D0_REGNUM;
11200 arm_insn_r->reg_rec_count = 1;
11201 }
11202 }
11203 }
11204
11205 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11206 return 0;
11207}
11208
11209/* Record handler for extension register load/store instructions. */
11210
11211static int
11212arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11213{
11214 uint32_t opcode, single_reg;
11215 uint8_t op_vldm_vstm;
11216 uint32_t record_buf[8], record_buf_mem[128];
11217 ULONGEST u_regval = 0;
11218
11219 struct regcache *reg_cache = arm_insn_r->regcache;
11220
11221 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11222 single_reg = !bit (arm_insn_r->arm_insn, 8);
11223 op_vldm_vstm = opcode & 0x1b;
11224
11225 /* Handle VMOV instructions. */
11226 if ((opcode & 0x1e) == 0x04)
11227 {
11228 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11229 {
11230 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11231 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11232 arm_insn_r->reg_rec_count = 2;
11233 }
11234 else
11235 {
11236 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11237 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11238
11239 if (single_reg)
11240 {
11241 /* The first S register number m is REG_M:M (M is bit 5),
11242 the corresponding D register number is REG_M:M / 2, which
11243 is REG_M. */
11244 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11245 /* The second S register number is REG_M:M + 1, the
11246 corresponding D register number is (REG_M:M + 1) / 2.
11247 IOW, if bit M is 1, the first and second S registers
11248 are mapped to different D registers, otherwise, they are
11249 in the same D register. */
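/* For example, "vmov s5, s6, r2, r3" has REG_M:M equal to 5, so the
first S register lives in d2; since M is set, s6 lives in d3 and
both d2 and d3 are recorded.  */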
11250 if (bit_m)
11251 {
11252 record_buf[arm_insn_r->reg_rec_count++]
11253 = ARM_D0_REGNUM + reg_m + 1;
11254 }
11255 }
11256 else
11257 {
11258 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11259 arm_insn_r->reg_rec_count = 1;
11260 }
11261 }
11262 }
11263 /* Handle VSTM and VPUSH instructions. */
11264 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11265 || op_vldm_vstm == 0x12)
11266 {
11267 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11268 uint32_t memory_index = 0;
11269
11270 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11271 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11272 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11273 imm_off32 = imm_off8 << 2;
11274 memory_count = imm_off8;
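/* For example, "vstmia r0, {s4-s7}" has an IMM_OFF8 of 4 and SINGLE_REG
set, so the four words starting at the value of r0 are recorded; no
register is recorded because there is no write-back.  */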
11275
11276 if (bit (arm_insn_r->arm_insn, 23))
11277 start_address = u_regval;
11278 else
11279 start_address = u_regval - imm_off32;
11280
11281 if (bit (arm_insn_r->arm_insn, 21))
11282 {
11283 record_buf[0] = reg_rn;
11284 arm_insn_r->reg_rec_count = 1;
11285 }
11286
11287 while (memory_count > 0)
11288 {
11289 if (single_reg)
11290 {
11291 record_buf_mem[memory_index] = 4;
11292 record_buf_mem[memory_index + 1] = start_address;
11293 start_address = start_address + 4;
11294 memory_index = memory_index + 2;
11295 }
11296 else
11297 {
11298 record_buf_mem[memory_index] = 4;
11299 record_buf_mem[memory_index + 1] = start_address;
11300 record_buf_mem[memory_index + 2] = 4;
11301 record_buf_mem[memory_index + 3] = start_address + 4;
11302 start_address = start_address + 8;
11303 memory_index = memory_index + 4;
11304 }
11305 memory_count--;
11306 }
11307 arm_insn_r->mem_rec_count = (memory_index >> 1);
11308 }
11309 /* Handle VLDM instructions. */
11310 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11311 || op_vldm_vstm == 0x13)
11312 {
11313 uint32_t reg_count, reg_vd;
11314 uint32_t reg_index = 0;
11315 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11316
11317 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11318 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11319
11320 /* REG_VD is the first D register number. If the instruction
11321 loads memory to S registers (SINGLE_REG is TRUE), the register
11322 number is (REG_VD << 1 | bit D), so the corresponding D
11323 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11324 if (!single_reg)
11325 reg_vd = reg_vd | (bit_d << 4);
11326
11327 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11328 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11329
11330 /* If the instruction loads memory to D register, REG_COUNT should
11331 be divided by 2, according to the ARM Architecture Reference
11332 Manual. If the instruction loads memory to S register, divide by
11333 2 as well because two S registers are mapped to D register. */
11334 reg_count = reg_count / 2;
11335 if (single_reg && bit_d)
11336 {
11337 /* Increase the register count if S register list starts from
11338 an odd number (bit d is one). */
11339 reg_count++;
11340 }
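/* For example, "vldm r2, {s5-s8}" has REG_VD = 2, bit D set and an
IMM8 of 4; 4 / 2 = 2 plus one for the odd start gives 3, so d2, d3
and d4 are recorded.  */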
11341
11342 while (reg_count > 0)
11343 {
11344 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11345 reg_count--;
11346 }
11347 arm_insn_r->reg_rec_count = reg_index;
11348 }
11349 /* VSTR Vector store register. */
11350 else if ((opcode & 0x13) == 0x10)
11351 {
11352 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11353 uint32_t memory_index = 0;
11354
11355 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11356 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11357 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11358 imm_off32 = imm_off8 << 2;
11359
11360 if (bit (arm_insn_r->arm_insn, 23))
11361 start_address = u_regval + imm_off32;
11362 else
11363 start_address = u_regval - imm_off32;
11364
11365 if (single_reg)
11366 {
11367 record_buf_mem[memory_index] = 4;
11368 record_buf_mem[memory_index + 1] = start_address;
11369 arm_insn_r->mem_rec_count = 1;
11370 }
11371 else
11372 {
11373 record_buf_mem[memory_index] = 4;
11374 record_buf_mem[memory_index + 1] = start_address;
11375 record_buf_mem[memory_index + 2] = 4;
11376 record_buf_mem[memory_index + 3] = start_address + 4;
11377 arm_insn_r->mem_rec_count = 2;
11378 }
11379 }
11380 /* VLDR Vector load register. */
11381 else if ((opcode & 0x13) == 0x11)
11382 {
11383 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11384
11385 if (!single_reg)
11386 {
11387 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11388 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11389 }
11390 else
11391 {
11392 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11393 /* Record register D rather than pseudo register S. */
11394 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11395 }
11396 arm_insn_r->reg_rec_count = 1;
11397 }
11398
11399 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11400 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11401 return 0;
11402}
11403
11404/* Record handler for arm/thumb mode VFP data processing instructions. */
11405
11406static int
11407arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11408{
11409 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11410 uint32_t record_buf[4];
11411 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11412 enum insn_types curr_insn_type = INSN_INV;
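/* The INSN_T* classification chosen below selects how the destination is
recorded in the final switch: INSN_T0 records two consecutive D registers,
INSN_T1 records one double-precision register (number D:Vd), INSN_T2
records the single-precision destination (number Vd:D), and INSN_T3
records only the FPSCR (the compare insns).  */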
11413
11414 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11415 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11416 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11417 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11418 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11419 bit_d = bit (arm_insn_r->arm_insn, 22);
11420 /* Mask off the "D" bit. */
11421 opc1 = opc1 & ~0x04;
11422
11423 /* Handle VMLA, VMLS. */
11424 if (opc1 == 0x00)
11425 {
11426 if (bit (arm_insn_r->arm_insn, 10))
11427 {
11428 if (bit (arm_insn_r->arm_insn, 6))
11429 curr_insn_type = INSN_T0;
11430 else
11431 curr_insn_type = INSN_T1;
11432 }
11433 else
11434 {
11435 if (dp_op_sz)
11436 curr_insn_type = INSN_T1;
11437 else
11438 curr_insn_type = INSN_T2;
11439 }
11440 }
11441 /* Handle VNMLA, VNMLS, VNMUL. */
11442 else if (opc1 == 0x01)
11443 {
11444 if (dp_op_sz)
11445 curr_insn_type = INSN_T1;
11446 else
11447 curr_insn_type = INSN_T2;
11448 }
11449 /* Handle VMUL. */
11450 else if (opc1 == 0x02 && !(opc3 & 0x01))
11451 {
11452 if (bit (arm_insn_r->arm_insn, 10))
11453 {
11454 if (bit (arm_insn_r->arm_insn, 6))
11455 curr_insn_type = INSN_T0;
11456 else
11457 curr_insn_type = INSN_T1;
11458 }
11459 else
11460 {
11461 if (dp_op_sz)
11462 curr_insn_type = INSN_T1;
11463 else
11464 curr_insn_type = INSN_T2;
11465 }
11466 }
11467 /* Handle VADD, VSUB. */
11468 else if (opc1 == 0x03)
11469 {
11470 if (!bit (arm_insn_r->arm_insn, 9))
11471 {
11472 if (bit (arm_insn_r->arm_insn, 6))
11473 curr_insn_type = INSN_T0;
11474 else
11475 curr_insn_type = INSN_T1;
11476 }
11477 else
11478 {
11479 if (dp_op_sz)
11480 curr_insn_type = INSN_T1;
11481 else
11482 curr_insn_type = INSN_T2;
11483 }
11484 }
11485 /* Handle VDIV. */
11486 else if (opc1 == 0x08)
11487 {
11488 if (dp_op_sz)
11489 curr_insn_type = INSN_T1;
11490 else
11491 curr_insn_type = INSN_T2;
11492 }
11493 /* Handle all other vfp data processing instructions. */
11494 else if (opc1 == 0x0b)
11495 {
11496 /* Handle VMOV. */
11497 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11498 {
11499 if (bit (arm_insn_r->arm_insn, 4))
11500 {
11501 if (bit (arm_insn_r->arm_insn, 6))
11502 curr_insn_type = INSN_T0;
11503 else
11504 curr_insn_type = INSN_T1;
11505 }
11506 else
11507 {
11508 if (dp_op_sz)
11509 curr_insn_type = INSN_T1;
11510 else
11511 curr_insn_type = INSN_T2;
11512 }
11513 }
11514 /* Handle VNEG and VABS. */
11515 else if ((opc2 == 0x01 && opc3 == 0x01)
11516 || (opc2 == 0x00 && opc3 == 0x03))
11517 {
11518 if (!bit (arm_insn_r->arm_insn, 11))
11519 {
11520 if (bit (arm_insn_r->arm_insn, 6))
11521 curr_insn_type = INSN_T0;
11522 else
11523 curr_insn_type = INSN_T1;
11524 }
11525 else
11526 {
11527 if (dp_op_sz)
11528 curr_insn_type = INSN_T1;
11529 else
11530 curr_insn_type = INSN_T2;
11531 }
11532 }
11533 /* Handle VSQRT. */
11534 else if (opc2 == 0x01 && opc3 == 0x03)
11535 {
11536 if (dp_op_sz)
11537 curr_insn_type = INSN_T1;
11538 else
11539 curr_insn_type = INSN_T2;
11540 }
11541 /* Handle VCVT. */
11542 else if (opc2 == 0x07 && opc3 == 0x03)
11543 {
11544 if (!dp_op_sz)
11545 curr_insn_type = INSN_T1;
11546 else
11547 curr_insn_type = INSN_T2;
11548 }
11549 else if (opc3 & 0x01)
11550 {
11551 /* Handle VCVT. */
11552 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11553 {
11554 if (!bit (arm_insn_r->arm_insn, 18))
11555 curr_insn_type = INSN_T2;
11556 else
11557 {
11558 if (dp_op_sz)
11559 curr_insn_type = INSN_T1;
11560 else
11561 curr_insn_type = INSN_T2;
11562 }
11563 }
11564 /* Handle VCVT. */
11565 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11566 {
11567 if (dp_op_sz)
11568 curr_insn_type = INSN_T1;
11569 else
11570 curr_insn_type = INSN_T2;
11571 }
11572 /* Handle VCVTB, VCVTT. */
11573 else if ((opc2 & 0x0e) == 0x02)
11574 curr_insn_type = INSN_T2;
11575 /* Handle VCMP, VCMPE. */
11576 else if ((opc2 & 0x0e) == 0x04)
11577 curr_insn_type = INSN_T3;
11578 }
11579 }
11580
11581 switch (curr_insn_type)
11582 {
11583 case INSN_T0:
11584 reg_vd = reg_vd | (bit_d << 4);
11585 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11586 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11587 arm_insn_r->reg_rec_count = 2;
11588 break;
11589
11590 case INSN_T1:
11591 reg_vd = reg_vd | (bit_d << 4);
11592 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11593 arm_insn_r->reg_rec_count = 1;
11594 break;
11595
11596 case INSN_T2:
11597 reg_vd = (reg_vd << 1) | bit_d;
11598 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11599 arm_insn_r->reg_rec_count = 1;
11600 break;
11601
11602 case INSN_T3:
11603 record_buf[0] = ARM_FPSCR_REGNUM;
11604 arm_insn_r->reg_rec_count = 1;
11605 break;
11606
11607 default:
11608 gdb_assert_not_reached ("no decoding pattern found");
11609 break;
11610 }
11611
11612 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11613 return 0;
11614}
11615
11616/* Handling opcode 110 insns. */
11617
11618static int
11619arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11620{
11621 uint32_t op1, op1_ebit, coproc;
11622
11623 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11624 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11625 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11626
11627 if ((coproc & 0x0e) == 0x0a)
11628 {
11629 /* Handle extension register ld/st instructions. */
11630 if (!(op1 & 0x20))
11631 return arm_record_exreg_ld_st_insn (arm_insn_r);
11632
11633 /* 64-bit transfers between arm core and extension registers. */
11634 if ((op1 & 0x3e) == 0x04)
11635 return arm_record_exreg_ld_st_insn (arm_insn_r);
11636 }
11637 else
11638 {
11639 /* Handle coprocessor ld/st instructions. */
11640 if (!(op1 & 0x3a))
11641 {
11642 /* Store. */
11643 if (!op1_ebit)
11644 return arm_record_unsupported_insn (arm_insn_r);
11645 else
11646 /* Load. */
11647 return arm_record_unsupported_insn (arm_insn_r);
11648 }
11649
11650 /* Move to coprocessor from two arm core registers. */
11651 if (op1 == 0x4)
11652 return arm_record_unsupported_insn (arm_insn_r);
11653
11654 /* Move to two arm core registers from coprocessor. */
11655 if (op1 == 0x5)
11656 {
11657 uint32_t reg_t[2];
11658
11659 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11660 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11661 arm_insn_r->reg_rec_count = 2;
11662
11663 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11664 return 0;
11665 }
11666 }
11667 return arm_record_unsupported_insn (arm_insn_r);
11668}
11669
11670/* Handling opcode 111 insns. */
11671
11672static int
11673arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11674{
11675 uint32_t op, op1_ebit, coproc, bits_24_25;
11676 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11677 struct regcache *reg_cache = arm_insn_r->regcache;
11678
11679 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11680 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11681 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11682 op = bit (arm_insn_r->arm_insn, 4);
11683 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11684
11685 /* Handle arm SWI/SVC system call instructions. */
11686 if (bits_24_25 == 0x3)
11687 {
11688 if (tdep->arm_syscall_record != NULL)
11689 {
11690 ULONGEST svc_operand, svc_number;
11691
11692 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11693
11694 if (svc_operand) /* OABI. */
11695 svc_number = svc_operand - 0x900000;
11696 else /* EABI. */
11697 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11698
11699 return tdep->arm_syscall_record (reg_cache, svc_number);
11700 }
11701 else
11702 {
11703 printf_unfiltered (_("no syscall record support\n"));
11704 return -1;
11705 }
11706 }
11707 else if (bits_24_25 == 0x02)
11708 {
11709 if (op)
11710 {
11711 if ((coproc & 0x0e) == 0x0a)
11712 {
11713 /* 8, 16, and 32-bit transfer */
11714 return arm_record_vdata_transfer_insn (arm_insn_r);
11715 }
11716 else
11717 {
11718 if (op1_ebit)
11719 {
11720 /* MRC, MRC2 */
11721 uint32_t record_buf[1];
11722
11723 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11724 if (record_buf[0] == 15)
11725 record_buf[0] = ARM_PS_REGNUM;
11726
11727 arm_insn_r->reg_rec_count = 1;
11728 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11729 record_buf);
11730 return 0;
11731 }
11732 else
11733 {
11734 /* MCR, MCR2 */
11735 return -1;
11736 }
11737 }
11738 }
11739 else
11740 {
11741 if ((coproc & 0x0e) == 0x0a)
11742 {
11743 /* VFP data-processing instructions. */
11744 return arm_record_vfp_data_proc_insn (arm_insn_r);
11745 }
11746 else
11747 {
11748 /* CDP, CDP2 */
11749 return -1;
11750 }
11751 }
11752 }
11753 else
11754 {
11755 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11756
11757 if (op1 == 5)
11758 {
11759 if ((coproc & 0x0e) != 0x0a)
11760 {
11761 /* MRRC, MRRC2 */
11762 return -1;
11763 }
11764 }
11765 else if (op1 == 4 || op1 == 5)
11766 {
11767 if ((coproc & 0x0e) == 0x0a)
11768 {
11769 /* 64-bit transfers between ARM core and extension registers. */
11770 return -1;
11771 }
11772 else if (op1 == 4)
11773 {
11774 /* MCRR, MCRR2 */
11775 return -1;
11776 }
11777 }
11778 else if (op1 == 0 || op1 == 1)
11779 {
11780 /* UNDEFINED */
11781 return -1;
11782 }
11783 else
11784 {
11785 if ((coproc & 0x0e) == 0x0a)
11786 {
11787 /* Extension register load/store */
11788 }
11789 else
11790 {
11791 /* STC, STC2, LDC, LDC2 */
11792 }
11793 return -1;
11794 }
11795 }
11796
11797 return -1;
11798}
11799
11800/* Handling opcode 000 insns. */
11801
11802static int
11803thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11804{
11805 uint32_t record_buf[8];
11806 uint32_t reg_src1 = 0;
11807
11808 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11809
11810 record_buf[0] = ARM_PS_REGNUM;
11811 record_buf[1] = reg_src1;
11812 thumb_insn_r->reg_rec_count = 2;
11813
11814 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11815
11816 return 0;
11817}
11818
11819
11820/* Handling opcode 001 insns. */
11821
11822static int
11823thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11824{
11825 uint32_t record_buf[8];
11826 uint32_t reg_src1 = 0;
11827
11828 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11829
11830 record_buf[0] = ARM_PS_REGNUM;
11831 record_buf[1] = reg_src1;
11832 thumb_insn_r->reg_rec_count = 2;
11833
11834 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11835
11836 return 0;
11837}
11838
11839/* Handling opcode 010 insns. */
11840
11841static int
11842thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11843{
11844 struct regcache *reg_cache = thumb_insn_r->regcache;
11845 uint32_t record_buf[8], record_buf_mem[8];
11846
11847 uint32_t reg_src1 = 0, reg_src2 = 0;
11848 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11849
11850 ULONGEST u_regval[2] = {0};
11851
11852 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11853
11854 if (bit (thumb_insn_r->arm_insn, 12))
11855 {
11856 /* Handle load/store register offset. */
11857 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11858
11859 if (in_inclusive_range (opB, 4U, 7U))
11860 {
11861 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11862 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11863 record_buf[0] = reg_src1;
11864 thumb_insn_r->reg_rec_count = 1;
11865 }
11866 else if (in_inclusive_range (opB, 0U, 2U))
11867 {
11868 /* STR(2), STRB(2), STRH(2). */
11869 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11870 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11871 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11872 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11873 if (0 == opB)
11874 record_buf_mem[0] = 4; /* STR (2). */
11875 else if (2 == opB)
11876 record_buf_mem[0] = 1; /* STRB (2). */
11877 else if (1 == opB)
11878 record_buf_mem[0] = 2; /* STRH (2). */
11879 record_buf_mem[1] = u_regval[0] + u_regval[1];
11880 thumb_insn_r->mem_rec_count = 1;
11881 }
11882 }
11883 else if (bit (thumb_insn_r->arm_insn, 11))
11884 {
11885 /* Handle load from literal pool. */
11886 /* LDR(3). */
11887 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11888 record_buf[0] = reg_src1;
11889 thumb_insn_r->reg_rec_count = 1;
11890 }
11891 else if (opcode1)
11892 {
11893 /* Special data instructions and branch and exchange */
11894 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11895 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11896 if ((3 == opcode2) && (!opcode3))
11897 {
11898 /* Branch with exchange. */
11899 record_buf[0] = ARM_PS_REGNUM;
11900 thumb_insn_r->reg_rec_count = 1;
11901 }
11902 else
11903 {
11904 /* Format 8; special data processing insns. */
11905 record_buf[0] = ARM_PS_REGNUM;
11906 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11907 | bits (thumb_insn_r->arm_insn, 0, 2));
11908 thumb_insn_r->reg_rec_count = 2;
11909 }
11910 }
11911 else
11912 {
11913 /* Format 5; data processing insns. */
11914 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11915 if (bit (thumb_insn_r->arm_insn, 7))
11916 {
11917 reg_src1 = reg_src1 + 8;
11918 }
11919 record_buf[0] = ARM_PS_REGNUM;
11920 record_buf[1] = reg_src1;
11921 thumb_insn_r->reg_rec_count = 2;
11922 }
11923
11924 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11925 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11926 record_buf_mem);
11927
11928 return 0;
11929}
11930
11931/* Handling opcode 001 insns. */
11932
11933static int
11934thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11935{
11936 struct regcache *reg_cache = thumb_insn_r->regcache;
11937 uint32_t record_buf[8], record_buf_mem[8];
11938
11939 uint32_t reg_src1 = 0;
11940 uint32_t opcode = 0, immed_5 = 0;
11941
11942 ULONGEST u_regval = 0;
11943
11944 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11945
11946 if (opcode)
11947 {
11948 /* LDR(1). */
11949 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11950 record_buf[0] = reg_src1;
11951 thumb_insn_r->reg_rec_count = 1;
11952 }
11953 else
11954 {
11955 /* STR(1). */
11956 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11957 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11958 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11959 record_buf_mem[0] = 4;
11960 record_buf_mem[1] = u_regval + (immed_5 * 4);
11961 thumb_insn_r->mem_rec_count = 1;
11962 }
11963
11964 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11965 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11966 record_buf_mem);
11967
11968 return 0;
11969}
11970
11971/* Handling opcode 100 insns. */
11972
11973static int
11974thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11975{
11976 struct regcache *reg_cache = thumb_insn_r->regcache;
11977 uint32_t record_buf[8], record_buf_mem[8];
11978
11979 uint32_t reg_src1 = 0;
11980 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11981
11982 ULONGEST u_regval = 0;
11983
11984 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11985
11986 if (3 == opcode)
11987 {
11988 /* LDR(4). */
11989 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11990 record_buf[0] = reg_src1;
11991 thumb_insn_r->reg_rec_count = 1;
11992 }
11993 else if (1 == opcode)
11994 {
11995 /* LDRH(1). */
11996 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11997 record_buf[0] = reg_src1;
11998 thumb_insn_r->reg_rec_count = 1;
11999 }
12000 else if (2 == opcode)
12001 {
12002 /* STR(3). */
12003 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12004 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12005 record_buf_mem[0] = 4;
12006 record_buf_mem[1] = u_regval + (immed_8 * 4);
12007 thumb_insn_r->mem_rec_count = 1;
12008 }
12009 else if (0 == opcode)
12010 {
12011 /* STRH(1). */
12012 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12013 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12014 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12015 record_buf_mem[0] = 2;
12016 record_buf_mem[1] = u_regval + (immed_5 * 2);
12017 thumb_insn_r->mem_rec_count = 1;
12018 }
12019
12020 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12021 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12022 record_buf_mem);
12023
12024 return 0;
12025}
12026
12027/* Handling opcode 101 insns. */
12028
12029static int
12030thumb_record_misc (insn_decode_record *thumb_insn_r)
12031{
12032 struct regcache *reg_cache = thumb_insn_r->regcache;
12033
12034 uint32_t opcode = 0;
12035 uint32_t register_bits = 0, register_count = 0;
12036 uint32_t index = 0, start_address = 0;
12037 uint32_t record_buf[24], record_buf_mem[48];
12038 uint32_t reg_src1;
12039
12040 ULONGEST u_regval = 0;
12041
12042 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12043
12044 if (opcode == 0 || opcode == 1)
12045 {
12046 /* ADR and ADD (SP plus immediate) */
12047
12048 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12049 record_buf[0] = reg_src1;
12050 thumb_insn_r->reg_rec_count = 1;
12051 }
12052 else
12053 {
12054 /* Miscellaneous 16-bit instructions */
12055 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12056
12057 switch (opcode2)
12058 {
12059 case 6:
12060 /* SETEND and CPS */
12061 break;
12062 case 0:
12063 /* ADD/SUB (SP plus immediate) */
12064 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12065 record_buf[0] = ARM_SP_REGNUM;
12066 thumb_insn_r->reg_rec_count = 1;
12067 break;
12068 case 1: /* fall through */
12069 case 3: /* fall through */
12070 case 9: /* fall through */
12071 case 11:
12072 /* CBNZ, CBZ */
12073 break;
12074 case 2:
12075 /* SXTH, SXTB, UXTH, UXTB */
12076 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12077 thumb_insn_r->reg_rec_count = 1;
12078 break;
12079 case 4: /* fall through */
12080 case 5:
12081 /* PUSH. */
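/* For example, "push {r4, r5}" has REGISTER_BITS 0x30, so the two words
at SP - 8 and SP - 4 are recorded along with the SP itself.  */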
12082 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12083 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12084 while (register_bits)
12085 {
12086 if (register_bits & 0x00000001)
12087 register_count++;
12088 register_bits = register_bits >> 1;
12089 }
12090 start_address = u_regval
12091 - (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12092 thumb_insn_r->mem_rec_count = register_count;
12093 while (register_count)
12094 {
12095 record_buf_mem[(register_count * 2) - 1] = start_address;
12096 record_buf_mem[(register_count * 2) - 2] = 4;
12097 start_address = start_address + 4;
12098 register_count--;
12099 }
12100 record_buf[0] = ARM_SP_REGNUM;
12101 thumb_insn_r->reg_rec_count = 1;
12102 break;
12103 case 10:
12104 /* REV, REV16, REVSH */
12105 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12106 thumb_insn_r->reg_rec_count = 1;
12107 break;
12108 case 12: /* fall through */
12109 case 13:
12110 /* POP. */
12111 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12112 while (register_bits)
12113 {
12114 if (register_bits & 0x00000001)
12115 record_buf[index++] = register_count;
12116 register_bits = register_bits >> 1;
12117 register_count++;
12118 }
12119 record_buf[index++] = ARM_PS_REGNUM;
12120 record_buf[index++] = ARM_SP_REGNUM;
12121 thumb_insn_r->reg_rec_count = index;
12122 break;
12123 case 0xe:
12124 /* BKPT insn. */
12125 /* Handle the enhanced software breakpoint insn, BKPT. */
12126 /* The CPSR is changed so that execution continues in ARM state with
12127 normal interrupts disabled, entering abort mode. */
12128 /* The PC is set according to the high vector configuration. */
12129 /* When the user hits the breakpoint and then reverses execution, we
12130 need to go back to the previous CPSR and program counter. */
12131 record_buf[0] = ARM_PS_REGNUM;
12132 record_buf[1] = ARM_LR_REGNUM;
12133 thumb_insn_r->reg_rec_count = 2;
12134 /* We need to save SPSR value, which is not yet done. */
12135 printf_unfiltered (_("Process record does not support instruction "
12136 "0x%0x at address %s.\n"),
12137 thumb_insn_r->arm_insn,
12138 paddress (thumb_insn_r->gdbarch,
12139 thumb_insn_r->this_addr));
12140 return -1;
12141
12142 case 0xf:
12143 /* If-Then, and hints */
12144 break;
12145 default:
12146 return -1;
12147 };
12148 }
12149
12150 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12151 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12152 record_buf_mem);
12153
12154 return 0;
12155}
12156
12157/* Handling opcode 110 insns. */
12158
12159static int
12160thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12161{
12162 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12163 struct regcache *reg_cache = thumb_insn_r->regcache;
12164
12165 uint32_t ret = 0; /* Function return value: -1 on record failure, 0 on success. */
12166 uint32_t reg_src1 = 0;
12167 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12168 uint32_t index = 0, start_address = 0;
12169 uint32_t record_buf[24], record_buf_mem[48];
12170
12171 ULONGEST u_regval = 0;
12172
12173 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12174 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12175
12176 if (1 == opcode2)
12177 {
12178
12179 /* LDMIA. */
12180 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12181 /* Get Rn. */
12182 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12183 while (register_bits)
12184 {
12185 if (register_bits & 0x00000001)
12186 record_buf[index++] = register_count;
12187 register_bits = register_bits >> 1;
12188 register_count++;
12189 }
12190 record_buf[index++] = reg_src1;
12191 thumb_insn_r->reg_rec_count = index;
12192 }
12193 else if (0 == opcode2)
12194 {
12195 /* Handle STMIA. */
12196 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12197 /* Get Rn. */
12198 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12199 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12200 while (register_bits)
12201 {
12202 if (register_bits & 0x00000001)
12203 register_count++;
12204 register_bits = register_bits >> 1;
12205 }
12206 start_address = u_regval;
12207 thumb_insn_r->mem_rec_count = register_count;
12208 while (register_count)
12209 {
12210 record_buf_mem[(register_count * 2) - 1] = start_address;
12211 record_buf_mem[(register_count * 2) - 2] = 4;
12212 start_address = start_address + 4;
12213 register_count--;
12214 }
12215 }
12216 else if (0x1F == opcode1)
12217 {
12218 /* Handle arm syscall insn. */
12219 if (tdep->arm_syscall_record != NULL)
12220 {
12221 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12222 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12223 }
12224 else
12225 {
12226 printf_unfiltered (_("no syscall record support\n"));
12227 return -1;
12228 }
12229 }
12230
12231 /* B(1), the conditional branch, is automatically taken care of in
12232 process_record, as the PC is saved there. */
12233
12234 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12235 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12236 record_buf_mem);
12237
12238 return ret;
12239}
12240
12241/* Handling opcode 111 insns. */
12242
12243static int
12244thumb_record_branch (insn_decode_record *thumb_insn_r)
12245{
12246 uint32_t record_buf[8];
12247 uint32_t bits_h = 0;
12248
12249 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12250
12251 if (2 == bits_h || 3 == bits_h)
12252 {
12253 /* BL */
12254 record_buf[0] = ARM_LR_REGNUM;
12255 thumb_insn_r->reg_rec_count = 1;
12256 }
12257 else if (1 == bits_h)
12258 {
12259 /* BLX(1). */
12260 record_buf[0] = ARM_PS_REGNUM;
12261 record_buf[1] = ARM_LR_REGNUM;
12262 thumb_insn_r->reg_rec_count = 2;
12263 }
12264
12265 /* B(2) is automatically taken care of in process_record, as the PC
12266 is saved there. */
12267
12268 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12269
12270 return 0;
12271}
12272
12273/* Handler for thumb2 load/store multiple instructions. */
12274
12275static int
12276thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12277{
12278 struct regcache *reg_cache = thumb2_insn_r->regcache;
12279
12280 uint32_t reg_rn, op;
12281 uint32_t register_bits = 0, register_count = 0;
12282 uint32_t index = 0, start_address = 0;
12283 uint32_t record_buf[24], record_buf_mem[48];
12284
12285 ULONGEST u_regval = 0;
12286
12287 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12288 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12289
12290 if (0 == op || 3 == op)
12291 {
12292 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12293 {
12294 /* Handle RFE instruction. */
12295 record_buf[0] = ARM_PS_REGNUM;
12296 thumb2_insn_r->reg_rec_count = 1;
12297 }
12298 else
12299 {
12300 /* Handle SRS instruction after reading banked SP. */
12301 return arm_record_unsupported_insn (thumb2_insn_r);
12302 }
12303 }
12304 else if (1 == op || 2 == op)
12305 {
12306 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12307 {
12308 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12309 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12310 while (register_bits)
12311 {
12312 if (register_bits & 0x00000001)
12313 record_buf[index++] = register_count;
12314
12315 register_count++;
12316 register_bits = register_bits >> 1;
12317 }
12318 record_buf[index++] = reg_rn;
12319 record_buf[index++] = ARM_PS_REGNUM;
12320 thumb2_insn_r->reg_rec_count = index;
12321 }
12322 else
12323 {
12324 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
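/* For example, "stmdb r1!, {r2, r3}" stores the two words at r1 - 8 and
r1 - 4, so those addresses are recorded together with r1 and the CPSR.  */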
12325 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12326 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12327 while (register_bits)
12328 {
12329 if (register_bits & 0x00000001)
12330 register_count++;
12331
12332 register_bits = register_bits >> 1;
12333 }
12334
12335 if (1 == op)
12336 {
12337 /* Start address calculation for STM/STMIA/STMEA. */
12338 start_address = u_regval;
12339 }
12340 else if (2 == op)
12341 {
12342 /* Start address calculation for STMDB/STMFD. */
12343 start_address = u_regval - register_count * 4;
12344 }
12345
12346 thumb2_insn_r->mem_rec_count = register_count;
12347 while (register_count)
12348 {
12349 record_buf_mem[register_count * 2 - 1] = start_address;
12350 record_buf_mem[register_count * 2 - 2] = 4;
12351 start_address = start_address + 4;
12352 register_count--;
12353 }
12354 record_buf[0] = reg_rn;
12355 record_buf[1] = ARM_PS_REGNUM;
12356 thumb2_insn_r->reg_rec_count = 2;
12357 }
12358 }
12359
12360 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12361 record_buf_mem);
12362 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12363 record_buf);
12364 return ARM_RECORD_SUCCESS;
12365}
12366
12367/* Handler for thumb2 load/store (dual/exclusive) and table branch
12368 instructions. */
12369
12370static int
12371thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12372{
12373 struct regcache *reg_cache = thumb2_insn_r->regcache;
12374
12375 uint32_t reg_rd, reg_rn, offset_imm;
12376 uint32_t reg_dest1, reg_dest2;
12377 uint32_t address, offset_addr;
12378 uint32_t record_buf[8], record_buf_mem[8];
12379 uint32_t op1, op2, op3;
12380
12381 ULONGEST u_regval[2];
12382
12383 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12384 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12385 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12386
12387 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12388 {
12389 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12390 {
12391 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12392 record_buf[0] = reg_dest1;
12393 record_buf[1] = ARM_PS_REGNUM;
12394 thumb2_insn_r->reg_rec_count = 2;
12395 }
12396
12397 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12398 {
12399 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12400 record_buf[2] = reg_dest2;
12401 thumb2_insn_r->reg_rec_count = 3;
12402 }
12403 }
12404 else
12405 {
12406 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12407 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12408
12409 if (0 == op1 && 0 == op2)
12410 {
12411 /* Handle STREX. */
12412 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12413 address = u_regval[0] + (offset_imm * 4);
12414 record_buf_mem[0] = 4;
12415 record_buf_mem[1] = address;
12416 thumb2_insn_r->mem_rec_count = 1;
12417 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12418 record_buf[0] = reg_rd;
12419 thumb2_insn_r->reg_rec_count = 1;
12420 }
12421 else if (1 == op1 && 0 == op2)
12422 {
12423 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12424 record_buf[0] = reg_rd;
12425 thumb2_insn_r->reg_rec_count = 1;
12426 address = u_regval[0];
12427 record_buf_mem[1] = address;
12428
12429 if (4 == op3)
12430 {
12431 /* Handle STREXB. */
12432 record_buf_mem[0] = 1;
12433 thumb2_insn_r->mem_rec_count = 1;
12434 }
12435 else if (5 == op3)
12436 {
12437 /* Handle STREXH. */
12438 record_buf_mem[0] = 2;
12439 thumb2_insn_r->mem_rec_count = 1;
12440 }
12441 else if (7 == op3)
12442 {
12443 /* Handle STREXD. */
12444 address = u_regval[0];
12445 record_buf_mem[0] = 4;
12446 record_buf_mem[2] = 4;
12447 record_buf_mem[3] = address + 4;
12448 thumb2_insn_r->mem_rec_count = 2;
12449 }
12450 }
12451 else
12452 {
12453 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12454
12455 if (bit (thumb2_insn_r->arm_insn, 24))
12456 {
12457 if (bit (thumb2_insn_r->arm_insn, 23))
12458 offset_addr = u_regval[0] + (offset_imm * 4);
12459 else
12460 offset_addr = u_regval[0] - (offset_imm * 4);
12461
12462 address = offset_addr;
12463 }
12464 else
12465 address = u_regval[0];
12466
12467 record_buf_mem[0] = 4;
12468 record_buf_mem[1] = address;
12469 record_buf_mem[2] = 4;
12470 record_buf_mem[3] = address + 4;
12471 thumb2_insn_r->mem_rec_count = 2;
12472 record_buf[0] = reg_rn;
12473 thumb2_insn_r->reg_rec_count = 1;
12474 }
12475 }
12476
12477 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12478 record_buf);
12479 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12480 record_buf_mem);
12481 return ARM_RECORD_SUCCESS;
12482}
12483
12484/* Handler for thumb2 data processing (shift register and modified immediate)
12485 instructions. */
12486
12487static int
12488thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12489{
12490 uint32_t reg_rd, op;
12491 uint32_t record_buf[8];
12492
12493 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12494 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12495
12496 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12497 {
12498 record_buf[0] = ARM_PS_REGNUM;
12499 thumb2_insn_r->reg_rec_count = 1;
12500 }
12501 else
12502 {
12503 record_buf[0] = reg_rd;
12504 record_buf[1] = ARM_PS_REGNUM;
12505 thumb2_insn_r->reg_rec_count = 2;
12506 }
12507
12508 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12509 record_buf);
12510 return ARM_RECORD_SUCCESS;
12511}
12512
12513/* Generic handler for thumb2 instructions which affect the destination
12514 and PS registers. */
12515
12516static int
12517thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12518{
12519 uint32_t reg_rd;
12520 uint32_t record_buf[8];
12521
12522 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12523
12524 record_buf[0] = reg_rd;
12525 record_buf[1] = ARM_PS_REGNUM;
12526 thumb2_insn_r->reg_rec_count = 2;
12527
12528 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12529 record_buf);
12530 return ARM_RECORD_SUCCESS;
12531}
12532
12533/* Handler for thumb2 branch and miscellaneous control instructions. */
12534
12535static int
12536thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12537{
12538 uint32_t op, op1, op2;
12539 uint32_t record_buf[8];
12540
12541 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12542 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12543 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12544
12545 /* Handle MSR insn. */
12546 if (!(op1 & 0x2) && 0x38 == op)
12547 {
12548 if (!(op2 & 0x3))
12549 {
12550 /* CPSR is going to be changed. */
12551 record_buf[0] = ARM_PS_REGNUM;
12552 thumb2_insn_r->reg_rec_count = 1;
12553 }
12554 else
12555 {
12556 arm_record_unsupported_insn (thumb2_insn_r);
12557 return -1;
12558 }
12559 }
12560 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12561 {
12562 /* BLX. */
12563 record_buf[0] = ARM_PS_REGNUM;
12564 record_buf[1] = ARM_LR_REGNUM;
12565 thumb2_insn_r->reg_rec_count = 2;
12566 }
12567
12568 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12569 record_buf);
12570 return ARM_RECORD_SUCCESS;
12571}
12572
12573/* Handler for thumb2 store single data item instructions. */
12574
12575static int
12576thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12577{
12578 struct regcache *reg_cache = thumb2_insn_r->regcache;
12579
12580 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12581 uint32_t address, offset_addr;
12582 uint32_t record_buf[8], record_buf_mem[8];
12583 uint32_t op1, op2;
12584
12585 ULONGEST u_regval[2];
12586
12587 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12588 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12589 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12590 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12591
12592 if (bit (thumb2_insn_r->arm_insn, 23))
12593 {
12594 /* T2 encoding. */
12595 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12596 offset_addr = u_regval[0] + offset_imm;
12597 address = offset_addr;
12598 }
12599 else
12600 {
12601 /* T3 encoding. */
12602 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12603 {
12604 /* Handle STRB (register). */
12605 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12606 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12607 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12608 offset_addr = u_regval[1] << shift_imm;
12609 address = u_regval[0] + offset_addr;
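/* For example, "strb.w r2, [r3, r4, lsl #2]" stores the byte at
r3 + (r4 << 2), which is the address recorded below.  */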
12610 }
12611 else
12612 {
12613 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12614 if (bit (thumb2_insn_r->arm_insn, 10))
12615 {
12616 if (bit (thumb2_insn_r->arm_insn, 9))
12617 offset_addr = u_regval[0] + offset_imm;
12618 else
12619 offset_addr = u_regval[0] - offset_imm;
12620
12621 address = offset_addr;
12622 }
12623 else
12624 address = u_regval[0];
12625 }
12626 }
12627
12628 switch (op1)
12629 {
12630 /* Store byte instructions. */
12631 case 4:
12632 case 0:
12633 record_buf_mem[0] = 1;
12634 break;
12635 /* Store half word instructions. */
12636 case 1:
12637 case 5:
12638 record_buf_mem[0] = 2;
12639 break;
12640 /* Store word instructions. */
12641 case 2:
12642 case 6:
12643 record_buf_mem[0] = 4;
12644 break;
12645
12646 default:
12647 gdb_assert_not_reached ("no decoding pattern found");
12648 break;
12649 }
12650
12651 record_buf_mem[1] = address;
12652 thumb2_insn_r->mem_rec_count = 1;
12653 record_buf[0] = reg_rn;
12654 thumb2_insn_r->reg_rec_count = 1;
12655
12656 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12657 record_buf);
12658 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12659 record_buf_mem);
12660 return ARM_RECORD_SUCCESS;
12661}
12662
12663/* Handler for thumb2 load memory hints instructions. */
12664
12665static int
12666thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12667{
12668 uint32_t record_buf[8];
12669 uint32_t reg_rt, reg_rn;
12670
12671 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12672 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12673
12674 if (ARM_PC_REGNUM != reg_rt)
12675 {
12676 record_buf[0] = reg_rt;
12677 record_buf[1] = reg_rn;
12678 record_buf[2] = ARM_PS_REGNUM;
12679 thumb2_insn_r->reg_rec_count = 3;
12680
12681 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12682 record_buf);
12683 return ARM_RECORD_SUCCESS;
12684 }
12685
12686 return ARM_RECORD_FAILURE;
12687}
12688
12689/* Handler for thumb2 load word instructions. */
12690
12691static int
12692thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12693{
12694 uint32_t record_buf[8];
12695
12696 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12697 record_buf[1] = ARM_PS_REGNUM;
12698 thumb2_insn_r->reg_rec_count = 2;
12699
12700 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12701 record_buf);
12702 return ARM_RECORD_SUCCESS;
12703}
12704
12705/* Handler for thumb2 long multiply, long multiply accumulate, and
12706 divide instructions. */
12707
12708static int
12709thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12710{
12711 uint32_t opcode1 = 0, opcode2 = 0;
12712 uint32_t record_buf[8];
12713
12714 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12715 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12716
12717 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12718 {
12719 /* Handle SMULL, UMULL, SMLAL, UMLAL and the other long multiply
12720 (accumulate) insns; RdLo and RdHi are both written. */
12721 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12722 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12723 record_buf[2] = ARM_PS_REGNUM;
12724 thumb2_insn_r->reg_rec_count = 3;
12725 }
12726 else if (1 == opcode1 || 3 == opcode1)
12727 {
12728 /* Handle SDIV and UDIV. */
12729 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12730 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12731 record_buf[2] = ARM_PS_REGNUM;
12732 thumb2_insn_r->reg_rec_count = 3;
12733 }
12734 else
12735 return ARM_RECORD_FAILURE;
12736
12737 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12738 record_buf);
12739 return ARM_RECORD_SUCCESS;
12740}
12741
12742/* Record handler for thumb32 coprocessor instructions. */
12743
12744static int
12745thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12746{
12747 if (bit (thumb2_insn_r->arm_insn, 25))
12748 return arm_record_coproc_data_proc (thumb2_insn_r);
12749 else
12750 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12751}
12752
12753/* Record handler for advanced SIMD structure load/store instructions. */
12754
12755static int
12756thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12757{
12758 struct regcache *reg_cache = thumb2_insn_r->regcache;
12759 uint32_t l_bit, a_bit, b_bits;
12760 uint32_t record_buf[128], record_buf_mem[128];
12761 uint32_t reg_rn, reg_vd, address, f_elem;
12762 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12763 uint8_t f_ebytes;
12764
12765 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12766 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12767 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12768 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12769 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12770 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12771 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12772 f_elem = 8 / f_ebytes;
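/* F_EBYTES is the element size in bytes taken from the size field, and
F_ELEM the number of such elements in one 64-bit D register.  */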
12773
12774 if (!l_bit)
12775 {
12776 ULONGEST u_regval = 0;
12777 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12778 address = u_regval;
12779
12780 if (!a_bit)
12781 {
12782 /* Handle VST1. */
12783 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12784 {
12785 if (b_bits == 0x07)
12786 bf_regs = 1;
12787 else if (b_bits == 0x0a)
12788 bf_regs = 2;
12789 else if (b_bits == 0x06)
12790 bf_regs = 3;
12791 else if (b_bits == 0x02)
12792 bf_regs = 4;
12793 else
12794 bf_regs = 0;
12795
12796 for (index_r = 0; index_r < bf_regs; index_r++)
12797 {
12798 for (index_e = 0; index_e < f_elem; index_e++)
12799 {
12800 record_buf_mem[index_m++] = f_ebytes;
12801 record_buf_mem[index_m++] = address;
12802 address = address + f_ebytes;
12803 thumb2_insn_r->mem_rec_count += 1;
12804 }
12805 }
12806 }
12807 /* Handle VST2. */
12808 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12809 {
12810 if (b_bits == 0x09 || b_bits == 0x08)
12811 bf_regs = 1;
12812 else if (b_bits == 0x03)
12813 bf_regs = 2;
12814 else
12815 bf_regs = 0;
12816
12817 for (index_r = 0; index_r < bf_regs; index_r++)
12818 for (index_e = 0; index_e < f_elem; index_e++)
12819 {
12820 for (loop_t = 0; loop_t < 2; loop_t++)
12821 {
12822 record_buf_mem[index_m++] = f_ebytes;
12823 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12824 thumb2_insn_r->mem_rec_count += 1;
12825 }
12826 address = address + (2 * f_ebytes);
12827 }
12828 }
12829 /* Handle VST3. */
12830 else if ((b_bits & 0x0e) == 0x04)
12831 {
12832 for (index_e = 0; index_e < f_elem; index_e++)
12833 {
12834 for (loop_t = 0; loop_t < 3; loop_t++)
12835 {
12836 record_buf_mem[index_m++] = f_ebytes;
12837 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12838 thumb2_insn_r->mem_rec_count += 1;
12839 }
12840 address = address + (3 * f_ebytes);
12841 }
12842 }
12843 /* Handle VST4. */
12844 else if (!(b_bits & 0x0e))
12845 {
12846 for (index_e = 0; index_e < f_elem; index_e++)
12847 {
12848 for (loop_t = 0; loop_t < 4; loop_t++)
12849 {
12850 record_buf_mem[index_m++] = f_ebytes;
12851 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12852 thumb2_insn_r->mem_rec_count += 1;
12853 }
12854 address = address + (4 * f_ebytes);
12855 }
12856 }
12857 }
12858 else
12859 {
12860 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12861
12862 if (bft_size == 0x00)
12863 f_ebytes = 1;
12864 else if (bft_size == 0x01)
12865 f_ebytes = 2;
12866 else if (bft_size == 0x02)
12867 f_ebytes = 4;
12868 else
12869 f_ebytes = 0;
12870
12871 /* Handle VST1. */
12872 if (!(b_bits & 0x0b) || b_bits == 0x08)
12873 thumb2_insn_r->mem_rec_count = 1;
12874 /* Handle VST2. */
12875 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12876 thumb2_insn_r->mem_rec_count = 2;
12877 /* Handle VST3. */
12878 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12879 thumb2_insn_r->mem_rec_count = 3;
12880 /* Handle VST4. */
12881 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12882 thumb2_insn_r->mem_rec_count = 4;
12883
12884 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12885 {
12886 record_buf_mem[index_m * 2] = f_ebytes;
12887 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12888 }
12889 }
12890 }
12891 else
12892 {
12893 if (!a_bit)
12894 {
12895 /* Handle VLD1. */
12896 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12897 thumb2_insn_r->reg_rec_count = 1;
12898 /* Handle VLD2. */
12899 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12900 thumb2_insn_r->reg_rec_count = 2;
12901 /* Handle VLD3. */
12902 else if ((b_bits & 0x0e) == 0x04)
12903 thumb2_insn_r->reg_rec_count = 3;
12904 /* Handle VLD4. */
12905 else if (!(b_bits & 0x0e))
12906 thumb2_insn_r->reg_rec_count = 4;
12907 }
12908 else
12909 {
12910 /* Handle VLD1. */
12911 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12912 thumb2_insn_r->reg_rec_count = 1;
12913 /* Handle VLD2. */
12914 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12915 thumb2_insn_r->reg_rec_count = 2;
12916 /* Handle VLD3. */
12917 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12918 thumb2_insn_r->reg_rec_count = 3;
12919 /* Handle VLD4. */
12920 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12921 thumb2_insn_r->reg_rec_count = 4;
12922
12923 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12924 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12925 }
12926 }
12927
12928 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12929 {
12930 record_buf[index_r] = reg_rn;
12931 thumb2_insn_r->reg_rec_count += 1;
12932 }
12933
12934 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12935 record_buf);
12936 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12937 record_buf_mem);
12938 return 0;
12939}
12940
12941/* Decodes thumb2 instruction type and invokes its record handler. */
12942
12943static unsigned int
12944thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12945{
12946 uint32_t op, op1, op2;
12947
12948 op = bit (thumb2_insn_r->arm_insn, 15);
12949 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12950 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12951
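 /* OP1 (bits 28:27) selects the top-level Thumb-2 encoding group;
 OP2 (bits 26:20) and OP (bit 15) refine the choice below.  */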
12952 if (op1 == 0x01)
12953 {
12954 if (!(op2 & 0x64))
12955 {
12956 /* Load/store multiple instruction. */
12957 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12958 }
12959 else if ((op2 & 0x64) == 0x4)
12960 {
12961 /* Load/store (dual/exclusive) and table branch instruction. */
12962 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12963 }
12964 else if ((op2 & 0x60) == 0x20)
12965 {
12966 /* Data-processing (shifted register). */
12967 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12968 }
12969 else if (op2 & 0x40)
12970 {
12971 /* Co-processor instructions. */
12972 return thumb2_record_coproc_insn (thumb2_insn_r);
12973 }
12974 }
12975 else if (op1 == 0x02)
12976 {
12977 if (op)
12978 {
12979 /* Branches and miscellaneous control instructions. */
12980 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12981 }
12982 else if (op2 & 0x20)
12983 {
12984 /* Data-processing (plain binary immediate) instruction. */
12985 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12986 }
12987 else
12988 {
12989 /* Data-processing (modified immediate). */
12990 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12991 }
12992 }
12993 else if (op1 == 0x03)
12994 {
12995 if (!(op2 & 0x71))
12996 {
12997 /* Store single data item. */
12998 return thumb2_record_str_single_data (thumb2_insn_r);
12999 }
13000 else if (!((op2 & 0x71) ^ 0x10))
13001 {
13002 /* Advanced SIMD or structure load/store instructions. */
13003 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13004 }
13005 else if (!((op2 & 0x67) ^ 0x01))
13006 {
13007 /* Load byte, memory hints instruction. */
13008 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13009 }
13010 else if (!((op2 & 0x67) ^ 0x03))
13011 {
13012 /* Load halfword, memory hints instruction. */
13013 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13014 }
13015 else if (!((op2 & 0x67) ^ 0x05))
13016 {
13017 /* Load word instruction. */
13018 return thumb2_record_ld_word (thumb2_insn_r);
13019 }
13020 else if (!((op2 & 0x70) ^ 0x20))
13021 {
13022 /* Data-processing (register) instruction. */
13023 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13024 }
13025 else if (!((op2 & 0x78) ^ 0x30))
13026 {
13027 /* Multiply, multiply accumulate, abs diff instruction. */
13028 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13029 }
13030 else if (!((op2 & 0x78) ^ 0x38))
13031 {
13032 /* Long multiply, long multiply accumulate, and divide. */
13033 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13034 }
13035 else if (op2 & 0x40)
13036 {
13037 /* Co-processor instructions. */
13038 return thumb2_record_coproc_insn (thumb2_insn_r);
13039 }
13040 }
13041
13042 return -1;
13043}
13044
13045namespace {
13046/* Abstract memory reader. */
13047
13048class abstract_memory_reader
13049{
13050public:
13051 /* Read LEN bytes of target memory at address MEMADDR, placing the
13052 results in GDB's memory at BUF. Return true on success. */
13053
13054 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13055};
13056
13057/* Instruction reader from real target. */
13058
13059class instruction_reader : public abstract_memory_reader
13060{
13061 public:
13062 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13063 {
13064 if (target_read_memory (memaddr, buf, len))
13065 return false;
13066 else
13067 return true;
13068 }
13069};
13070
13071} // namespace
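/* The reader abstraction above lets the decoder fetch instruction bytes
 either from the real target (instruction_reader) or, in the self-tests
 further below, from a canned buffer.  */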
13072
13073/* Extract an arm/thumb/thumb2 insn of INSN_SIZE bytes. Return 0 on success
13074 and a positive value on failure. */
13075
13076static int
13077extract_arm_insn (abstract_memory_reader& reader,
13078 insn_decode_record *insn_record, uint32_t insn_size)
13079{
13080 gdb_byte buf[insn_size];
13081
13082 memset (&buf[0], 0, insn_size);
13083
13084 if (!reader.read (insn_record->this_addr, buf, insn_size))
13085 return 1;
13086 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13087 insn_size,
13088 gdbarch_byte_order_for_code (insn_record->gdbarch));
13089 return 0;
13090}
13091
13092typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13093
13094/* Decode an arm/thumb insn depending on its condition code and opcode, and
13095 dispatch it to the matching record handler. */
13096
13097static int
13098decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13099 record_type_t record_type, uint32_t insn_size)
13100{
13101
13102 /* Bits 25, 26 and 27 (counting from 0) select the type of an ARM
13103 instruction. */
13104 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13105 {
13106 arm_record_data_proc_misc_ld_str, /* 000. */
13107 arm_record_data_proc_imm, /* 001. */
13108 arm_record_ld_st_imm_offset, /* 010. */
13109 arm_record_ld_st_reg_offset, /* 011. */
13110 arm_record_ld_st_multiple, /* 100. */
13111 arm_record_b_bl, /* 101. */
13112 arm_record_asimd_vfp_coproc, /* 110. */
13113 arm_record_coproc_data_proc /* 111. */
13114 };
13115
13116 /* Bits 13, 14 and 15 (counting from 0) select the type of a Thumb
13117 instruction. */
13118 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13119 {
13120 thumb_record_shift_add_sub, /* 000. */
13121 thumb_record_add_sub_cmp_mov, /* 001. */
13122 thumb_record_ld_st_reg_offset, /* 010. */
13123 thumb_record_ld_st_imm_offset, /* 011. */
13124 thumb_record_ld_st_stack, /* 100. */
13125 thumb_record_misc, /* 101. */
13126 thumb_record_ldm_stm_swi, /* 110. */
13127 thumb_record_branch /* 111. */
13128 };
13129
13130 int ret = 0; /* Return value: -1 on failure, 0 on success. */
13131 uint32_t insn_id = 0;
13132
13133 if (extract_arm_insn (reader, arm_record, insn_size))
13134 {
13135 if (record_debug)
13136 {
13137 printf_unfiltered (_("Process record: error reading memory at "
13138 "addr %s len = %d.\n"),
13139 paddress (arm_record->gdbarch,
13140 arm_record->this_addr), insn_size);
13141 }
13142 return -1;
13143 }
13144 else if (ARM_RECORD == record_type)
13145 {
13146 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13147 insn_id = bits (arm_record->arm_insn, 25, 27);
13148
13149 if (arm_record->cond == 0xf)
13150 ret = arm_record_extension_space (arm_record);
13151 else
13152 {
13153 /* If this insn has fallen into extension space
13154 then we need not decode it anymore. */
13155 ret = arm_handle_insn[insn_id] (arm_record);
13156 }
13157 if (ret != ARM_RECORD_SUCCESS)
13158 {
13159 arm_record_unsupported_insn (arm_record);
13160 ret = -1;
13161 }
13162 }
13163 else if (THUMB_RECORD == record_type)
13164 {
13165 /* Thumb does not have condition codes, so set COND to -1. */
13166 arm_record->cond = -1;
13167 insn_id = bits (arm_record->arm_insn, 13, 15);
13168 ret = thumb_handle_insn[insn_id] (arm_record);
13169 if (ret != ARM_RECORD_SUCCESS)
13170 {
13171 arm_record_unsupported_insn (arm_record);
13172 ret = -1;
13173 }
13174 }
13175 else if (THUMB2_RECORD == record_type)
13176 {
13177 /* Thumb does not have condition codes, so set COND to -1. */
13178 arm_record->cond = -1;
13179
13180 /* Swap the two halfwords so that the first one ends up in the upper 16 bits, as the Thumb-2 record handlers expect. */
13181 arm_record->arm_insn
13182 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13183
13184 ret = thumb2_record_decode_insn_handler (arm_record);
13185
13186 if (ret != ARM_RECORD_SUCCESS)
13187 {
13188 arm_record_unsupported_insn (arm_record);
13189 ret = -1;
13190 }
13191 }
13192 else
13193 {
13194 /* Unknown record type; this should never be reached. */
13195 gdb_assert_not_reached ("not a valid instruction, could not decode");
13196 }
13197
13198 return ret;
13199}
13200
13201#if GDB_SELF_TEST
13202namespace selftests {
13203
13204/* Provide both 16-bit and 32-bit thumb instructions. */
13205
13206class instruction_reader_thumb : public abstract_memory_reader
13207{
13208public:
13209 template<size_t SIZE>
13210 instruction_reader_thumb (enum bfd_endian endian,
13211 const uint16_t (&insns)[SIZE])
13212 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13213 {}
13214
13215 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13216 {
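 /* The canned instruction array is indexed in 16-bit halfwords, so
 MEMADDR must be even and lie within the array.  */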
13217 SELF_CHECK (len == 4 || len == 2);
13218 SELF_CHECK (memaddr % 2 == 0);
13219 SELF_CHECK ((memaddr / 2) < m_insns_size);
13220
13221 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13222 if (len == 4)
13223 {
13224 store_unsigned_integer (&buf[2], 2, m_endian,
13225 m_insns[memaddr / 2 + 1]);
13226 }
13227 return true;
13228 }
13229
13230private:
13231 enum bfd_endian m_endian;
13232 const uint16_t *m_insns;
13233 size_t m_insns_size;
13234};
13235
13236static void
13237arm_record_test (void)
13238{
13239 struct gdbarch_info info;
13240 gdbarch_info_init (&info);
13241 info.bfd_arch_info = bfd_scan_arch ("arm");
13242
13243 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13244
13245 SELF_CHECK (gdbarch != NULL);
13246
13247 /* 16-bit Thumb instructions. */
13248 {
13249 insn_decode_record arm_record;
13250
13251 memset (&arm_record, 0, sizeof (insn_decode_record));
13252 arm_record.gdbarch = gdbarch;
13253
13254 static const uint16_t insns[] = {
13255 /* db b2 uxtb r3, r3 */
13256 0xb2db,
13257 /* cd 58 ldr r5, [r1, r3] */
13258 0x58cd,
13259 };
13260
13261 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13262 instruction_reader_thumb reader (endian, insns);
13263 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13264 THUMB_INSN_SIZE_BYTES);
13265
13266 SELF_CHECK (ret == 0);
13267 SELF_CHECK (arm_record.mem_rec_count == 0);
13268 SELF_CHECK (arm_record.reg_rec_count == 1);
13269 SELF_CHECK (arm_record.arm_regs[0] == 3);
13270
13271 arm_record.this_addr += 2;
13272 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13273 THUMB_INSN_SIZE_BYTES);
13274
13275 SELF_CHECK (ret == 0);
13276 SELF_CHECK (arm_record.mem_rec_count == 0);
13277 SELF_CHECK (arm_record.reg_rec_count == 1);
13278 SELF_CHECK (arm_record.arm_regs[0] == 5);
13279 }
13280
13281 /* 32-bit Thumb-2 instructions. */
13282 {
13283 insn_decode_record arm_record;
13284
13285 memset (&arm_record, 0, sizeof (insn_decode_record));
13286 arm_record.gdbarch = gdbarch;
13287
13288 static const uint16_t insns[] = {
13289 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13290 0xee1d, 0x7f70,
13291 };
13292
13293 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13294 instruction_reader_thumb reader (endian, insns);
13295 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13296 THUMB2_INSN_SIZE_BYTES);
13297
13298 SELF_CHECK (ret == 0);
13299 SELF_CHECK (arm_record.mem_rec_count == 0);
13300 SELF_CHECK (arm_record.reg_rec_count == 1);
13301 SELF_CHECK (arm_record.arm_regs[0] == 7);
13302 }
13303}
13304} // namespace selftests
13305#endif /* GDB_SELF_TEST */
13306
13307/* Cleans up local record registers and memory allocations. */
13308
13309static void
13310deallocate_reg_mem (insn_decode_record *record)
13311{
13312 xfree (record->arm_regs);
13313 xfree (record->arm_mems);
13314}
13315
13316
13317/* Parse the current instruction and record the values of the registers and
13318 memory that will be changed by the current instruction to "record_arch_list".
13319 Return -1 if something is wrong. */
13320
13321int
13322arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13323 CORE_ADDR insn_addr)
13324{
13325
13326 uint32_t no_of_rec = 0;
13327 int ret = 0; /* Return value: -1 on record failure, 0 on success. */
13328 ULONGEST t_bit = 0, insn_id = 0;
13329
13330 ULONGEST u_regval = 0;
13331
13332 insn_decode_record arm_record;
13333
13334 memset (&arm_record, 0, sizeof (insn_decode_record));
13335 arm_record.regcache = regcache;
13336 arm_record.this_addr = insn_addr;
13337 arm_record.gdbarch = gdbarch;
13338
13339
13340 if (record_debug > 1)
13341 {
13342 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13343 "addr = %s\n",
13344 paddress (gdbarch, arm_record.this_addr));
13345 }
13346
13347 instruction_reader reader;
13348 if (extract_arm_insn (reader, &arm_record, 2))
13349 {
13350 if (record_debug)
13351 {
13352 printf_unfiltered (_("Process record: error reading memory at "
13353 "addr %s len = %d.\n"),
13354 paddress (arm_record.gdbarch,
13355 arm_record.this_addr), 2);
13356 }
13357 return -1;
13358 }
13359
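 /* Only the first halfword has been read above; it is enough to decide
 between ARM, Thumb and Thumb-2, and decode_insn re-reads the
 instruction at its full size.  */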
13360 /* Check whether the insn is a Thumb or an ARM one. */
13361
13362 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13363 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13364
13365
13366 if (!(u_regval & t_bit))
13367 {
13368 /* We are decoding arm insn. */
13369 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13370 }
13371 else
13372 {
13373 insn_id = bits (arm_record.arm_insn, 11, 15);
13374 /* Is it a Thumb-2 insn? Bits 15:11 of the first halfword being 0x1D, 0x1E or 0x1F marks a 32-bit encoding. */
13375 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13376 {
13377 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13378 THUMB2_INSN_SIZE_BYTES);
13379 }
13380 else
13381 {
13382 /* We are decoding thumb insn. */
13383 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13384 THUMB_INSN_SIZE_BYTES);
13385 }
13386 }
13387
13388 if (0 == ret)
13389 {
13390 /* Record registers. */
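 /* The PC is changed by every instruction, so it is always recorded.  */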
13391 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13392 if (arm_record.arm_regs)
13393 {
13394 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13395 {
13396 if (record_full_arch_list_add_reg
13397 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13398 ret = -1;
13399 }
13400 }
13401 /* Record memories. */
13402 if (arm_record.arm_mems)
13403 {
13404 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13405 {
13406 if (record_full_arch_list_add_mem
13407 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13408 arm_record.arm_mems[no_of_rec].len))
13409 ret = -1;
13410 }
13411 }
13412
13413 if (record_full_arch_list_add_end ())
13414 ret = -1;
13415 }
13416
13417
13418 deallocate_reg_mem (&arm_record);
13419
13420 return ret;
13421}