1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
53
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
57
58 #include "record.h"
59 #include "record-full.h"
60 #include <algorithm>
61
62 #if GDB_SELF_TEST
63 #include "gdbsupport/selftest.h"
64 #endif
65
66 static bool arm_debug;
67
68 /* Macros for setting and testing a bit in a minimal symbol that marks
69 it as a Thumb function. The MSB of the minimal symbol's "info" field
70 is used for this purpose.
71
72 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
73 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
74
75 #define MSYMBOL_SET_SPECIAL(msym) \
76 MSYMBOL_TARGET_FLAG_1 (msym) = 1
77
78 #define MSYMBOL_IS_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym)
80
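/* A single mapping symbol: VALUE is its offset from the start of the
   containing section, and TYPE is the character following the '$' in its
   name ('a' for ARM code, 't' for Thumb code, 'd' for data).  */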
81 struct arm_mapping_symbol
82 {
83 bfd_vma value;
84 char type;
85
86 bool operator< (const arm_mapping_symbol &other) const
87 { return this->value < other.value; }
88 };
89
90 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
91
92 struct arm_per_bfd
93 {
94 explicit arm_per_bfd (size_t num_sections)
95 : section_maps (new arm_mapping_symbol_vec[num_sections]),
96 section_maps_sorted (new bool[num_sections] ())
97 {}
98
99 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
100
101 /* Information about mapping symbols ($a, $d, $t) in the objfile.
102
103 The format is an array of vectors of arm_mapping_symbols; there is one
104 vector for each section of the objfile (the array is indexed by BFD
105 section index).
106
107 For each section, the vector of arm_mapping_symbol is sorted by
108 symbol value (address). */
109 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
110
111 /* For each corresponding element of section_maps above, whether that
112 vector has been sorted. */
113 std::unique_ptr<bool[]> section_maps_sorted;
114 };
115
116 /* Per-bfd data used for mapping symbols. */
117 static bfd_key<arm_per_bfd> arm_bfd_data_key;
118
119 /* The list of available "set arm ..." and "show arm ..." commands. */
120 static struct cmd_list_element *setarmcmdlist = NULL;
121 static struct cmd_list_element *showarmcmdlist = NULL;
122
123 /* The type of floating-point to use. Keep this in sync with enum
124 arm_float_model, and the help string in _initialize_arm_tdep. */
125 static const char *const fp_model_strings[] =
126 {
127 "auto",
128 "softfpa",
129 "fpa",
130 "softvfp",
131 "vfp",
132 NULL
133 };
134
135 /* A variable that can be configured by the user. */
136 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
137 static const char *current_fp_model = "auto";
138
139 /* The ABI to use. Keep this in sync with arm_abi_kind. */
140 static const char *const arm_abi_strings[] =
141 {
142 "auto",
143 "APCS",
144 "AAPCS",
145 NULL
146 };
147
148 /* A variable that can be configured by the user. */
149 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
150 static const char *arm_abi_string = "auto";
151
152 /* The execution mode to assume. */
153 static const char *const arm_mode_strings[] =
154 {
155 "auto",
156 "arm",
157 "thumb",
158 NULL
159 };
160
161 static const char *arm_fallback_mode_string = "auto";
162 static const char *arm_force_mode_string = "auto";
163
164 /* The standard register names, and all the valid aliases for them. Note
165 that `fp', `sp' and `pc' are not added in this alias list, because they
166 have been added as builtin user registers in
167 std-regs.c:_initialize_frame_reg. */
168 static const struct
169 {
170 const char *name;
171 int regnum;
172 } arm_register_aliases[] = {
173 /* Basic register numbers. */
174 { "r0", 0 },
175 { "r1", 1 },
176 { "r2", 2 },
177 { "r3", 3 },
178 { "r4", 4 },
179 { "r5", 5 },
180 { "r6", 6 },
181 { "r7", 7 },
182 { "r8", 8 },
183 { "r9", 9 },
184 { "r10", 10 },
185 { "r11", 11 },
186 { "r12", 12 },
187 { "r13", 13 },
188 { "r14", 14 },
189 { "r15", 15 },
190 /* Synonyms (argument and variable registers). */
191 { "a1", 0 },
192 { "a2", 1 },
193 { "a3", 2 },
194 { "a4", 3 },
195 { "v1", 4 },
196 { "v2", 5 },
197 { "v3", 6 },
198 { "v4", 7 },
199 { "v5", 8 },
200 { "v6", 9 },
201 { "v7", 10 },
202 { "v8", 11 },
203 /* Other platform-specific names for r9. */
204 { "sb", 9 },
205 { "tr", 9 },
206 /* Special names. */
207 { "ip", 12 },
208 { "lr", 14 },
209 /* Names used by GCC (not listed in the ARM EABI). */
210 { "sl", 10 },
211 /* A special name from the older ATPCS. */
212 { "wr", 7 },
213 };
214
215 static const char *const arm_register_names[] =
216 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
217 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
218 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
219 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
220 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
221 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
222 "fps", "cpsr" }; /* 24 25 */
223
224 /* Holds the current set of options to be passed to the disassembler. */
225 static char *arm_disassembler_options;
226
227 /* Valid register name styles. */
228 static const char **valid_disassembly_styles;
229
230 /* Disassembly style to use. Default to "std" register names. */
231 static const char *disassembly_style;
232
233 /* All possible arm target descriptors. */
234 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
235 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
236
237 /* This is used to keep the bfd arch_info in sync with the disassembly
238 style. */
239 static void set_disassembly_style_sfunc (const char *, int,
240 struct cmd_list_element *);
241 static void show_disassembly_style_sfunc (struct ui_file *, int,
242 struct cmd_list_element *,
243 const char *);
244
245 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
246 readable_regcache *regcache,
247 int regnum, gdb_byte *buf);
248 static void arm_neon_quad_write (struct gdbarch *gdbarch,
249 struct regcache *regcache,
250 int regnum, const gdb_byte *buf);
251
252 static CORE_ADDR
253 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
254
255
256 /* get_next_pcs operations. */
257 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
258 arm_get_next_pcs_read_memory_unsigned_integer,
259 arm_get_next_pcs_syscall_next_pc,
260 arm_get_next_pcs_addr_bits_remove,
261 arm_get_next_pcs_is_thumb,
262 NULL,
263 };
264
265 struct arm_prologue_cache
266 {
267 /* The stack pointer at the time this frame was created; i.e. the
268 caller's stack pointer when this function was called. It is used
269 to identify this frame. */
270 CORE_ADDR prev_sp;
271
272 /* The frame base for this frame is just prev_sp - frame size.
273 FRAMESIZE is the distance from the frame pointer to the
274 initial stack pointer. */
275
276 int framesize;
277
278 /* The register used to hold the frame pointer for this frame. */
279 int framereg;
280
281 /* Saved register offsets. */
282 struct trad_frame_saved_reg *saved_regs;
283 };
284
285 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
286 CORE_ADDR prologue_start,
287 CORE_ADDR prologue_end,
288 struct arm_prologue_cache *cache);
289
290 /* Architecture version for displaced stepping. This affects the behaviour of
291 certain instructions, and really should not be hard-wired. */
292
293 #define DISPLACED_STEPPING_ARCH_VERSION 5
294
295 /* See arm-tdep.h. */
296
297 bool arm_apcs_32 = true;
298
299 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
300
301 int
302 arm_psr_thumb_bit (struct gdbarch *gdbarch)
303 {
304 if (gdbarch_tdep (gdbarch)->is_m)
305 return XPSR_T;
306 else
307 return CPSR_T;
308 }
309
310 /* Determine if the processor is currently executing in Thumb mode. */
311
312 int
313 arm_is_thumb (struct regcache *regcache)
314 {
315 ULONGEST cpsr;
316 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
317
318 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
319
320 return (cpsr & t_bit) != 0;
321 }
322
323 /* Determine if FRAME is executing in Thumb mode. */
324
325 int
326 arm_frame_is_thumb (struct frame_info *frame)
327 {
328 CORE_ADDR cpsr;
329 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
330
331 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
332 directly (from a signal frame or dummy frame) or by interpreting
333 the saved LR (from a prologue or DWARF frame). So consult it and
334 trust the unwinders. */
335 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
336
337 return (cpsr & t_bit) != 0;
338 }
339
340 /* Search for the mapping symbol covering MEMADDR. If one is found,
341 return its type. Otherwise, return 0. If START is non-NULL,
342 set *START to the location of the mapping symbol. */
343
344 static char
345 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
346 {
347 struct obj_section *sec;
348
349 /* If there are mapping symbols, consult them. */
350 sec = find_pc_section (memaddr);
351 if (sec != NULL)
352 {
353 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
354 if (data != NULL)
355 {
356 unsigned int section_idx = sec->the_bfd_section->index;
357 arm_mapping_symbol_vec &map
358 = data->section_maps[section_idx];
359
360 /* Sort the vector on first use. */
361 if (!data->section_maps_sorted[section_idx])
362 {
363 std::sort (map.begin (), map.end ());
364 data->section_maps_sorted[section_idx] = true;
365 }
366
367 struct arm_mapping_symbol map_key
368 = { memaddr - obj_section_addr (sec), 0 };
369 arm_mapping_symbol_vec::const_iterator it
370 = std::lower_bound (map.begin (), map.end (), map_key);
371
372 /* std::lower_bound finds the earliest ordered insertion
373 point. If the symbol at this position starts at this exact
374 address, we use that; otherwise, the preceding
375 mapping symbol covers this address. */
376 if (it < map.end ())
377 {
378 if (it->value == map_key.value)
379 {
380 if (start)
381 *start = it->value + obj_section_addr (sec);
382 return it->type;
383 }
384 }
385
386 if (it > map.begin ())
387 {
388 arm_mapping_symbol_vec::const_iterator prev_it
389 = it - 1;
390
391 if (start)
392 *start = prev_it->value + obj_section_addr (sec);
393 return prev_it->type;
394 }
395 }
396 }
397
398 return 0;
399 }
400
401 /* Determine if the program counter specified in MEMADDR is in a Thumb
402 function. This function should be called for addresses unrelated to
403 any executing frame; otherwise, prefer arm_frame_is_thumb. */
404
405 int
406 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
407 {
408 struct bound_minimal_symbol sym;
409 char type;
410 arm_displaced_step_closure *dsc
411 = ((arm_displaced_step_closure * )
412 get_displaced_step_closure_by_addr (memaddr));
413
414 /* When checking the mode of a displaced instruction in the copy area, the
415 mode should be determined by the instruction at the original address. */
416 if (dsc)
417 {
418 if (debug_displaced)
419 fprintf_unfiltered (gdb_stdlog,
420 "displaced: check mode of %.8lx instead of %.8lx\n",
421 (unsigned long) dsc->insn_addr,
422 (unsigned long) memaddr);
423 memaddr = dsc->insn_addr;
424 }
425
426 /* If bit 0 of the address is set, assume this is a Thumb address. */
427 if (IS_THUMB_ADDR (memaddr))
428 return 1;
429
430 /* If the user wants to override the symbol table, let them. */
431 if (strcmp (arm_force_mode_string, "arm") == 0)
432 return 0;
433 if (strcmp (arm_force_mode_string, "thumb") == 0)
434 return 1;
435
436 /* ARM v6-M and v7-M are always in Thumb mode. */
437 if (gdbarch_tdep (gdbarch)->is_m)
438 return 1;
439
440 /* If there are mapping symbols, consult them. */
441 type = arm_find_mapping_symbol (memaddr, NULL);
442 if (type)
443 return type == 't';
444
445 /* Thumb functions have a "special" bit set in minimal symbols. */
446 sym = lookup_minimal_symbol_by_pc (memaddr);
447 if (sym.minsym)
448 return (MSYMBOL_IS_SPECIAL (sym.minsym));
449
450 /* If the user wants to override the fallback mode, let them. */
451 if (strcmp (arm_fallback_mode_string, "arm") == 0)
452 return 0;
453 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
454 return 1;
455
456 /* If we couldn't find any symbol, but we're talking to a running
457 target, then trust the current value of $cpsr. This lets
458 "display/i $pc" always show the correct mode (though if there is
459 a symbol table we will not reach here, so it still may not be
460 displayed in the mode in which it will be executed). */
461 if (target_has_registers)
462 return arm_frame_is_thumb (get_current_frame ());
463
464 /* Otherwise we're out of luck; we assume ARM. */
465 return 0;
466 }
467
468 /* Determine if the address specified equals any of these magic return
469 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
470 architectures.
471
472 From ARMv6-M Reference Manual B1.5.8
473 Table B1-5 Exception return behavior
474
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
479
480 From ARMv7-M Reference Manual B1.5.8
481 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
482
483 EXC_RETURN Return To Return Stack
484 0xFFFFFFF1 Handler mode Main
485 0xFFFFFFF9 Thread mode Main
486 0xFFFFFFFD Thread mode Process
487
488 Table B1-9 EXC_RETURN definition of exception return behavior, with
489 FP
490
491 EXC_RETURN Return To Return Stack Frame Type
492 0xFFFFFFE1 Handler mode Main Extended
493 0xFFFFFFE9 Thread mode Main Extended
494 0xFFFFFFED Thread mode Process Extended
495 0xFFFFFFF1 Handler mode Main Basic
496 0xFFFFFFF9 Thread mode Main Basic
497 0xFFFFFFFD Thread mode Process Basic
498
499 For more details see "B1.5.8 Exception return behavior"
500 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
501
502 static int
503 arm_m_addr_is_magic (CORE_ADDR addr)
504 {
505 switch (addr)
506 {
507 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
508 the exception return behavior. */
509 case 0xffffffe1:
510 case 0xffffffe9:
511 case 0xffffffed:
512 case 0xfffffff1:
513 case 0xfffffff9:
514 case 0xfffffffd:
515 /* Address is magic. */
516 return 1;
517
518 default:
519 /* Address is not magic. */
520 return 0;
521 }
522 }
523
524 /* Remove useless bits from addresses in a running program. */
525 static CORE_ADDR
526 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
527 {
528 /* On M-profile devices, do not strip the low bit from EXC_RETURN
529 (the magic exception return address). */
530 if (gdbarch_tdep (gdbarch)->is_m
531 && arm_m_addr_is_magic (val))
532 return val;
533
534 if (arm_apcs_32)
535 return UNMAKE_THUMB_ADDR (val);
536 else
537 return (val & 0x03fffffc);
538 }
539
540 /* Return 1 if PC is the start of a compiler helper function which
541 can be safely ignored during prologue skipping. IS_THUMB is true
542 if the function is known to be a Thumb function due to the way it
543 is being called. */
544 static int
545 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
546 {
547 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
548 struct bound_minimal_symbol msym;
549
550 msym = lookup_minimal_symbol_by_pc (pc);
551 if (msym.minsym != NULL
552 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
553 && msym.minsym->linkage_name () != NULL)
554 {
555 const char *name = msym.minsym->linkage_name ();
556
557 /* The GNU linker's Thumb call stub to foo is named __foo_from_thumb,
558 so skip the extra "__" before the prefix checks below. */
559 if (strstr (name, "_from_thumb") != NULL)
560 name += 2;
561
562 /* On soft-float targets, __truncdfsf2 is called to convert promoted
563 arguments to their argument types in non-prototyped
564 functions. */
565 if (startswith (name, "__truncdfsf2"))
566 return 1;
567 if (startswith (name, "__aeabi_d2f"))
568 return 1;
569
570 /* Internal functions related to thread-local storage. */
571 if (startswith (name, "__tls_get_addr"))
572 return 1;
573 if (startswith (name, "__aeabi_read_tp"))
574 return 1;
575 }
576 else
577 {
578 /* If we run against a stripped glibc, we may be unable to identify
579 special functions by name. Check for one important case,
580 __aeabi_read_tp, by comparing the *code* against the default
581 implementation (this is hand-written ARM assembler in glibc). */
582
583 if (!is_thumb
584 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
585 == 0xe3e00a0f /* mov r0, #0xffff0fff */
586 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
587 == 0xe240f01f) /* sub pc, r0, #31 */
588 return 1;
589 }
590
591 return 0;
592 }
593
594 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
595 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
596 the instruction. */
597 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
598 ((bits ((insn1), 0, 3) << 12) \
599 | (bits ((insn1), 10, 10) << 11) \
600 | (bits ((insn2), 12, 14) << 8) \
601 | bits ((insn2), 0, 7))
602
603 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
604 the 32-bit instruction. */
605 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
606 ((bits ((insn), 16, 19) << 12) \
607 | bits ((insn), 0, 11))
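/* For example, "movw r3, #0x1234" is encoded as insn1 = 0xf241,
   insn2 = 0x2334 in encoding T and as 0xe3013234 in encoding A; both
   macros above recover the immediate 0x1234 from those encodings.  */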
608
609 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
610
611 static unsigned int
612 thumb_expand_immediate (unsigned int imm)
613 {
614 unsigned int count = imm >> 7;
615
616 if (count < 8)
617 switch (count / 2)
618 {
619 case 0:
620 return imm & 0xff;
621 case 1:
622 return (imm & 0xff) | ((imm & 0xff) << 16);
623 case 2:
624 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
625 case 3:
626 return (imm & 0xff) | ((imm & 0xff) << 8)
627 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
628 }
629
630 return (0x80 | (imm & 0x7f)) << (32 - count);
631 }
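/* For example, thumb_expand_immediate (0x155) yields 0x00550055 (0x55
   replicated at bits 0-7 and 16-23), and thumb_expand_immediate (0x4ff)
   yields 0x7f800000 (0xff rotated right by 9).  */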
632
633 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
634 epilogue, 0 otherwise. */
635
636 static int
637 thumb_instruction_restores_sp (unsigned short insn)
638 {
639 return (insn == 0x46bd /* mov sp, r7 */
640 || (insn & 0xff80) == 0xb000 /* add sp, imm */
641 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
642 }
643
644 /* Analyze a Thumb prologue, looking for a recognizable stack frame
645 and frame pointer. Scan until we encounter a store that could
646 clobber the stack frame unexpectedly, or an unknown instruction.
647 Return the last address which is definitely safe to skip for an
648 initial breakpoint. */
649
650 static CORE_ADDR
651 thumb_analyze_prologue (struct gdbarch *gdbarch,
652 CORE_ADDR start, CORE_ADDR limit,
653 struct arm_prologue_cache *cache)
654 {
655 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
656 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
657 int i;
658 pv_t regs[16];
659 CORE_ADDR offset;
660 CORE_ADDR unrecognized_pc = 0;
661
662 for (i = 0; i < 16; i++)
663 regs[i] = pv_register (i, 0);
664 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
665
666 while (start < limit)
667 {
668 unsigned short insn;
669
670 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
671
672 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
673 {
674 int regno;
675 int mask;
676
677 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
678 break;
679
680 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says whether
681 to save LR (R14); shift it up to LR's bit position (14) in MASK. */
682 mask = (insn & 0xff) | ((insn & 0x100) << 6);
683
684 /* Calculate offsets of saved R0-R7 and LR. */
685 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
686 if (mask & (1 << regno))
687 {
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
689 -4);
690 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
691 }
692 }
693 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
694 {
695 offset = (insn & 0x7f) << 2; /* get scaled offset */
696 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
697 -offset);
698 }
699 else if (thumb_instruction_restores_sp (insn))
700 {
701 /* Don't scan past the epilogue. */
702 break;
703 }
704 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
706 (insn & 0xff) << 2);
707 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
708 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
709 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
710 bits (insn, 6, 8));
711 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
712 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
713 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
714 bits (insn, 0, 7));
715 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
716 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
717 && pv_is_constant (regs[bits (insn, 3, 5)]))
718 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
719 regs[bits (insn, 6, 8)]);
720 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
721 && pv_is_constant (regs[bits (insn, 3, 6)]))
722 {
723 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
724 int rm = bits (insn, 3, 6);
725 regs[rd] = pv_add (regs[rd], regs[rm]);
726 }
727 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
728 {
729 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
730 int src_reg = (insn & 0x78) >> 3;
731 regs[dst_reg] = regs[src_reg];
732 }
733 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
734 {
735 /* Handle stores to the stack. Normally pushes are used,
736 but with GCC -mtpcs-frame, there may be other stores
737 in the prologue to create the frame. */
738 int regno = (insn >> 8) & 0x7;
739 pv_t addr;
740
741 offset = (insn & 0xff) << 2;
742 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
743
744 if (stack.store_would_trash (addr))
745 break;
746
747 stack.store (addr, 4, regs[regno]);
748 }
749 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
750 {
751 int rd = bits (insn, 0, 2);
752 int rn = bits (insn, 3, 5);
753 pv_t addr;
754
755 offset = bits (insn, 6, 10) << 2;
756 addr = pv_add_constant (regs[rn], offset);
757
758 if (stack.store_would_trash (addr))
759 break;
760
761 stack.store (addr, 4, regs[rd]);
762 }
763 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
764 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
765 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
766 /* Ignore stores of argument registers to the stack. */
767 ;
768 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
769 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
770 /* Ignore block loads from the stack, potentially copying
771 parameters from memory. */
772 ;
773 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
774 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
775 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
776 /* Similarly ignore single loads from the stack. */
777 ;
778 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
779 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
780 /* Skip register copies, i.e. saves to another register
781 instead of the stack. */
782 ;
783 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
784 /* Recognize constant loads; even with small stacks these are necessary
785 on Thumb. */
786 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
787 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
788 {
789 /* Constant pool loads, for the same reason. */
790 unsigned int constant;
791 CORE_ADDR loc;
792
793 loc = start + 4 + bits (insn, 0, 7) * 4;
794 constant = read_memory_unsigned_integer (loc, 4, byte_order);
795 regs[bits (insn, 8, 10)] = pv_constant (constant);
796 }
797 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
798 {
799 unsigned short inst2;
800
801 inst2 = read_code_unsigned_integer (start + 2, 2,
802 byte_order_for_code);
803
804 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
805 {
806 /* BL, BLX. Allow some special function calls when
807 skipping the prologue; GCC generates these before
808 storing arguments to the stack. */
809 CORE_ADDR nextpc;
810 int j1, j2, imm1, imm2;
811
812 imm1 = sbits (insn, 0, 10);
813 imm2 = bits (inst2, 0, 10);
814 j1 = bit (inst2, 13);
815 j2 = bit (inst2, 11);
816
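/* Reconstruct the branch offset S:I1:I2:imm10:imm11:'0'.  IMM1 already
   holds S:imm10 sign-extended, so bits 22 and 23 of OFFSET start out as
   copies of S; XOR-ing them with !J2 and !J1 below yields
   I2 = NOT(J2 EOR S) and I1 = NOT(J1 EOR S).  */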
817 offset = ((imm1 << 12) + (imm2 << 1));
818 offset ^= ((!j2) << 22) | ((!j1) << 23);
819
820 nextpc = start + 4 + offset;
821 /* For BLX make sure to clear the low bits. */
822 if (bit (inst2, 12) == 0)
823 nextpc = nextpc & 0xfffffffc;
824
825 if (!skip_prologue_function (gdbarch, nextpc,
826 bit (inst2, 12) != 0))
827 break;
828 }
829
830 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
831 { registers } */
832 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
833 {
834 pv_t addr = regs[bits (insn, 0, 3)];
835 int regno;
836
837 if (stack.store_would_trash (addr))
838 break;
839
840 /* Calculate offsets of saved registers. */
841 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
842 if (inst2 & (1 << regno))
843 {
844 addr = pv_add_constant (addr, -4);
845 stack.store (addr, 4, regs[regno]);
846 }
847
848 if (insn & 0x0020)
849 regs[bits (insn, 0, 3)] = addr;
850 }
851
852 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
853 [Rn, #+/-imm]{!} */
854 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
855 {
856 int regno1 = bits (inst2, 12, 15);
857 int regno2 = bits (inst2, 8, 11);
858 pv_t addr = regs[bits (insn, 0, 3)];
859
860 offset = inst2 & 0xff;
861 if (insn & 0x0080)
862 addr = pv_add_constant (addr, offset);
863 else
864 addr = pv_add_constant (addr, -offset);
865
866 if (stack.store_would_trash (addr))
867 break;
868
869 stack.store (addr, 4, regs[regno1]);
870 stack.store (pv_add_constant (addr, 4),
871 4, regs[regno2]);
872
873 if (insn & 0x0020)
874 regs[bits (insn, 0, 3)] = addr;
875 }
876
877 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
878 && (inst2 & 0x0c00) == 0x0c00
879 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
880 {
881 int regno = bits (inst2, 12, 15);
882 pv_t addr = regs[bits (insn, 0, 3)];
883
884 offset = inst2 & 0xff;
885 if (inst2 & 0x0200)
886 addr = pv_add_constant (addr, offset);
887 else
888 addr = pv_add_constant (addr, -offset);
889
890 if (stack.store_would_trash (addr))
891 break;
892
893 stack.store (addr, 4, regs[regno]);
894
895 if (inst2 & 0x0100)
896 regs[bits (insn, 0, 3)] = addr;
897 }
898
899 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
900 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
901 {
902 int regno = bits (inst2, 12, 15);
903 pv_t addr;
904
905 offset = inst2 & 0xfff;
906 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
907
908 if (stack.store_would_trash (addr))
909 break;
910
911 stack.store (addr, 4, regs[regno]);
912 }
913
914 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
916 /* Ignore stores of argument registers to the stack. */
917 ;
918
919 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
920 && (inst2 & 0x0d00) == 0x0c00
921 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
922 /* Ignore stores of argument registers to the stack. */
923 ;
924
925 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
926 { registers } */
927 && (inst2 & 0x8000) == 0x0000
928 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
929 /* Ignore block loads from the stack, potentially copying
930 parameters from memory. */
931 ;
932
933 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
934 [Rn, #+/-imm] */
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
936 /* Similarly ignore dual loads from the stack. */
937 ;
938
939 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
940 && (inst2 & 0x0d00) == 0x0c00
941 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
942 /* Similarly ignore single loads from the stack. */
943 ;
944
945 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
946 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
947 /* Similarly ignore single loads from the stack. */
948 ;
949
950 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
951 && (inst2 & 0x8000) == 0x0000)
952 {
953 unsigned int imm = ((bits (insn, 10, 10) << 11)
954 | (bits (inst2, 12, 14) << 8)
955 | bits (inst2, 0, 7));
956
957 regs[bits (inst2, 8, 11)]
958 = pv_add_constant (regs[bits (insn, 0, 3)],
959 thumb_expand_immediate (imm));
960 }
961
962 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
963 && (inst2 & 0x8000) == 0x0000)
964 {
965 unsigned int imm = ((bits (insn, 10, 10) << 11)
966 | (bits (inst2, 12, 14) << 8)
967 | bits (inst2, 0, 7));
968
969 regs[bits (inst2, 8, 11)]
970 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
971 }
972
973 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
974 && (inst2 & 0x8000) == 0x0000)
975 {
976 unsigned int imm = ((bits (insn, 10, 10) << 11)
977 | (bits (inst2, 12, 14) << 8)
978 | bits (inst2, 0, 7));
979
980 regs[bits (inst2, 8, 11)]
981 = pv_add_constant (regs[bits (insn, 0, 3)],
982 - (CORE_ADDR) thumb_expand_immediate (imm));
983 }
984
985 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
986 && (inst2 & 0x8000) == 0x0000)
987 {
988 unsigned int imm = ((bits (insn, 10, 10) << 11)
989 | (bits (inst2, 12, 14) << 8)
990 | bits (inst2, 0, 7));
991
992 regs[bits (inst2, 8, 11)]
993 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
994 }
995
996 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
997 {
998 unsigned int imm = ((bits (insn, 10, 10) << 11)
999 | (bits (inst2, 12, 14) << 8)
1000 | bits (inst2, 0, 7));
1001
1002 regs[bits (inst2, 8, 11)]
1003 = pv_constant (thumb_expand_immediate (imm));
1004 }
1005
1006 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1007 {
1008 unsigned int imm
1009 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1010
1011 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1012 }
1013
1014 else if (insn == 0xea5f /* mov.w Rd,Rm */
1015 && (inst2 & 0xf0f0) == 0)
1016 {
1017 int dst_reg = (inst2 & 0x0f00) >> 8;
1018 int src_reg = inst2 & 0xf;
1019 regs[dst_reg] = regs[src_reg];
1020 }
1021
1022 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1023 {
1024 /* Constant pool loads. */
1025 unsigned int constant;
1026 CORE_ADDR loc;
1027
1028 offset = bits (inst2, 0, 11);
1029 if (insn & 0x0080)
1030 loc = start + 4 + offset;
1031 else
1032 loc = start + 4 - offset;
1033
1034 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1035 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1036 }
1037
1038 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1039 {
1040 /* Constant pool loads. */
1041 unsigned int constant;
1042 CORE_ADDR loc;
1043
1044 offset = bits (inst2, 0, 7) << 2;
1045 if (insn & 0x0080)
1046 loc = start + 4 + offset;
1047 else
1048 loc = start + 4 - offset;
1049
1050 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1051 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1052
1053 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1054 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1055 }
1056
1057 else if (thumb2_instruction_changes_pc (insn, inst2))
1058 {
1059 /* Don't scan past anything that might change control flow. */
1060 break;
1061 }
1062 else
1063 {
1064 /* The optimizer might shove anything into the prologue,
1065 so we just skip what we don't recognize. */
1066 unrecognized_pc = start;
1067 }
1068
1069 start += 2;
1070 }
1071 else if (thumb_instruction_changes_pc (insn))
1072 {
1073 /* Don't scan past anything that might change control flow. */
1074 break;
1075 }
1076 else
1077 {
1078 /* The optimizer might shove anything into the prologue,
1079 so we just skip what we don't recognize. */
1080 unrecognized_pc = start;
1081 }
1082
1083 start += 2;
1084 }
1085
1086 if (arm_debug)
1087 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1088 paddress (gdbarch, start));
1089
1090 if (unrecognized_pc == 0)
1091 unrecognized_pc = start;
1092
1093 if (cache == NULL)
1094 return unrecognized_pc;
1095
1096 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1097 {
1098 /* Frame pointer is fp. Frame size is constant. */
1099 cache->framereg = ARM_FP_REGNUM;
1100 cache->framesize = -regs[ARM_FP_REGNUM].k;
1101 }
1102 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1103 {
1104 /* Frame pointer is r7. Frame size is constant. */
1105 cache->framereg = THUMB_FP_REGNUM;
1106 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1107 }
1108 else
1109 {
1110 /* Try the stack pointer... this is a bit desperate. */
1111 cache->framereg = ARM_SP_REGNUM;
1112 cache->framesize = -regs[ARM_SP_REGNUM].k;
1113 }
1114
1115 for (i = 0; i < 16; i++)
1116 if (stack.find_reg (gdbarch, i, &offset))
1117 cache->saved_regs[i].addr = offset;
1118
1119 return unrecognized_pc;
1120 }
1121
1122
1123 /* Try to analyze the instructions starting from PC, which load the symbol
1124 __stack_chk_guard. Return the address of the instruction after the load,
1125 set the destination register number in *DESTREG, and set the size in bytes
1126 of the loading instructions in *OFFSET. Return 0 if the instructions are
1127 not recognized. */
1128
1129 static CORE_ADDR
1130 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1131 unsigned int *destreg, int *offset)
1132 {
1133 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1134 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1135 unsigned int low, high, address;
1136
1137 address = 0;
1138 if (is_thumb)
1139 {
1140 unsigned short insn1
1141 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1142
1143 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1144 {
1145 *destreg = bits (insn1, 8, 10);
1146 *offset = 2;
1147 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1148 address = read_memory_unsigned_integer (address, 4,
1149 byte_order_for_code);
1150 }
1151 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1152 {
1153 unsigned short insn2
1154 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1155
1156 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1157
1158 insn1
1159 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1160 insn2
1161 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1162
1163 /* movt Rd, #const */
1164 if ((insn1 & 0xfbc0) == 0xf2c0)
1165 {
1166 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1167 *destreg = bits (insn2, 8, 11);
1168 *offset = 8;
1169 address = (high << 16 | low);
1170 }
1171 }
1172 }
1173 else
1174 {
1175 unsigned int insn
1176 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1177
1178 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1179 {
1180 address = bits (insn, 0, 11) + pc + 8;
1181 address = read_memory_unsigned_integer (address, 4,
1182 byte_order_for_code);
1183
1184 *destreg = bits (insn, 12, 15);
1185 *offset = 4;
1186 }
1187 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1188 {
1189 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1190
1191 insn
1192 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1193
1194 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1195 {
1196 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1197 *destreg = bits (insn, 12, 15);
1198 *offset = 8;
1199 address = (high << 16 | low);
1200 }
1201 }
1202 }
1203
1204 return address;
1205 }
1206
1207 /* Try to skip a sequence of instructions used for the stack protector. If PC
1208 points to the first instruction of this sequence, return the address of the
1209 first instruction after this sequence; otherwise, return the original PC.
1210
1211 On arm, this sequence of instructions is mainly composed of three steps:
1212 Step 1: load symbol __stack_chk_guard,
1213 Step 2: load from address of __stack_chk_guard,
1214 Step 3: store it to somewhere else.
1215
1216 Usually, the instructions in step 2 and step 3 are the same across ARM
1217 architectures. In step 2, it is the single instruction 'ldr Rx, [Rn, #0]',
1218 and in step 3 it is likewise the single instruction 'str Rx, [r7, #immd]'.
1219 However, the instructions in step 1 vary between ARM architectures. On ARMv7,
1220 they are,
1221
1222 movw Rn, #:lower16:__stack_chk_guard
1223 movt Rn, #:upper16:__stack_chk_guard
1224
1225 On ARMv5t, it is,
1226
1227 ldr Rn, .Label
1228 ....
1229 .Label:
1230 .word __stack_chk_guard
1231
1232 Since ldr/str are very common instructions, we can't use them alone as the
1233 'fingerprint' or 'signature' of a stack protector sequence. Here we choose
1234 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1235 not stripped, as the 'fingerprint' of a stack protector code sequence. */
1236
1237 static CORE_ADDR
1238 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1239 {
1240 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1241 unsigned int basereg;
1242 struct bound_minimal_symbol stack_chk_guard;
1243 int offset;
1244 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1245 CORE_ADDR addr;
1246
1247 /* Try to parse the instructions in Step 1. */
1248 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1249 &basereg, &offset);
1250 if (!addr)
1251 return pc;
1252
1253 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1254 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1255 Otherwise, this sequence cannot be for stack protector. */
1256 if (stack_chk_guard.minsym == NULL
1257 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1258 return pc;
1259
1260 if (is_thumb)
1261 {
1262 unsigned int destreg;
1263 unsigned short insn
1264 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1265
1266 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1267 if ((insn & 0xf800) != 0x6800)
1268 return pc;
1269 if (bits (insn, 3, 5) != basereg)
1270 return pc;
1271 destreg = bits (insn, 0, 2);
1272
1273 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1274 byte_order_for_code);
1275 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1276 if ((insn & 0xf800) != 0x6000)
1277 return pc;
1278 if (destreg != bits (insn, 0, 2))
1279 return pc;
1280 }
1281 else
1282 {
1283 unsigned int destreg;
1284 unsigned int insn
1285 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1286
1287 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1288 if ((insn & 0x0e500000) != 0x04100000)
1289 return pc;
1290 if (bits (insn, 16, 19) != basereg)
1291 return pc;
1292 destreg = bits (insn, 12, 15);
1293 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1294 insn = read_code_unsigned_integer (pc + offset + 4,
1295 4, byte_order_for_code);
1296 if ((insn & 0x0e500000) != 0x04000000)
1297 return pc;
1298 if (bits (insn, 12, 15) != destreg)
1299 return pc;
1300 }
1301 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1302 and 8 bytes on arm. */
1303 if (is_thumb)
1304 return pc + offset + 4;
1305 else
1306 return pc + offset + 8;
1307 }
1308
1309 /* Advance the PC across any function entry prologue instructions to
1310 reach some "real" code.
1311
1312 The APCS (ARM Procedure Call Standard) defines the following
1313 prologue:
1314
1315 mov ip, sp
1316 [stmfd sp!, {a1,a2,a3,a4}]
1317 stmfd sp!, {...,fp,ip,lr,pc}
1318 [stfe f7, [sp, #-12]!]
1319 [stfe f6, [sp, #-12]!]
1320 [stfe f5, [sp, #-12]!]
1321 [stfe f4, [sp, #-12]!]
1322 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1323
1324 static CORE_ADDR
1325 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1326 {
1327 CORE_ADDR func_addr, limit_pc;
1328
1329 /* See if we can determine the end of the prologue via the symbol table.
1330 If so, then return either PC, or the PC after the prologue, whichever
1331 is greater. */
1332 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1333 {
1334 CORE_ADDR post_prologue_pc
1335 = skip_prologue_using_sal (gdbarch, func_addr);
1336 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1337
1338 if (post_prologue_pc)
1339 post_prologue_pc
1340 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1341
1342
1343 /* GCC always emits a line note before the prologue and another
1344 one after, even if the two are at the same address or on the
1345 same line. Take advantage of this so that we do not need to
1346 know every instruction that might appear in the prologue. We
1347 will have producer information for most binaries; if it is
1348 missing (e.g. for -gstabs), assume the GNU tools.
1349 if (post_prologue_pc
1350 && (cust == NULL
1351 || COMPUNIT_PRODUCER (cust) == NULL
1352 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1353 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1354 return post_prologue_pc;
1355
1356 if (post_prologue_pc != 0)
1357 {
1358 CORE_ADDR analyzed_limit;
1359
1360 /* For non-GCC compilers, make sure the entire line is an
1361 acceptable prologue; GDB will round this function's
1362 return value up to the end of the following line so we
1363 cannot skip just part of a line (and we do not want to).
1364
1365 RealView does not treat the prologue specially, but does
1366 associate prologue code with the opening brace; so this
1367 lets us skip the first line if we think it is the opening
1368 brace. */
1369 if (arm_pc_is_thumb (gdbarch, func_addr))
1370 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1371 post_prologue_pc, NULL);
1372 else
1373 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1374 post_prologue_pc, NULL);
1375
1376 if (analyzed_limit != post_prologue_pc)
1377 return func_addr;
1378
1379 return post_prologue_pc;
1380 }
1381 }
1382
1383 /* Can't determine prologue from the symbol table, need to examine
1384 instructions. */
1385
1386 /* Find an upper limit on the function prologue using the debug
1387 information. If the debug information could not be used to provide
1388 that bound, then use an arbitrary large number as the upper bound. */
1389 /* Like arm_scan_prologue, stop no later than pc + 64. */
1390 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1391 if (limit_pc == 0)
1392 limit_pc = pc + 64; /* Magic. */
1393
1394
1395 /* Check if this is Thumb code. */
1396 if (arm_pc_is_thumb (gdbarch, pc))
1397 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1398 else
1399 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1400 }
1401
1402 /* *INDENT-OFF* */
1403 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1404 This function decodes a Thumb function prologue to determine:
1405 1) the size of the stack frame
1406 2) which registers are saved on it
1407 3) the offsets of saved regs
1408 4) the offset from the stack pointer to the frame pointer
1409
1410 A typical Thumb function prologue would create this stack frame
1411 (offsets relative to FP)
1412 old SP -> 24 stack parameters
1413 20 LR
1414 16 R7
1415 R7 -> 0 local variables (16 bytes)
1416 SP -> -12 additional stack space (12 bytes)
1417 The frame size would thus be 36 bytes, and the frame offset would be
1418 12 bytes. The frame register is R7.
1419
1420 The comments for thumb_skip_prolog() describe the algorithm we use
1421 to detect the end of the prolog. */
1422 /* *INDENT-ON* */
1423
1424 static void
1425 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1426 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1427 {
1428 CORE_ADDR prologue_start;
1429 CORE_ADDR prologue_end;
1430
1431 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1432 &prologue_end))
1433 {
1434 /* See comment in arm_scan_prologue for an explanation of
1435 this heuristic. */
1436 if (prologue_end > prologue_start + 64)
1437 {
1438 prologue_end = prologue_start + 64;
1439 }
1440 }
1441 else
1442 /* We're in the boondocks: we have no idea where the start of the
1443 function is. */
1444 return;
1445
1446 prologue_end = std::min (prologue_end, prev_pc);
1447
1448 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1449 }
1450
1451 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1452 otherwise. */
1453
1454 static int
1455 arm_instruction_restores_sp (unsigned int insn)
1456 {
1457 if (bits (insn, 28, 31) != INST_NV)
1458 {
1459 if ((insn & 0x0df0f000) == 0x0080d000
1460 /* ADD SP (register or immediate). */
1461 || (insn & 0x0df0f000) == 0x0040d000
1462 /* SUB SP (register or immediate). */
1463 || (insn & 0x0ffffff0) == 0x01a0d000
1464 /* MOV SP. */
1465 || (insn & 0x0fff0000) == 0x08bd0000
1466 /* POP (LDMIA). */
1467 || (insn & 0x0fff0000) == 0x049d0000)
1468 /* POP of a single register. */
1469 return 1;
1470 }
1471
1472 return 0;
1473 }
1474
1475 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1476 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1477 fill it in. Return the first address not recognized as a prologue
1478 instruction.
1479
1480 We recognize all the instructions typically found in ARM prologues,
1481 plus harmless instructions which can be skipped (either for analysis
1482 purposes, or a more restrictive set that can be skipped when finding
1483 the end of the prologue). */
1484
1485 static CORE_ADDR
1486 arm_analyze_prologue (struct gdbarch *gdbarch,
1487 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1488 struct arm_prologue_cache *cache)
1489 {
1490 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1491 int regno;
1492 CORE_ADDR offset, current_pc;
1493 pv_t regs[ARM_FPS_REGNUM];
1494 CORE_ADDR unrecognized_pc = 0;
1495
1496 /* Search the prologue looking for instructions that set up the
1497 frame pointer, adjust the stack pointer, and save registers.
1498
1499 Be careful, however, and if it doesn't look like a prologue,
1500 don't try to scan it. If, for instance, a frameless function
1501 begins with stmfd sp!, then we will tell ourselves there is
1502 a frame, which will confuse stack traceback, as well as "finish"
1503 and other operations that rely on a knowledge of the stack
1504 traceback. */
1505
1506 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1507 regs[regno] = pv_register (regno, 0);
1508 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1509
1510 for (current_pc = prologue_start;
1511 current_pc < prologue_end;
1512 current_pc += 4)
1513 {
1514 unsigned int insn
1515 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1516
1517 if (insn == 0xe1a0c00d) /* mov ip, sp */
1518 {
1519 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1520 continue;
1521 }
1522 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1523 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1524 {
1525 unsigned imm = insn & 0xff; /* immediate value */
1526 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1527 int rd = bits (insn, 12, 15);
1528 imm = (imm >> rot) | (imm << (32 - rot));
1529 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1530 continue;
1531 }
1532 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1533 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1534 {
1535 unsigned imm = insn & 0xff; /* immediate value */
1536 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1537 int rd = bits (insn, 12, 15);
1538 imm = (imm >> rot) | (imm << (32 - rot));
1539 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1540 continue;
1541 }
1542 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1543 [sp, #-4]! */
1544 {
1545 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1546 break;
1547 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1548 stack.store (regs[ARM_SP_REGNUM], 4,
1549 regs[bits (insn, 12, 15)]);
1550 continue;
1551 }
1552 else if ((insn & 0xffff0000) == 0xe92d0000)
1553 /* stmfd sp!, {..., fp, ip, lr, pc}
1554 or
1555 stmfd sp!, {a1, a2, a3, a4} */
1556 {
1557 int mask = insn & 0xffff;
1558
1559 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1560 break;
1561
1562 /* Calculate offsets of saved registers. */
1563 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1564 if (mask & (1 << regno))
1565 {
1566 regs[ARM_SP_REGNUM]
1567 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1568 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1569 }
1570 }
1571 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1572 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1573 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1574 {
1575 /* No need to add this to saved_regs -- it's just an arg reg. */
1576 continue;
1577 }
1578 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1579 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1580 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1581 {
1582 /* No need to add this to saved_regs -- it's just an arg reg. */
1583 continue;
1584 }
1585 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1586 { registers } */
1587 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1588 {
1589 /* No need to add this to saved_regs -- it's just arg regs. */
1590 continue;
1591 }
1592 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
1593 {
1594 unsigned imm = insn & 0xff; /* immediate value */
1595 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1596 imm = (imm >> rot) | (imm << (32 - rot));
1597 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1598 }
1599 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
1600 {
1601 unsigned imm = insn & 0xff; /* immediate value */
1602 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1603 imm = (imm >> rot) | (imm << (32 - rot));
1604 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1605 }
1606 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1607 [sp, -#c]! */
1608 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1609 {
1610 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1611 break;
1612
1613 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1614 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1615 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1616 }
1617 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1618 [sp!] */
1619 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1620 {
1621 int n_saved_fp_regs;
1622 unsigned int fp_start_reg, fp_bound_reg;
1623
1624 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1625 break;
1626
1627 if ((insn & 0x800) == 0x800) /* N0 is set */
1628 {
1629 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1630 n_saved_fp_regs = 3;
1631 else
1632 n_saved_fp_regs = 1;
1633 }
1634 else
1635 {
1636 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1637 n_saved_fp_regs = 2;
1638 else
1639 n_saved_fp_regs = 4;
1640 }
1641
1642 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1643 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1644 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1645 {
1646 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1647 stack.store (regs[ARM_SP_REGNUM], 12,
1648 regs[fp_start_reg++]);
1649 }
1650 }
1651 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1652 {
1653 /* Allow some special function calls when skipping the
1654 prologue; GCC generates these before storing arguments to
1655 the stack. */
1656 CORE_ADDR dest = BranchDest (current_pc, insn);
1657
1658 if (skip_prologue_function (gdbarch, dest, 0))
1659 continue;
1660 else
1661 break;
1662 }
1663 else if ((insn & 0xf0000000) != 0xe0000000)
1664 break; /* Condition not true, exit early. */
1665 else if (arm_instruction_changes_pc (insn))
1666 /* Don't scan past anything that might change control flow. */
1667 break;
1668 else if (arm_instruction_restores_sp (insn))
1669 {
1670 /* Don't scan past the epilogue. */
1671 break;
1672 }
1673 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1674 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1675 /* Ignore block loads from the stack, potentially copying
1676 parameters from memory. */
1677 continue;
1678 else if ((insn & 0xfc500000) == 0xe4100000
1679 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1680 /* Similarly ignore single loads from the stack. */
1681 continue;
1682 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1683 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1684 register instead of the stack. */
1685 continue;
1686 else
1687 {
1688 /* The optimizer might shove anything into the prologue, if
1689 we build up cache (cache != NULL) from scanning prologue,
1690 we just skip what we don't recognize and scan further to
1691 make cache as complete as possible. However, if we skip
1692 prologue, we'll stop immediately on unrecognized
1693 instruction. */
1694 unrecognized_pc = current_pc;
1695 if (cache != NULL)
1696 continue;
1697 else
1698 break;
1699 }
1700 }
1701
1702 if (unrecognized_pc == 0)
1703 unrecognized_pc = current_pc;
1704
1705 if (cache)
1706 {
1707 int framereg, framesize;
1708
1709 /* The frame size is just the distance from the frame register
1710 to the original stack pointer. */
1711 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1712 {
1713 /* Frame pointer is fp. */
1714 framereg = ARM_FP_REGNUM;
1715 framesize = -regs[ARM_FP_REGNUM].k;
1716 }
1717 else
1718 {
1719 /* Try the stack pointer... this is a bit desperate. */
1720 framereg = ARM_SP_REGNUM;
1721 framesize = -regs[ARM_SP_REGNUM].k;
1722 }
1723
1724 cache->framereg = framereg;
1725 cache->framesize = framesize;
1726
1727 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1728 if (stack.find_reg (gdbarch, regno, &offset))
1729 cache->saved_regs[regno].addr = offset;
1730 }
1731
1732 if (arm_debug)
1733 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1734 paddress (gdbarch, unrecognized_pc));
1735
1736 return unrecognized_pc;
1737 }
1738
1739 static void
1740 arm_scan_prologue (struct frame_info *this_frame,
1741 struct arm_prologue_cache *cache)
1742 {
1743 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1744 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1745 CORE_ADDR prologue_start, prologue_end;
1746 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1747 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1748
1749 /* Assume there is no frame until proven otherwise. */
1750 cache->framereg = ARM_SP_REGNUM;
1751 cache->framesize = 0;
1752
1753 /* Check for Thumb prologue. */
1754 if (arm_frame_is_thumb (this_frame))
1755 {
1756 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1757 return;
1758 }
1759
1760 /* Find the function prologue. If we can't find the function in
1761 the symbol table, peek in the stack frame to find the PC. */
1762 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1763 &prologue_end))
1764 {
1765 /* One way to find the end of the prologue (which works well
1766 for unoptimized code) is to do the following:
1767
1768 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1769
1770 if (sal.line == 0)
1771 prologue_end = prev_pc;
1772 else if (sal.end < prologue_end)
1773 prologue_end = sal.end;
1774
1775 This mechanism is very accurate so long as the optimizer
1776 doesn't move any instructions from the function body into the
1777 prologue. If this happens, sal.end will be the last
1778 instruction in the first hunk of prologue code just before
1779 the first instruction that the scheduler has moved from
1780 the body to the prologue.
1781
1782 In order to make sure that we scan all of the prologue
1783 instructions, we use a slightly less accurate mechanism which
1784 may scan more than necessary. To help compensate for this
1785 lack of accuracy, the prologue scanning loop below contains
1786 several clauses which will cause the loop to terminate early if
1787 an implausible prologue instruction is encountered.
1788
1789 The expression
1790
1791 prologue_start + 64
1792
1793 is a suitable endpoint since it accounts for the largest
1794 possible prologue plus up to five instructions inserted by
1795 the scheduler. */
1796
1797 if (prologue_end > prologue_start + 64)
1798 {
1799 prologue_end = prologue_start + 64; /* See above. */
1800 }
1801 }
1802 else
1803 {
1804 /* We have no symbol information. Our only option is to assume this
1805 function has a standard stack frame and the normal frame register.
1806 Then, we can find the value of our frame pointer on entrance to
1807 the callee (or at the present moment if this is the innermost frame).
1808 The value stored there should be the address of the stmfd + 8. */
1809 CORE_ADDR frame_loc;
1810 ULONGEST return_value;
1811
1812 /* AAPCS does not use a frame register, so we can abort here. */
1813 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1814 return;
1815
1816 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1817 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1818 &return_value))
1819 return;
1820 else
1821 {
1822 prologue_start = gdbarch_addr_bits_remove
1823 (gdbarch, return_value) - 8;
1824 prologue_end = prologue_start + 64; /* See above. */
1825 }
1826 }
1827
1828 if (prev_pc < prologue_end)
1829 prologue_end = prev_pc;
1830
1831 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1832 }
1833
1834 static struct arm_prologue_cache *
1835 arm_make_prologue_cache (struct frame_info *this_frame)
1836 {
1837 int reg;
1838 struct arm_prologue_cache *cache;
1839 CORE_ADDR unwound_fp;
1840
1841 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1842 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1843
1844 arm_scan_prologue (this_frame, cache);
1845
1846 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1847 if (unwound_fp == 0)
1848 return cache;
1849
1850 cache->prev_sp = unwound_fp + cache->framesize;
1851
1852 /* Calculate actual addresses of saved registers using offsets
1853 determined by arm_scan_prologue. */
1854 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1855 if (trad_frame_addr_p (cache->saved_regs, reg))
1856 cache->saved_regs[reg].addr += cache->prev_sp;
1857
1858 return cache;
1859 }
1860
1861 /* Implementation of the stop_reason hook for arm_prologue frames. */
1862
1863 static enum unwind_stop_reason
1864 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1865 void **this_cache)
1866 {
1867 struct arm_prologue_cache *cache;
1868 CORE_ADDR pc;
1869
1870 if (*this_cache == NULL)
1871 *this_cache = arm_make_prologue_cache (this_frame);
1872 cache = (struct arm_prologue_cache *) *this_cache;
1873
1874 /* This is meant to halt the backtrace at "_start". */
1875 pc = get_frame_pc (this_frame);
1876 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1877 return UNWIND_OUTERMOST;
1878
1879 /* If we've hit a wall, stop. */
1880 if (cache->prev_sp == 0)
1881 return UNWIND_OUTERMOST;
1882
1883 return UNWIND_NO_REASON;
1884 }
1885
1886 /* Our frame ID for a normal frame is the current function's starting PC
1887 and the caller's SP when we were called. */
1888
1889 static void
1890 arm_prologue_this_id (struct frame_info *this_frame,
1891 void **this_cache,
1892 struct frame_id *this_id)
1893 {
1894 struct arm_prologue_cache *cache;
1895 struct frame_id id;
1896 CORE_ADDR pc, func;
1897
1898 if (*this_cache == NULL)
1899 *this_cache = arm_make_prologue_cache (this_frame);
1900 cache = (struct arm_prologue_cache *) *this_cache;
1901
1902 /* Use function start address as part of the frame ID. If we cannot
1903 identify the start address (due to missing symbol information),
1904 fall back to just using the current PC. */
1905 pc = get_frame_pc (this_frame);
1906 func = get_frame_func (this_frame);
1907 if (!func)
1908 func = pc;
1909
1910 id = frame_id_build (cache->prev_sp, func);
1911 *this_id = id;
1912 }
1913
1914 static struct value *
1915 arm_prologue_prev_register (struct frame_info *this_frame,
1916 void **this_cache,
1917 int prev_regnum)
1918 {
1919 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1920 struct arm_prologue_cache *cache;
1921
1922 if (*this_cache == NULL)
1923 *this_cache = arm_make_prologue_cache (this_frame);
1924 cache = (struct arm_prologue_cache *) *this_cache;
1925
1926 /* If we are asked to unwind the PC, then we need to return the LR
1927 instead. The prologue may save PC, but it will point into this
1928 frame's prologue, not the next frame's resume location. Also
1929 strip the saved T bit. A valid LR may have the low bit set, but
1930 a valid PC never does. */
1931 if (prev_regnum == ARM_PC_REGNUM)
1932 {
1933 CORE_ADDR lr;
1934
1935 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1936 return frame_unwind_got_constant (this_frame, prev_regnum,
1937 arm_addr_bits_remove (gdbarch, lr));
1938 }
1939
1940 /* SP is generally not saved to the stack, but this frame is
1941 identified by the next frame's stack pointer at the time of the call.
1942 The value was already reconstructed into PREV_SP. */
1943 if (prev_regnum == ARM_SP_REGNUM)
1944 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1945
1946 /* The CPSR may have been changed by the call instruction and by the
1947 called function. The only bit we can reconstruct is the T bit,
1948 by checking the low bit of LR as of the call. This is a reliable
1949 indicator of Thumb-ness except for some ARM v4T pre-interworking
1950 Thumb code, which could get away with a clear low bit as long as
1951 the called function did not use bx. Guess that all other
1952 bits are unchanged; the condition flags are presumably lost,
1953 but the processor status is likely valid. */
1954 if (prev_regnum == ARM_PS_REGNUM)
1955 {
1956 CORE_ADDR lr, cpsr;
1957 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1958
1959 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1960 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1961 if (IS_THUMB_ADDR (lr))
1962 cpsr |= t_bit;
1963 else
1964 cpsr &= ~t_bit;
1965 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1966 }
1967
1968 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1969 prev_regnum);
1970 }
1971
1972 struct frame_unwind arm_prologue_unwind = {
1973 NORMAL_FRAME,
1974 arm_prologue_unwind_stop_reason,
1975 arm_prologue_this_id,
1976 arm_prologue_prev_register,
1977 NULL,
1978 default_frame_sniffer
1979 };
1980
1981 /* Maintain a list of ARM exception table entries per objfile, similar to the
1982 list of mapping symbols. We only cache entries for standard ARM-defined
1983 personality routines; the cache will contain only the frame unwinding
1984 instructions associated with the entry (not the descriptors). */
1985
1986 struct arm_exidx_entry
1987 {
1988 bfd_vma addr;
1989 gdb_byte *entry;
1990
1991 bool operator< (const arm_exidx_entry &other) const
1992 {
1993 return addr < other.addr;
1994 }
1995 };
1996
1997 struct arm_exidx_data
1998 {
1999 std::vector<std::vector<arm_exidx_entry>> section_maps;
2000 };
2001
2002 /* Per-BFD key to store exception handling information. */
2003 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2004
2005 static struct obj_section *
2006 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2007 {
2008 struct obj_section *osect;
2009
2010 ALL_OBJFILE_OSECTIONS (objfile, osect)
2011 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2012 {
2013 bfd_vma start, size;
2014 start = bfd_section_vma (osect->the_bfd_section);
2015 size = bfd_section_size (osect->the_bfd_section);
2016
2017 if (start <= vma && vma < start + size)
2018 return osect;
2019 }
2020
2021 return NULL;
2022 }
2023
2024 /* Parse contents of exception table and exception index sections
2025 of OBJFILE, and fill in the exception table entry cache.
2026
2027 For each entry that refers to a standard ARM-defined personality
2028 routine, extract the frame unwinding instructions (from either
2029 the index or the table section). The unwinding instructions
2030 are normalized by:
2031 - extracting them from the rest of the table data
2032 - converting to host endianness
2033 - appending the implicit 0xb0 ("Finish") code
2034
2035 The extracted and normalized instructions are stored for later
2036 retrieval by the arm_find_exidx_entry routine. */
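
/* As an illustration (not taken from any particular binary): an
   .ARM.exidx entry is a pair of 32-bit words.  The first word is a
   prel31 (place-relative, 31-bit) offset to the start of the function.
   If the second word is, say, 0x80a8b0b0, the 0x80 in its top byte
   marks a short-form entry using personality routine 0
   (__aeabi_unwind_cpp_pr0), and the remaining bytes 0xa8 0xb0 0xb0 are
   unwind opcodes: "pop {r4, lr}", "finish", "finish".  After the
   normalization described above, the cached instruction list would be
   0xa8 0xb0 0xb0 0xb0, with the implicit trailing "Finish" appended.  */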
2037
2038 static void
2039 arm_exidx_new_objfile (struct objfile *objfile)
2040 {
2041 struct arm_exidx_data *data;
2042 asection *exidx, *extab;
2043 bfd_vma exidx_vma = 0, extab_vma = 0;
2044 LONGEST i;
2045
2046 /* If we've already touched this file, do nothing. */
2047 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2048 return;
2049
2050 /* Read contents of exception table and index. */
2051 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2052 gdb::byte_vector exidx_data;
2053 if (exidx)
2054 {
2055 exidx_vma = bfd_section_vma (exidx);
2056 exidx_data.resize (bfd_section_size (exidx));
2057
2058 if (!bfd_get_section_contents (objfile->obfd, exidx,
2059 exidx_data.data (), 0,
2060 exidx_data.size ()))
2061 return;
2062 }
2063
2064 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2065 gdb::byte_vector extab_data;
2066 if (extab)
2067 {
2068 extab_vma = bfd_section_vma (extab);
2069 extab_data.resize (bfd_section_size (extab));
2070
2071 if (!bfd_get_section_contents (objfile->obfd, extab,
2072 extab_data.data (), 0,
2073 extab_data.size ()))
2074 return;
2075 }
2076
2077 /* Allocate exception table data structure. */
2078 data = arm_exidx_data_key.emplace (objfile->obfd);
2079 data->section_maps.resize (objfile->obfd->section_count);
2080
2081 /* Fill in exception table. */
2082 for (i = 0; i < exidx_data.size () / 8; i++)
2083 {
2084 struct arm_exidx_entry new_exidx_entry;
2085 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2086 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2087 exidx_data.data () + i * 8 + 4);
2088 bfd_vma addr = 0, word = 0;
2089 int n_bytes = 0, n_words = 0;
2090 struct obj_section *sec;
2091 gdb_byte *entry = NULL;
2092
2093 /* Extract address of start of function. */
2094 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2095 idx += exidx_vma + i * 8;
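      /* The mask/XOR/subtract sequence above sign-extends the 31-bit
         place-relative (prel31) offset; adding the address of the entry
         itself then yields the absolute function address.  */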
2096
2097 /* Find section containing function and compute section offset. */
2098 sec = arm_obj_section_from_vma (objfile, idx);
2099 if (sec == NULL)
2100 continue;
2101 idx -= bfd_section_vma (sec->the_bfd_section);
2102
2103 /* Determine address of exception table entry. */
2104 if (val == 1)
2105 {
2106 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2107 }
2108 else if ((val & 0xff000000) == 0x80000000)
2109 {
2110 /* Exception table entry embedded in .ARM.exidx
2111 -- must be short form. */
2112 word = val;
2113 n_bytes = 3;
2114 }
2115 else if (!(val & 0x80000000))
2116 {
2117 /* Exception table entry in .ARM.extab. */
2118 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2119 addr += exidx_vma + i * 8 + 4;
2120
2121 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2122 {
2123 word = bfd_h_get_32 (objfile->obfd,
2124 extab_data.data () + addr - extab_vma);
2125 addr += 4;
2126
2127 if ((word & 0xff000000) == 0x80000000)
2128 {
2129 /* Short form. */
2130 n_bytes = 3;
2131 }
2132 else if ((word & 0xff000000) == 0x81000000
2133 || (word & 0xff000000) == 0x82000000)
2134 {
2135 /* Long form. */
2136 n_bytes = 2;
2137 n_words = ((word >> 16) & 0xff);
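      /* In the long form, bits 16-23 of this first word give the number
         of additional 32-bit opcode words that follow, while its two low
         bytes already hold the first unwind opcodes.  */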
2138 }
2139 else if (!(word & 0x80000000))
2140 {
2141 bfd_vma pers;
2142 struct obj_section *pers_sec;
2143 int gnu_personality = 0;
2144
2145 /* Custom personality routine. */
2146 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2147 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2148
2149 /* Check whether we've got one of the variants of the
2150 GNU personality routines. */
2151 pers_sec = arm_obj_section_from_vma (objfile, pers);
2152 if (pers_sec)
2153 {
2154 static const char *personality[] =
2155 {
2156 "__gcc_personality_v0",
2157 "__gxx_personality_v0",
2158 "__gcj_personality_v0",
2159 "__gnu_objc_personality_v0",
2160 NULL
2161 };
2162
2163 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2164 int k;
2165
2166 for (k = 0; personality[k]; k++)
2167 if (lookup_minimal_symbol_by_pc_name
2168 (pc, personality[k], objfile))
2169 {
2170 gnu_personality = 1;
2171 break;
2172 }
2173 }
2174
2175 /* If so, the next word contains a word count in the high
2176 byte, followed by the same unwind instructions as the
2177 pre-defined forms. */
2178 if (gnu_personality
2179 && addr + 4 <= extab_vma + extab_data.size ())
2180 {
2181 word = bfd_h_get_32 (objfile->obfd,
2182 (extab_data.data ()
2183 + addr - extab_vma));
2184 addr += 4;
2185 n_bytes = 3;
2186 n_words = ((word >> 24) & 0xff);
2187 }
2188 }
2189 }
2190 }
2191
2192 /* Sanity check address. */
2193 if (n_words)
2194 if (addr < extab_vma
2195 || addr + 4 * n_words > extab_vma + extab_data.size ())
2196 n_words = n_bytes = 0;
2197
2198 /* The unwind instructions reside in WORD (only the N_BYTES least
2199 significant bytes are valid), followed by N_WORDS words in the
2200 extab section starting at ADDR. */
2201 if (n_bytes || n_words)
2202 {
2203 gdb_byte *p = entry
2204 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2205 n_bytes + n_words * 4 + 1);
2206
2207 while (n_bytes--)
2208 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2209
2210 while (n_words--)
2211 {
2212 word = bfd_h_get_32 (objfile->obfd,
2213 extab_data.data () + addr - extab_vma);
2214 addr += 4;
2215
2216 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2217 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2218 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2219 *p++ = (gdb_byte) (word & 0xff);
2220 }
2221
2222 /* Implied "Finish" to terminate the list. */
2223 *p++ = 0xb0;
2224 }
2225
2226 /* Push the entry onto the vector.  Entries are guaranteed to
2227 appear in order of increasing address. */
2228 new_exidx_entry.addr = idx;
2229 new_exidx_entry.entry = entry;
2230 data->section_maps[sec->the_bfd_section->index].push_back
2231 (new_exidx_entry);
2232 }
2233 }
2234
2235 /* Search for the exception table entry covering MEMADDR. If one is found,
2236 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2237 set *START to the start of the region covered by this entry. */
2238
2239 static gdb_byte *
2240 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2241 {
2242 struct obj_section *sec;
2243
2244 sec = find_pc_section (memaddr);
2245 if (sec != NULL)
2246 {
2247 struct arm_exidx_data *data;
2248 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2249
2250 data = arm_exidx_data_key.get (sec->objfile->obfd);
2251 if (data != NULL)
2252 {
2253 std::vector<arm_exidx_entry> &map
2254 = data->section_maps[sec->the_bfd_section->index];
2255 if (!map.empty ())
2256 {
2257 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2258
2259 /* std::lower_bound finds the earliest ordered insertion
2260 point. If the following entry starts at this exact
2261 address, we use that; otherwise, the preceding
2262 exception table entry covers this address. */
2263 if (idx < map.end ())
2264 {
2265 if (idx->addr == map_key.addr)
2266 {
2267 if (start)
2268 *start = idx->addr + obj_section_addr (sec);
2269 return idx->entry;
2270 }
2271 }
2272
2273 if (idx > map.begin ())
2274 {
2275 idx = idx - 1;
2276 if (start)
2277 *start = idx->addr + obj_section_addr (sec);
2278 return idx->entry;
2279 }
2280 }
2281 }
2282 }
2283
2284 return NULL;
2285 }
2286
2287 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2288 instruction list from the ARM exception table entry ENTRY, allocate and
2289 return a prologue cache structure describing how to unwind this frame.
2290
2291 Return NULL if the unwinding instruction list contains a "spare",
2292 "reserved" or "refuse to unwind" instruction as defined in section
2293 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2294 for the ARM Architecture" document. */
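
/* A short worked example (illustrative only): the instruction list
   0xa8 0xb0 decodes below as "pop {r4, r14}" -- r4 is recorded at the
   current vsp and LR at vsp + 4, and vsp advances by 8 -- followed by
   "Finish", which copies the saved LR entry into PC (since PC was not
   itself popped) and ends the decoding loop.  */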
2295
2296 static struct arm_prologue_cache *
2297 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2298 {
2299 CORE_ADDR vsp = 0;
2300 int vsp_valid = 0;
2301
2302 struct arm_prologue_cache *cache;
2303 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2304 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2305
2306 for (;;)
2307 {
2308 gdb_byte insn;
2309
2310 /* Whenever we reload SP, we have to retrieve its actual value
2311 in the current frame. */
2312 if (!vsp_valid)
2313 {
2314 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2315 {
2316 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2317 vsp = get_frame_register_unsigned (this_frame, reg);
2318 }
2319 else
2320 {
2321 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2322 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2323 }
2324
2325 vsp_valid = 1;
2326 }
2327
2328 /* Decode next unwind instruction. */
2329 insn = *entry++;
2330
2331 if ((insn & 0xc0) == 0)
2332 {
2333 int offset = insn & 0x3f;
2334 vsp += (offset << 2) + 4;
2335 }
2336 else if ((insn & 0xc0) == 0x40)
2337 {
2338 int offset = insn & 0x3f;
2339 vsp -= (offset << 2) + 4;
2340 }
2341 else if ((insn & 0xf0) == 0x80)
2342 {
2343 int mask = ((insn & 0xf) << 8) | *entry++;
2344 int i;
2345
2346 /* The special case of an all-zero mask identifies
2347 "Refuse to unwind". We return NULL to fall back
2348 to the prologue analyzer. */
2349 if (mask == 0)
2350 return NULL;
2351
2352 /* Pop registers r4..r15 under mask. */
2353 for (i = 0; i < 12; i++)
2354 if (mask & (1 << i))
2355 {
2356 cache->saved_regs[4 + i].addr = vsp;
2357 vsp += 4;
2358 }
2359
2360 /* Special-case popping SP -- we need to reload vsp. */
2361 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2362 vsp_valid = 0;
2363 }
2364 else if ((insn & 0xf0) == 0x90)
2365 {
2366 int reg = insn & 0xf;
2367
2368 /* Reserved cases. */
2369 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2370 return NULL;
2371
2372 /* Set SP from another register and mark VSP for reload. */
2373 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2374 vsp_valid = 0;
2375 }
2376 else if ((insn & 0xf0) == 0xa0)
2377 {
2378 int count = insn & 0x7;
2379 int pop_lr = (insn & 0x8) != 0;
2380 int i;
2381
2382 /* Pop r4..r[4+count]. */
2383 for (i = 0; i <= count; i++)
2384 {
2385 cache->saved_regs[4 + i].addr = vsp;
2386 vsp += 4;
2387 }
2388
2389 /* If indicated by flag, pop LR as well. */
2390 if (pop_lr)
2391 {
2392 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2393 vsp += 4;
2394 }
2395 }
2396 else if (insn == 0xb0)
2397 {
2398 /* We could only have updated PC by popping into it; if so, it
2399 will show up as an address. Otherwise, copy LR into PC. */
2400 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2401 cache->saved_regs[ARM_PC_REGNUM]
2402 = cache->saved_regs[ARM_LR_REGNUM];
2403
2404 /* We're done. */
2405 break;
2406 }
2407 else if (insn == 0xb1)
2408 {
2409 int mask = *entry++;
2410 int i;
2411
2412 /* All-zero mask and mask >= 16 is "spare". */
2413 if (mask == 0 || mask >= 16)
2414 return NULL;
2415
2416 /* Pop r0..r3 under mask. */
2417 for (i = 0; i < 4; i++)
2418 if (mask & (1 << i))
2419 {
2420 cache->saved_regs[i].addr = vsp;
2421 vsp += 4;
2422 }
2423 }
2424 else if (insn == 0xb2)
2425 {
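          /* vsp = vsp + 0x204 + (uleb128 << 2): a large stack-pointer
             adjustment whose operand is encoded as a ULEB128 value in
             the bytes that follow.  */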
2426 ULONGEST offset = 0;
2427 unsigned shift = 0;
2428
2429 do
2430 {
2431 offset |= (*entry & 0x7f) << shift;
2432 shift += 7;
2433 }
2434 while (*entry++ & 0x80);
2435
2436 vsp += 0x204 + (offset << 2);
2437 }
2438 else if (insn == 0xb3)
2439 {
2440 int start = *entry >> 4;
2441 int count = (*entry++) & 0xf;
2442 int i;
2443
2444 /* Only registers D0..D15 are valid here. */
2445 if (start + count >= 16)
2446 return NULL;
2447
2448 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2449 for (i = 0; i <= count; i++)
2450 {
2451 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2452 vsp += 8;
2453 }
2454
2455 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2456 vsp += 4;
2457 }
2458 else if ((insn & 0xf8) == 0xb8)
2459 {
2460 int count = insn & 0x7;
2461 int i;
2462
2463 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2464 for (i = 0; i <= count; i++)
2465 {
2466 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2467 vsp += 8;
2468 }
2469
2470 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2471 vsp += 4;
2472 }
2473 else if (insn == 0xc6)
2474 {
2475 int start = *entry >> 4;
2476 int count = (*entry++) & 0xf;
2477 int i;
2478
2479 /* Only registers WR0..WR15 are valid. */
2480 if (start + count >= 16)
2481 return NULL;
2482
2483 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2484 for (i = 0; i <= count; i++)
2485 {
2486 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2487 vsp += 8;
2488 }
2489 }
2490 else if (insn == 0xc7)
2491 {
2492 int mask = *entry++;
2493 int i;
2494
2495 /* All-zero mask and mask >= 16 is "spare". */
2496 if (mask == 0 || mask >= 16)
2497 return NULL;
2498
2499 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2500 for (i = 0; i < 4; i++)
2501 if (mask & (1 << i))
2502 {
2503 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2504 vsp += 4;
2505 }
2506 }
2507 else if ((insn & 0xf8) == 0xc0)
2508 {
2509 int count = insn & 0x7;
2510 int i;
2511
2512 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2513 for (i = 0; i <= count; i++)
2514 {
2515 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2516 vsp += 8;
2517 }
2518 }
2519 else if (insn == 0xc8)
2520 {
2521 int start = *entry >> 4;
2522 int count = (*entry++) & 0xf;
2523 int i;
2524
2525 /* Only registers D0..D31 are valid. */
2526 if (start + count >= 16)
2527 return NULL;
2528
2529 /* Pop VFP double-precision registers
2530 D[16+start]..D[16+start+count]. */
2531 for (i = 0; i <= count; i++)
2532 {
2533 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2534 vsp += 8;
2535 }
2536 }
2537 else if (insn == 0xc9)
2538 {
2539 int start = *entry >> 4;
2540 int count = (*entry++) & 0xf;
2541 int i;
2542
2543 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2544 for (i = 0; i <= count; i++)
2545 {
2546 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2547 vsp += 8;
2548 }
2549 }
2550 else if ((insn & 0xf8) == 0xd0)
2551 {
2552 int count = insn & 0x7;
2553 int i;
2554
2555 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2556 for (i = 0; i <= count; i++)
2557 {
2558 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2559 vsp += 8;
2560 }
2561 }
2562 else
2563 {
2564 /* Everything else is "spare". */
2565 return NULL;
2566 }
2567 }
2568
2569 /* If we restore SP from a register, assume this was the frame register.
2570 Otherwise just fall back to SP as frame register. */
2571 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2572 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2573 else
2574 cache->framereg = ARM_SP_REGNUM;
2575
2576 /* Determine offset to previous frame. */
2577 cache->framesize
2578 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2579
2580 /* We already got the previous SP. */
2581 cache->prev_sp = vsp;
2582
2583 return cache;
2584 }
2585
2586 /* Unwinding via ARM exception table entries. Note that the sniffer
2587 already computes a filled-in prologue cache, which is then used
2588 with the same arm_prologue_this_id and arm_prologue_prev_register
2589 routines also used for prologue-parsing based unwinding. */
2590
2591 static int
2592 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2593 struct frame_info *this_frame,
2594 void **this_prologue_cache)
2595 {
2596 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2597 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2598 CORE_ADDR addr_in_block, exidx_region, func_start;
2599 struct arm_prologue_cache *cache;
2600 gdb_byte *entry;
2601
2602 /* See if we have an ARM exception table entry covering this address. */
2603 addr_in_block = get_frame_address_in_block (this_frame);
2604 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2605 if (!entry)
2606 return 0;
2607
2608 /* The ARM exception table does not describe unwind information
2609 for arbitrary PC values; it is guaranteed to be correct only
2610 at call sites. We have to decide here whether we want to use
2611 ARM exception table information for this frame, or fall back
2612 to using prologue parsing. (Note that if we have DWARF CFI,
2613 this sniffer isn't even called -- CFI is always preferred.)
2614
2615 Before we make this decision, however, we check whether we
2616 actually have *symbol* information for the current frame.
2617 If not, prologue parsing would not work anyway, so we might
2618 as well use the exception table and hope for the best. */
2619 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2620 {
2621 int exc_valid = 0;
2622
2623 /* If the next frame is "normal", we are at a call site in this
2624 frame, so exception information is guaranteed to be valid. */
2625 if (get_next_frame (this_frame)
2626 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2627 exc_valid = 1;
2628
2629 /* We also assume exception information is valid if we're currently
2630 blocked in a system call. The system library is supposed to
2631 ensure this, so that e.g. pthread cancellation works. */
2632 if (arm_frame_is_thumb (this_frame))
2633 {
2634 ULONGEST insn;
2635
2636 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2637 2, byte_order_for_code, &insn)
2638 && (insn & 0xff00) == 0xdf00 /* svc */)
2639 exc_valid = 1;
2640 }
2641 else
2642 {
2643 ULONGEST insn;
2644
2645 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2646 4, byte_order_for_code, &insn)
2647 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2648 exc_valid = 1;
2649 }
2650
2651 /* Bail out if we don't know that exception information is valid. */
2652 if (!exc_valid)
2653 return 0;
2654
2655 /* The ARM exception index does not mark the *end* of the region
2656 covered by the entry, and some functions will not have any entry.
2657 To correctly recognize the end of the covered region, the linker
2658 should have inserted dummy records with a CANTUNWIND marker.
2659
2660 Unfortunately, current versions of GNU ld do not reliably do
2661 this, and thus we may have found an incorrect entry above.
2662 As a (temporary) sanity check, we only use the entry if it
2663 lies *within* the bounds of the function. Note that this check
2664 might reject perfectly valid entries that just happen to cover
2665 multiple functions; therefore this check ought to be removed
2666 once the linker is fixed. */
2667 if (func_start > exidx_region)
2668 return 0;
2669 }
2670
2671 /* Decode the list of unwinding instructions into a prologue cache.
2672 Note that this may fail due to e.g. a "refuse to unwind" code. */
2673 cache = arm_exidx_fill_cache (this_frame, entry);
2674 if (!cache)
2675 return 0;
2676
2677 *this_prologue_cache = cache;
2678 return 1;
2679 }
2680
2681 struct frame_unwind arm_exidx_unwind = {
2682 NORMAL_FRAME,
2683 default_frame_unwind_stop_reason,
2684 arm_prologue_this_id,
2685 arm_prologue_prev_register,
2686 NULL,
2687 arm_exidx_unwind_sniffer
2688 };
2689
2690 static struct arm_prologue_cache *
2691 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2692 {
2693 struct arm_prologue_cache *cache;
2694 int reg;
2695
2696 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2697 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2698
2699 /* Still rely on the offset calculated from the prologue. */
2700 arm_scan_prologue (this_frame, cache);
2701
2702 /* Since we are in the epilogue, the SP has been restored. */
2703 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2704
2705 /* Calculate actual addresses of saved registers using offsets
2706 determined by arm_scan_prologue. */
2707 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2708 if (trad_frame_addr_p (cache->saved_regs, reg))
2709 cache->saved_regs[reg].addr += cache->prev_sp;
2710
2711 return cache;
2712 }
2713
2714 /* Implementation of function hook 'this_id' in
2715 'struct frame_unwind' for the epilogue unwinder. */
2716
2717 static void
2718 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2719 void **this_cache,
2720 struct frame_id *this_id)
2721 {
2722 struct arm_prologue_cache *cache;
2723 CORE_ADDR pc, func;
2724
2725 if (*this_cache == NULL)
2726 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2727 cache = (struct arm_prologue_cache *) *this_cache;
2728
2729 /* Use function start address as part of the frame ID. If we cannot
2730 identify the start address (due to missing symbol information),
2731 fall back to just using the current PC. */
2732 pc = get_frame_pc (this_frame);
2733 func = get_frame_func (this_frame);
2734 if (func == 0)
2735 func = pc;
2736
2737 (*this_id) = frame_id_build (cache->prev_sp, pc);
2738 }
2739
2740 /* Implementation of function hook 'prev_register' in
2741 'struct frame_unwind' for the epilogue unwinder. */
2742
2743 static struct value *
2744 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2745 void **this_cache, int regnum)
2746 {
2747 if (*this_cache == NULL)
2748 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2749
2750 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2751 }
2752
2753 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2754 CORE_ADDR pc);
2755 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2756 CORE_ADDR pc);
2757
2758 /* Implementation of function hook 'sniffer' in
2759 'struct frame_unwind' for the epilogue unwinder. */
2760
2761 static int
2762 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2763 struct frame_info *this_frame,
2764 void **this_prologue_cache)
2765 {
2766 if (frame_relative_level (this_frame) == 0)
2767 {
2768 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2769 CORE_ADDR pc = get_frame_pc (this_frame);
2770
2771 if (arm_frame_is_thumb (this_frame))
2772 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2773 else
2774 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2775 }
2776 else
2777 return 0;
2778 }
2779
2780 /* Frame unwinder from epilogue. */
2781
2782 static const struct frame_unwind arm_epilogue_frame_unwind =
2783 {
2784 NORMAL_FRAME,
2785 default_frame_unwind_stop_reason,
2786 arm_epilogue_frame_this_id,
2787 arm_epilogue_frame_prev_register,
2788 NULL,
2789 arm_epilogue_frame_sniffer,
2790 };
2791
2792 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2793 trampoline, return the target PC. Otherwise return 0.
2794
2795 void call0a (char c, short s, int i, long l) {}
2796
2797 int main (void)
2798 {
2799 (*pointer_to_call0a) (c, s, i, l);
2800 }
2801
2802 Instead of calling a stub library function _call_via_xx (xx is
2803 the register name), GCC may inline the trampoline in the object
2804 file as below (register r2 has the address of call0a).
2805
2806 .global main
2807 .type main, %function
2808 ...
2809 bl .L1
2810 ...
2811 .size main, .-main
2812
2813 .L1:
2814 bx r2
2815
2816 The trampoline 'bx r2' doesn't belong to main. */
2817
2818 static CORE_ADDR
2819 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2820 {
2821 /* The heuristic for recognizing such a trampoline is that FRAME is
2822 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2823 if (arm_frame_is_thumb (frame))
2824 {
2825 gdb_byte buf[2];
2826
2827 if (target_read_memory (pc, buf, 2) == 0)
2828 {
2829 struct gdbarch *gdbarch = get_frame_arch (frame);
2830 enum bfd_endian byte_order_for_code
2831 = gdbarch_byte_order_for_code (gdbarch);
2832 uint16_t insn
2833 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2834
2835 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2836 {
2837 CORE_ADDR dest
2838 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2839
2840 /* Clear the LSB so that the gdb core sets the step-resume
2841 breakpoint at the right address. */
2842 return UNMAKE_THUMB_ADDR (dest);
2843 }
2844 }
2845 }
2846
2847 return 0;
2848 }
2849
2850 static struct arm_prologue_cache *
2851 arm_make_stub_cache (struct frame_info *this_frame)
2852 {
2853 struct arm_prologue_cache *cache;
2854
2855 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2856 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2857
2858 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2859
2860 return cache;
2861 }
2862
2863 /* Our frame ID for a stub frame is the current SP and PC. */
2864
2865 static void
2866 arm_stub_this_id (struct frame_info *this_frame,
2867 void **this_cache,
2868 struct frame_id *this_id)
2869 {
2870 struct arm_prologue_cache *cache;
2871
2872 if (*this_cache == NULL)
2873 *this_cache = arm_make_stub_cache (this_frame);
2874 cache = (struct arm_prologue_cache *) *this_cache;
2875
2876 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2877 }
2878
2879 static int
2880 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2881 struct frame_info *this_frame,
2882 void **this_prologue_cache)
2883 {
2884 CORE_ADDR addr_in_block;
2885 gdb_byte dummy[4];
2886 CORE_ADDR pc, start_addr;
2887 const char *name;
2888
2889 addr_in_block = get_frame_address_in_block (this_frame);
2890 pc = get_frame_pc (this_frame);
2891 if (in_plt_section (addr_in_block)
2892 /* We also use the stub unwinder if the target memory is unreadable,
2893 to avoid having the prologue unwinder try to read it. */
2894 || target_read_memory (pc, dummy, 4) != 0)
2895 return 1;
2896
2897 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2898 && arm_skip_bx_reg (this_frame, pc) != 0)
2899 return 1;
2900
2901 return 0;
2902 }
2903
2904 struct frame_unwind arm_stub_unwind = {
2905 NORMAL_FRAME,
2906 default_frame_unwind_stop_reason,
2907 arm_stub_this_id,
2908 arm_prologue_prev_register,
2909 NULL,
2910 arm_stub_unwind_sniffer
2911 };
2912
2913 /* Store, into CACHE->saved_regs, the addresses of the registers saved
2914 on exception entry for the frame described by THIS_FRAME.  CACHE is
2915 returned. */
2916
2917 static struct arm_prologue_cache *
2918 arm_m_exception_cache (struct frame_info *this_frame)
2919 {
2920 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2921 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2922 struct arm_prologue_cache *cache;
2923 CORE_ADDR unwound_sp;
2924 LONGEST xpsr;
2925
2926 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2927 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2928
2929 unwound_sp = get_frame_register_unsigned (this_frame,
2930 ARM_SP_REGNUM);
2931
2932 /* The hardware saves eight 32-bit words, comprising xPSR,
2933 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2934 "B1.5.6 Exception entry behavior" in
2935 "ARMv7-M Architecture Reference Manual". */
2936 cache->saved_regs[0].addr = unwound_sp;
2937 cache->saved_regs[1].addr = unwound_sp + 4;
2938 cache->saved_regs[2].addr = unwound_sp + 8;
2939 cache->saved_regs[3].addr = unwound_sp + 12;
2940 cache->saved_regs[12].addr = unwound_sp + 16;
2941 cache->saved_regs[14].addr = unwound_sp + 20;
2942 cache->saved_regs[15].addr = unwound_sp + 24;
2943 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2944
2945 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2946 aligner between the top of the 32-byte stack frame and the
2947 previous context's stack pointer. */
2948 cache->prev_sp = unwound_sp + 32;
2949 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2950 && (xpsr & (1 << 9)) != 0)
2951 cache->prev_sp += 4;
2952
2953 return cache;
2954 }
2955
2956 /* Implementation of function hook 'this_id' in
2957 'struct frame_unwind'. */
2958
2959 static void
2960 arm_m_exception_this_id (struct frame_info *this_frame,
2961 void **this_cache,
2962 struct frame_id *this_id)
2963 {
2964 struct arm_prologue_cache *cache;
2965
2966 if (*this_cache == NULL)
2967 *this_cache = arm_m_exception_cache (this_frame);
2968 cache = (struct arm_prologue_cache *) *this_cache;
2969
2970 /* Our frame ID for an exception frame is the current SP and PC. */
2971 *this_id = frame_id_build (cache->prev_sp,
2972 get_frame_pc (this_frame));
2973 }
2974
2975 /* Implementation of function hook 'prev_register' in
2976 'struct frame_unwind'. */
2977
2978 static struct value *
2979 arm_m_exception_prev_register (struct frame_info *this_frame,
2980 void **this_cache,
2981 int prev_regnum)
2982 {
2983 struct arm_prologue_cache *cache;
2984
2985 if (*this_cache == NULL)
2986 *this_cache = arm_m_exception_cache (this_frame);
2987 cache = (struct arm_prologue_cache *) *this_cache;
2988
2989 /* The value was already reconstructed into PREV_SP. */
2990 if (prev_regnum == ARM_SP_REGNUM)
2991 return frame_unwind_got_constant (this_frame, prev_regnum,
2992 cache->prev_sp);
2993
2994 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2995 prev_regnum);
2996 }
2997
2998 /* Implementation of function hook 'sniffer' in
2999 'struct frame_unwind'. */
3000
3001 static int
3002 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3003 struct frame_info *this_frame,
3004 void **this_prologue_cache)
3005 {
3006 CORE_ADDR this_pc = get_frame_pc (this_frame);
3007
3008 /* No need to check is_m; this sniffer is only registered for
3009 M-profile architectures. */
3010
3011 /* Check if exception frame returns to a magic PC value. */
3012 return arm_m_addr_is_magic (this_pc);
3013 }
3014
3015 /* Frame unwinder for M-profile exceptions. */
3016
3017 struct frame_unwind arm_m_exception_unwind =
3018 {
3019 SIGTRAMP_FRAME,
3020 default_frame_unwind_stop_reason,
3021 arm_m_exception_this_id,
3022 arm_m_exception_prev_register,
3023 NULL,
3024 arm_m_exception_unwind_sniffer
3025 };
3026
3027 static CORE_ADDR
3028 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3029 {
3030 struct arm_prologue_cache *cache;
3031
3032 if (*this_cache == NULL)
3033 *this_cache = arm_make_prologue_cache (this_frame);
3034 cache = (struct arm_prologue_cache *) *this_cache;
3035
3036 return cache->prev_sp - cache->framesize;
3037 }
3038
3039 struct frame_base arm_normal_base = {
3040 &arm_prologue_unwind,
3041 arm_normal_frame_base,
3042 arm_normal_frame_base,
3043 arm_normal_frame_base
3044 };
3045
3046 static struct value *
3047 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3048 int regnum)
3049 {
3050 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3051 CORE_ADDR lr, cpsr;
3052 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3053
3054 switch (regnum)
3055 {
3056 case ARM_PC_REGNUM:
3057 /* The PC is normally copied from the return column, which
3058 describes saves of LR. However, that version may have an
3059 extra bit set to indicate Thumb state. The bit is not
3060 part of the PC. */
3061 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3062 return frame_unwind_got_constant (this_frame, regnum,
3063 arm_addr_bits_remove (gdbarch, lr));
3064
3065 case ARM_PS_REGNUM:
3066 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3067 cpsr = get_frame_register_unsigned (this_frame, regnum);
3068 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3069 if (IS_THUMB_ADDR (lr))
3070 cpsr |= t_bit;
3071 else
3072 cpsr &= ~t_bit;
3073 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3074
3075 default:
3076 internal_error (__FILE__, __LINE__,
3077 _("Unexpected register %d"), regnum);
3078 }
3079 }
3080
3081 static void
3082 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3083 struct dwarf2_frame_state_reg *reg,
3084 struct frame_info *this_frame)
3085 {
3086 switch (regnum)
3087 {
3088 case ARM_PC_REGNUM:
3089 case ARM_PS_REGNUM:
3090 reg->how = DWARF2_FRAME_REG_FN;
3091 reg->loc.fn = arm_dwarf2_prev_register;
3092 break;
3093 case ARM_SP_REGNUM:
3094 reg->how = DWARF2_FRAME_REG_CFA;
3095 break;
3096 }
3097 }
3098
3099 /* Implement the stack_frame_destroyed_p gdbarch method. */
3100
3101 static int
3102 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3103 {
3104 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3105 unsigned int insn, insn2;
3106 int found_return = 0, found_stack_adjust = 0;
3107 CORE_ADDR func_start, func_end;
3108 CORE_ADDR scan_pc;
3109 gdb_byte buf[4];
3110
3111 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3112 return 0;
3113
3114 /* The epilogue is a sequence of instructions along the following lines:
3115
3116 - add stack frame size to SP or FP
3117 - [if frame pointer used] restore SP from FP
3118 - restore registers from SP [may include PC]
3119 - a return-type instruction [if PC wasn't already restored]
3120
3121 In a first pass, we scan forward from the current PC and verify the
3122 instructions we find as compatible with this sequence, ending in a
3123 return instruction.
3124
3125 However, this is not sufficient to distinguish indirect function calls
3126 within a function from indirect tail calls in the epilogue in some cases.
3127 Therefore, if we didn't already find any SP-changing instruction during
3128 forward scan, we add a backward scanning heuristic to ensure we actually
3129 are in the epilogue. */
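
  /* As an illustration (not from any particular binary): with PC at the
     final "pop" of a Thumb epilogue such as

         add sp, #8
         pop {r4, r5, pc}

     the forward scan recognizes the "pop" (which includes PC) as the
     return, and the backward scan finds the preceding "add sp" as the
     stack adjustment.  */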
3130
3131 scan_pc = pc;
3132 while (scan_pc < func_end && !found_return)
3133 {
3134 if (target_read_memory (scan_pc, buf, 2))
3135 break;
3136
3137 scan_pc += 2;
3138 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3139
3140 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3141 found_return = 1;
3142 else if (insn == 0x46f7) /* mov pc, lr */
3143 found_return = 1;
3144 else if (thumb_instruction_restores_sp (insn))
3145 {
3146 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3147 found_return = 1;
3148 }
3149 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3150 {
3151 if (target_read_memory (scan_pc, buf, 2))
3152 break;
3153
3154 scan_pc += 2;
3155 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3156
3157 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3158 {
3159 if (insn2 & 0x8000) /* <registers> include PC. */
3160 found_return = 1;
3161 }
3162 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3163 && (insn2 & 0x0fff) == 0x0b04)
3164 {
3165 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3166 found_return = 1;
3167 }
3168 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3169 && (insn2 & 0x0e00) == 0x0a00)
3170 ;
3171 else
3172 break;
3173 }
3174 else
3175 break;
3176 }
3177
3178 if (!found_return)
3179 return 0;
3180
3181 /* Since any instruction in the epilogue sequence, with the possible
3182 exception of return itself, updates the stack pointer, we need to
3183 scan backwards for at most one instruction. Try either a 16-bit or
3184 a 32-bit instruction. This is just a heuristic, so we do not worry
3185 too much about false positives. */
3186
3187 if (pc - 4 < func_start)
3188 return 0;
3189 if (target_read_memory (pc - 4, buf, 4))
3190 return 0;
3191
3192 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3193 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3194
3195 if (thumb_instruction_restores_sp (insn2))
3196 found_stack_adjust = 1;
3197 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3198 found_stack_adjust = 1;
3199 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3200 && (insn2 & 0x0fff) == 0x0b04)
3201 found_stack_adjust = 1;
3202 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3203 && (insn2 & 0x0e00) == 0x0a00)
3204 found_stack_adjust = 1;
3205
3206 return found_stack_adjust;
3207 }
3208
3209 static int
3210 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3211 {
3212 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3213 unsigned int insn;
3214 int found_return;
3215 CORE_ADDR func_start, func_end;
3216
3217 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3218 return 0;
3219
3220 /* We are in the epilogue if the previous instruction was a stack
3221 adjustment and the next instruction is a possible return (bx, mov
3222 pc, or pop). We could have to scan backwards to find the stack
3223 adjustment, or forwards to find the return, but this is a decent
3224 approximation. First scan forwards. */
3225
3226 found_return = 0;
3227 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3228 if (bits (insn, 28, 31) != INST_NV)
3229 {
3230 if ((insn & 0x0ffffff0) == 0x012fff10)
3231 /* BX. */
3232 found_return = 1;
3233 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3234 /* MOV PC. */
3235 found_return = 1;
3236 else if ((insn & 0x0fff0000) == 0x08bd0000
3237 && (insn & 0x0000c000) != 0)
3238 /* POP (LDMIA), including PC or LR. */
3239 found_return = 1;
3240 }
3241
3242 if (!found_return)
3243 return 0;
3244
3245 /* Scan backwards. This is just a heuristic, so do not worry about
3246 false positives from mode changes. */
3247
3248 if (pc < func_start + 4)
3249 return 0;
3250
3251 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3252 if (arm_instruction_restores_sp (insn))
3253 return 1;
3254
3255 return 0;
3256 }
3257
3258 /* Implement the stack_frame_destroyed_p gdbarch method. */
3259
3260 static int
3261 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3262 {
3263 if (arm_pc_is_thumb (gdbarch, pc))
3264 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3265 else
3266 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3267 }
3268
3269 /* When arguments must be pushed onto the stack, they go on in reverse
3270 order. The code below implements a FILO (stack) to do this. */
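
/* Usage sketch (illustrative only, mirroring arm_push_dummy_call below):

     struct stack_item *si = NULL;
     si = push_stack_item (si, contents, len);  <-- pushes a copy of CONTENTS
     ...
     si = pop_stack_item (si);                  <-- frees and unlinks the top item

   Items are popped in the reverse of the order they were pushed.  */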
3271
3272 struct stack_item
3273 {
3274 int len;
3275 struct stack_item *prev;
3276 gdb_byte *data;
3277 };
3278
3279 static struct stack_item *
3280 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3281 {
3282 struct stack_item *si;
3283 si = XNEW (struct stack_item);
3284 si->data = (gdb_byte *) xmalloc (len);
3285 si->len = len;
3286 si->prev = prev;
3287 memcpy (si->data, contents, len);
3288 return si;
3289 }
3290
3291 static struct stack_item *
3292 pop_stack_item (struct stack_item *si)
3293 {
3294 struct stack_item *dead = si;
3295 si = si->prev;
3296 xfree (dead->data);
3297 xfree (dead);
3298 return si;
3299 }
3300
3301 /* Implement the gdbarch type alignment method; it overrides the generic
3302 alignment algorithm for anything that is ARM specific. */
3303
3304 static ULONGEST
3305 arm_type_align (gdbarch *gdbarch, struct type *t)
3306 {
3307 t = check_typedef (t);
3308 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3309 {
3310 /* Use the natural alignment for vector types (as is done for
3311 scalar types), but the maximum alignment is 64 bits. */
3312 if (TYPE_LENGTH (t) > 8)
3313 return 8;
3314 else
3315 return TYPE_LENGTH (t);
3316 }
3317
3318 /* Allow the common code to calculate the alignment. */
3319 return 0;
3320 }
3321
3322 /* Possible base types for a candidate for passing and returning in
3323 VFP registers. */
3324
3325 enum arm_vfp_cprc_base_type
3326 {
3327 VFP_CPRC_UNKNOWN,
3328 VFP_CPRC_SINGLE,
3329 VFP_CPRC_DOUBLE,
3330 VFP_CPRC_VEC64,
3331 VFP_CPRC_VEC128
3332 };
3333
3334 /* The length of one element of base type B. */
3335
3336 static unsigned
3337 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3338 {
3339 switch (b)
3340 {
3341 case VFP_CPRC_SINGLE:
3342 return 4;
3343 case VFP_CPRC_DOUBLE:
3344 return 8;
3345 case VFP_CPRC_VEC64:
3346 return 8;
3347 case VFP_CPRC_VEC128:
3348 return 16;
3349 default:
3350 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3351 (int) b);
3352 }
3353 }
3354
3355 /* The character ('s', 'd' or 'q') for the type of VFP register used
3356 for passing base type B. */
3357
3358 static int
3359 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3360 {
3361 switch (b)
3362 {
3363 case VFP_CPRC_SINGLE:
3364 return 's';
3365 case VFP_CPRC_DOUBLE:
3366 return 'd';
3367 case VFP_CPRC_VEC64:
3368 return 'd';
3369 case VFP_CPRC_VEC128:
3370 return 'q';
3371 default:
3372 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3373 (int) b);
3374 }
3375 }
3376
3377 /* Determine whether T may be part of a candidate for passing and
3378 returning in VFP registers, ignoring the limit on the total number
3379 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3380 classification of the first valid component found; if it is not
3381 VFP_CPRC_UNKNOWN, all components must have the same classification
3382 as *BASE_TYPE. If it is found that T contains a type not permitted
3383 for passing and returning in VFP registers, a type differently
3384 classified from *BASE_TYPE, or two types differently classified
3385 from each other, return -1, otherwise return the total number of
3386 base-type elements found (possibly 0 in an empty structure or
3387 array). Vector types are not currently supported, matching the
3388 generic AAPCS support. */
3389
3390 static int
3391 arm_vfp_cprc_sub_candidate (struct type *t,
3392 enum arm_vfp_cprc_base_type *base_type)
3393 {
3394 t = check_typedef (t);
3395 switch (TYPE_CODE (t))
3396 {
3397 case TYPE_CODE_FLT:
3398 switch (TYPE_LENGTH (t))
3399 {
3400 case 4:
3401 if (*base_type == VFP_CPRC_UNKNOWN)
3402 *base_type = VFP_CPRC_SINGLE;
3403 else if (*base_type != VFP_CPRC_SINGLE)
3404 return -1;
3405 return 1;
3406
3407 case 8:
3408 if (*base_type == VFP_CPRC_UNKNOWN)
3409 *base_type = VFP_CPRC_DOUBLE;
3410 else if (*base_type != VFP_CPRC_DOUBLE)
3411 return -1;
3412 return 1;
3413
3414 default:
3415 return -1;
3416 }
3417 break;
3418
3419 case TYPE_CODE_COMPLEX:
3420 /* Arguments of complex T where T is one of the types float or
3421 double get treated as if they are implemented as:
3422
3423 struct complexT
3424 {
3425 T real;
3426 T imag;
3427 };
3428
3429 */
3430 switch (TYPE_LENGTH (t))
3431 {
3432 case 8:
3433 if (*base_type == VFP_CPRC_UNKNOWN)
3434 *base_type = VFP_CPRC_SINGLE;
3435 else if (*base_type != VFP_CPRC_SINGLE)
3436 return -1;
3437 return 2;
3438
3439 case 16:
3440 if (*base_type == VFP_CPRC_UNKNOWN)
3441 *base_type = VFP_CPRC_DOUBLE;
3442 else if (*base_type != VFP_CPRC_DOUBLE)
3443 return -1;
3444 return 2;
3445
3446 default:
3447 return -1;
3448 }
3449 break;
3450
3451 case TYPE_CODE_ARRAY:
3452 {
3453 if (TYPE_VECTOR (t))
3454 {
3455 /* 64-bit and 128-bit containerized vector types are VFP
3456 CPRCs. */
3457 switch (TYPE_LENGTH (t))
3458 {
3459 case 8:
3460 if (*base_type == VFP_CPRC_UNKNOWN)
3461 *base_type = VFP_CPRC_VEC64;
3462 return 1;
3463 case 16:
3464 if (*base_type == VFP_CPRC_UNKNOWN)
3465 *base_type = VFP_CPRC_VEC128;
3466 return 1;
3467 default:
3468 return -1;
3469 }
3470 }
3471 else
3472 {
3473 int count;
3474 unsigned unitlen;
3475
3476 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3477 base_type);
3478 if (count == -1)
3479 return -1;
3480 if (TYPE_LENGTH (t) == 0)
3481 {
3482 gdb_assert (count == 0);
3483 return 0;
3484 }
3485 else if (count == 0)
3486 return -1;
3487 unitlen = arm_vfp_cprc_unit_length (*base_type);
3488 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3489 return TYPE_LENGTH (t) / unitlen;
3490 }
3491 }
3492 break;
3493
3494 case TYPE_CODE_STRUCT:
3495 {
3496 int count = 0;
3497 unsigned unitlen;
3498 int i;
3499 for (i = 0; i < TYPE_NFIELDS (t); i++)
3500 {
3501 int sub_count = 0;
3502
3503 if (!field_is_static (&TYPE_FIELD (t, i)))
3504 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3505 base_type);
3506 if (sub_count == -1)
3507 return -1;
3508 count += sub_count;
3509 }
3510 if (TYPE_LENGTH (t) == 0)
3511 {
3512 gdb_assert (count == 0);
3513 return 0;
3514 }
3515 else if (count == 0)
3516 return -1;
3517 unitlen = arm_vfp_cprc_unit_length (*base_type);
3518 if (TYPE_LENGTH (t) != unitlen * count)
3519 return -1;
3520 return count;
3521 }
3522
3523 case TYPE_CODE_UNION:
3524 {
3525 int count = 0;
3526 unsigned unitlen;
3527 int i;
3528 for (i = 0; i < TYPE_NFIELDS (t); i++)
3529 {
3530 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3531 base_type);
3532 if (sub_count == -1)
3533 return -1;
3534 count = (count > sub_count ? count : sub_count);
3535 }
3536 if (TYPE_LENGTH (t) == 0)
3537 {
3538 gdb_assert (count == 0);
3539 return 0;
3540 }
3541 else if (count == 0)
3542 return -1;
3543 unitlen = arm_vfp_cprc_unit_length (*base_type);
3544 if (TYPE_LENGTH (t) != unitlen * count)
3545 return -1;
3546 return count;
3547 }
3548
3549 default:
3550 break;
3551 }
3552
3553 return -1;
3554 }
3555
3556 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3557 if passed to or returned from a non-variadic function with the VFP
3558 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3559 *BASE_TYPE to the base type for T and *COUNT to the number of
3560 elements of that base type before returning. */
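
/* For example (illustrative), "struct { double m[4]; }" is a CPRC with
   *BASE_TYPE == VFP_CPRC_DOUBLE and *COUNT == 4, while a struct of five
   doubles is not, since at most four base-type elements may be passed
   in VFP registers.  */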
3561
3562 static int
3563 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3564 int *count)
3565 {
3566 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3567 int c = arm_vfp_cprc_sub_candidate (t, &b);
3568 if (c <= 0 || c > 4)
3569 return 0;
3570 *base_type = b;
3571 *count = c;
3572 return 1;
3573 }
3574
3575 /* Return 1 if the VFP ABI should be used for passing arguments to and
3576 returning values from a function of type FUNC_TYPE, 0
3577 otherwise. */
3578
3579 static int
3580 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3581 {
3582 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3583 /* Variadic functions always use the base ABI. Assume that functions
3584 without debug info are not variadic. */
3585 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3586 return 0;
3587 /* The VFP ABI is only supported as a variant of AAPCS. */
3588 if (tdep->arm_abi != ARM_ABI_AAPCS)
3589 return 0;
3590 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3591 }
3592
3593 /* We currently support passing parameters in integer registers, which
3594 conforms with GCC's default model, as well as VFP argument passing following
3595 the VFP variant of AAPCS. Several other variants exist and
3596 we should probably support some of them based on the selected ABI. */
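
/* As a worked example (illustrative, base AAPCS without the VFP variant):
   for a hypothetical call to "void f (int a, double b, int c)", A is
   passed in r0, B occupies the even register pair r2/r3 (r1 is skipped
   for doubleword alignment), and C no longer fits in r0-r3 and goes on
   the stack.  */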
3597
3598 static CORE_ADDR
3599 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3600 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3601 struct value **args, CORE_ADDR sp,
3602 function_call_return_method return_method,
3603 CORE_ADDR struct_addr)
3604 {
3605 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3606 int argnum;
3607 int argreg;
3608 int nstack;
3609 struct stack_item *si = NULL;
3610 int use_vfp_abi;
3611 struct type *ftype;
3612 unsigned vfp_regs_free = (1 << 16) - 1;
3613
3614 /* Determine the type of this function and whether the VFP ABI
3615 applies. */
3616 ftype = check_typedef (value_type (function));
3617 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3618 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3619 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3620
3621 /* Set the return address. For the ARM, the return breakpoint is
3622 always at BP_ADDR. */
3623 if (arm_pc_is_thumb (gdbarch, bp_addr))
3624 bp_addr |= 1;
3625 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3626
3627 /* Walk through the list of args and determine how large a temporary
3628 stack is required. Need to take care here as structs may be
3629 passed on the stack, and we have to push them. */
3630 nstack = 0;
3631
3632 argreg = ARM_A1_REGNUM;
3633 nstack = 0;
3634
3635 /* The struct_return pointer occupies the first parameter
3636 passing register. */
3637 if (return_method == return_method_struct)
3638 {
3639 if (arm_debug)
3640 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3641 gdbarch_register_name (gdbarch, argreg),
3642 paddress (gdbarch, struct_addr));
3643 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3644 argreg++;
3645 }
3646
3647 for (argnum = 0; argnum < nargs; argnum++)
3648 {
3649 int len;
3650 struct type *arg_type;
3651 struct type *target_type;
3652 enum type_code typecode;
3653 const bfd_byte *val;
3654 int align;
3655 enum arm_vfp_cprc_base_type vfp_base_type;
3656 int vfp_base_count;
3657 int may_use_core_reg = 1;
3658
3659 arg_type = check_typedef (value_type (args[argnum]));
3660 len = TYPE_LENGTH (arg_type);
3661 target_type = TYPE_TARGET_TYPE (arg_type);
3662 typecode = TYPE_CODE (arg_type);
3663 val = value_contents (args[argnum]);
3664
3665 align = type_align (arg_type);
3666 /* Round alignment up to a whole number of words. */
3667 align = (align + ARM_INT_REGISTER_SIZE - 1)
3668 & ~(ARM_INT_REGISTER_SIZE - 1);
3669 /* Different ABIs have different maximum alignments. */
3670 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3671 {
3672 /* The APCS ABI only requires word alignment. */
3673 align = ARM_INT_REGISTER_SIZE;
3674 }
3675 else
3676 {
3677 /* The AAPCS requires at most doubleword alignment. */
3678 if (align > ARM_INT_REGISTER_SIZE * 2)
3679 align = ARM_INT_REGISTER_SIZE * 2;
3680 }
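      /* For example, a `double' or `long long' argument keeps its 8-byte
         (doubleword) alignment under AAPCS, but is treated as merely
         word-aligned under APCS.  */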
3681
3682 if (use_vfp_abi
3683 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3684 &vfp_base_count))
3685 {
3686 int regno;
3687 int unit_length;
3688 int shift;
3689 unsigned mask;
3690
3691 /* Because this is a CPRC it cannot go in a core register or
3692 cause a core register to be skipped for alignment.
3693 Either it goes in VFP registers and the rest of this loop
3694 iteration is skipped for this argument, or it goes on the
3695 stack (and the stack alignment code is correct for this
3696 case). */
3697 may_use_core_reg = 0;
3698
3699 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3700 shift = unit_length / 4;
3701 mask = (1 << (shift * vfp_base_count)) - 1;
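          /* For instance, a CPRC made of two doubles has unit_length == 8,
             so shift == 2 and mask == 0xf: four consecutive free
             single-precision slots, i.e. an aligned pair of D registers,
             are required.  */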
3702 for (regno = 0; regno < 16; regno += shift)
3703 if (((vfp_regs_free >> regno) & mask) == mask)
3704 break;
3705
3706 if (regno < 16)
3707 {
3708 int reg_char;
3709 int reg_scaled;
3710 int i;
3711
3712 vfp_regs_free &= ~(mask << regno);
3713 reg_scaled = regno / shift;
3714 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3715 for (i = 0; i < vfp_base_count; i++)
3716 {
3717 char name_buf[4];
3718 int regnum;
3719 if (reg_char == 'q')
3720 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3721 val + i * unit_length);
3722 else
3723 {
3724 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3725 reg_char, reg_scaled + i);
3726 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3727 strlen (name_buf));
3728 regcache->cooked_write (regnum, val + i * unit_length);
3729 }
3730 }
3731 continue;
3732 }
3733 else
3734 {
3735 /* This CPRC could not go in VFP registers, so all VFP
3736 registers are now marked as used. */
3737 vfp_regs_free = 0;
3738 }
3739 }
3740
3741 /* Push stack padding for doubleword alignment. */
3742 if (nstack & (align - 1))
3743 {
3744 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3745 nstack += ARM_INT_REGISTER_SIZE;
3746 }
3747
3748 /* Doubleword aligned quantities must go in even register pairs. */
3749 if (may_use_core_reg
3750 && argreg <= ARM_LAST_ARG_REGNUM
3751 && align > ARM_INT_REGISTER_SIZE
3752 && argreg & 1)
3753 argreg++;
3754
3755 /* If the argument is a pointer to a function, and it is a
3756 Thumb function, create a LOCAL copy of the value and set
3757 the THUMB bit in it. */
3758 if (TYPE_CODE_PTR == typecode
3759 && target_type != NULL
3760 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3761 {
3762 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3763 if (arm_pc_is_thumb (gdbarch, regval))
3764 {
3765 bfd_byte *copy = (bfd_byte *) alloca (len);
3766 store_unsigned_integer (copy, len, byte_order,
3767 MAKE_THUMB_ADDR (regval));
3768 val = copy;
3769 }
3770 }
3771
3772 /* Copy the argument to general registers or the stack in
3773 register-sized pieces. Large arguments are split between
3774 registers and stack. */
3775 while (len > 0)
3776 {
3777 int partial_len = len < ARM_INT_REGISTER_SIZE
3778 ? len : ARM_INT_REGISTER_SIZE;
3779 CORE_ADDR regval
3780 = extract_unsigned_integer (val, partial_len, byte_order);
3781
3782 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3783 {
3784 /* The argument is being passed in a general purpose
3785 register. */
3786 if (byte_order == BFD_ENDIAN_BIG)
3787 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3788 if (arm_debug)
3789 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3790 argnum,
3791 gdbarch_register_name
3792 (gdbarch, argreg),
3793 phex (regval, ARM_INT_REGISTER_SIZE));
3794 regcache_cooked_write_unsigned (regcache, argreg, regval);
3795 argreg++;
3796 }
3797 else
3798 {
3799 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3800
3801 memset (buf, 0, sizeof (buf));
3802 store_unsigned_integer (buf, partial_len, byte_order, regval);
3803
3804 /* Push the arguments onto the stack. */
3805 if (arm_debug)
3806 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3807 argnum, nstack);
3808 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3809 nstack += ARM_INT_REGISTER_SIZE;
3810 }
3811
3812 len -= partial_len;
3813 val += partial_len;
3814 }
3815 }
3816 /* If we have an odd number of words to push, then decrement the stack
3817 by one word now, so that the first stack argument will be dword aligned. */
3818 if (nstack & 4)
3819 sp -= 4;
3820
3821 while (si)
3822 {
3823 sp -= si->len;
3824 write_memory (sp, si->data, si->len);
3825 si = pop_stack_item (si);
3826 }
3827
3828 /* Finally, update the SP register. */
3829 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3830
3831 return sp;
3832 }
3833
3834
3835 /* Always align the frame to an 8-byte boundary. This is required on
3836 some platforms and harmless on the rest. */
3837
3838 static CORE_ADDR
3839 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3840 {
3841 /* Align the stack to eight bytes. */
3842 return sp & ~ (CORE_ADDR) 7;
3843 }
3844
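/* Print the low five exception bits of FLAGS: invalid operation (IVO),
   divide by zero (DVZ), overflow (OFL), underflow (UFL) and inexact
   (INX).  */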
3845 static void
3846 print_fpu_flags (struct ui_file *file, int flags)
3847 {
3848 if (flags & (1 << 0))
3849 fputs_filtered ("IVO ", file);
3850 if (flags & (1 << 1))
3851 fputs_filtered ("DVZ ", file);
3852 if (flags & (1 << 2))
3853 fputs_filtered ("OFL ", file);
3854 if (flags & (1 << 3))
3855 fputs_filtered ("UFL ", file);
3856 if (flags & (1 << 4))
3857 fputs_filtered ("INX ", file);
3858 fputc_filtered ('\n', file);
3859 }
3860
3861 /* Print interesting information about the floating point processor
3862 (if present) or emulator. */
3863 static void
3864 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3865 struct frame_info *frame, const char *args)
3866 {
3867 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3868 int type;
3869
3870 type = (status >> 24) & 127;
3871 if (status & (1 << 31))
3872 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3873 else
3874 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3875 /* i18n: [floating point unit] mask */
3876 fputs_filtered (_("mask: "), file);
3877 print_fpu_flags (file, status >> 16);
3878 /* i18n: [floating point unit] flags */
3879 fputs_filtered (_("flags: "), file);
3880 print_fpu_flags (file, status);
3881 }
3882
3883 /* Construct the ARM extended floating point type. */
3884 static struct type *
3885 arm_ext_type (struct gdbarch *gdbarch)
3886 {
3887 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3888
3889 if (!tdep->arm_ext_type)
3890 tdep->arm_ext_type
3891 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3892 floatformats_arm_ext);
3893
3894 return tdep->arm_ext_type;
3895 }
3896
3897 static struct type *
3898 arm_neon_double_type (struct gdbarch *gdbarch)
3899 {
3900 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3901
3902 if (tdep->neon_double_type == NULL)
3903 {
3904 struct type *t, *elem;
3905
3906 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3907 TYPE_CODE_UNION);
3908 elem = builtin_type (gdbarch)->builtin_uint8;
3909 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3910 elem = builtin_type (gdbarch)->builtin_uint16;
3911 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3912 elem = builtin_type (gdbarch)->builtin_uint32;
3913 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3914 elem = builtin_type (gdbarch)->builtin_uint64;
3915 append_composite_type_field (t, "u64", elem);
3916 elem = builtin_type (gdbarch)->builtin_float;
3917 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3918 elem = builtin_type (gdbarch)->builtin_double;
3919 append_composite_type_field (t, "f64", elem);
3920
3921 TYPE_VECTOR (t) = 1;
3922 TYPE_NAME (t) = "neon_d";
3923 tdep->neon_double_type = t;
3924 }
3925
3926 return tdep->neon_double_type;
3927 }
3928
3929 /* FIXME: The vector types are not correctly ordered on big-endian
3930 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3931 bits of d0 - regardless of what unit size is being held in d0. So
3932 the offset of the first uint8 in d0 is 7, but the offset of the
3933 first float is 4. This code works as-is for little-endian
3934 targets. */
3935
3936 static struct type *
3937 arm_neon_quad_type (struct gdbarch *gdbarch)
3938 {
3939 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3940
3941 if (tdep->neon_quad_type == NULL)
3942 {
3943 struct type *t, *elem;
3944
3945 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3946 TYPE_CODE_UNION);
3947 elem = builtin_type (gdbarch)->builtin_uint8;
3948 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3949 elem = builtin_type (gdbarch)->builtin_uint16;
3950 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3951 elem = builtin_type (gdbarch)->builtin_uint32;
3952 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3953 elem = builtin_type (gdbarch)->builtin_uint64;
3954 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3955 elem = builtin_type (gdbarch)->builtin_float;
3956 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3957 elem = builtin_type (gdbarch)->builtin_double;
3958 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3959
3960 TYPE_VECTOR (t) = 1;
3961 TYPE_NAME (t) = "neon_q";
3962 tdep->neon_quad_type = t;
3963 }
3964
3965 return tdep->neon_quad_type;
3966 }
3967
3968 /* Return the GDB type object for the "standard" data type of data in
3969 register N. */
3970
3971 static struct type *
3972 arm_register_type (struct gdbarch *gdbarch, int regnum)
3973 {
3974 int num_regs = gdbarch_num_regs (gdbarch);
3975
3976 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3977 && regnum >= num_regs && regnum < num_regs + 32)
3978 return builtin_type (gdbarch)->builtin_float;
3979
3980 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3981 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3982 return arm_neon_quad_type (gdbarch);
3983
3984 /* If the target description has register information, we are only
3985 in this function so that we can override the types of
3986 double-precision registers for NEON. */
3987 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3988 {
3989 struct type *t = tdesc_register_type (gdbarch, regnum);
3990
3991 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3992 && TYPE_CODE (t) == TYPE_CODE_FLT
3993 && gdbarch_tdep (gdbarch)->have_neon)
3994 return arm_neon_double_type (gdbarch);
3995 else
3996 return t;
3997 }
3998
3999 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4000 {
4001 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4002 return builtin_type (gdbarch)->builtin_void;
4003
4004 return arm_ext_type (gdbarch);
4005 }
4006 else if (regnum == ARM_SP_REGNUM)
4007 return builtin_type (gdbarch)->builtin_data_ptr;
4008 else if (regnum == ARM_PC_REGNUM)
4009 return builtin_type (gdbarch)->builtin_func_ptr;
4010 else if (regnum >= ARRAY_SIZE (arm_register_names))
4011 /* These registers are only supported on targets which supply
4012 an XML description. */
4013 return builtin_type (gdbarch)->builtin_int0;
4014 else
4015 return builtin_type (gdbarch)->builtin_uint32;
4016 }
4017
4018 /* Map a DWARF register REGNUM onto the appropriate GDB register
4019 number. */
4020
4021 static int
4022 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4023 {
4024 /* Core integer regs. */
4025 if (reg >= 0 && reg <= 15)
4026 return reg;
4027
4028 /* Legacy FPA encoding. These were once used in a way which
4029 overlapped with VFP register numbering, so their use is
4030 discouraged, but GDB doesn't support the ARM toolchain
4031 which used them for VFP. */
4032 if (reg >= 16 && reg <= 23)
4033 return ARM_F0_REGNUM + reg - 16;
4034
4035 /* New assignments for the FPA registers. */
4036 if (reg >= 96 && reg <= 103)
4037 return ARM_F0_REGNUM + reg - 96;
4038
4039 /* WMMX register assignments. */
4040 if (reg >= 104 && reg <= 111)
4041 return ARM_WCGR0_REGNUM + reg - 104;
4042
4043 if (reg >= 112 && reg <= 127)
4044 return ARM_WR0_REGNUM + reg - 112;
4045
4046 if (reg >= 192 && reg <= 199)
4047 return ARM_WC0_REGNUM + reg - 192;
4048
4049 /* VFP v2 registers. A double precision value is actually
4050 in d1 rather than s2, but the ABI only defines numbering
4051 for the single precision registers. This will "just work"
4052 in GDB for little endian targets (we'll read eight bytes,
4053 starting in s0 and then progressing to s1), but will be
4054 reversed on big endian targets with VFP. This won't
4055 be a problem for the new Neon quad registers; you're supposed
4056 to use DW_OP_piece for those. */
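  /* For example, DWARF register 64 maps to the user register "s0" here,
     and DWARF register 256 (handled below) maps to "d0".  */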
4057 if (reg >= 64 && reg <= 95)
4058 {
4059 char name_buf[4];
4060
4061 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4062 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4063 strlen (name_buf));
4064 }
4065
4066 /* VFP v3 / Neon registers. This range is also used for VFP v2
4067 registers, except that it now describes d0 instead of s0. */
4068 if (reg >= 256 && reg <= 287)
4069 {
4070 char name_buf[4];
4071
4072 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4073 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4074 strlen (name_buf));
4075 }
4076
4077 return -1;
4078 }
4079
4080 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4081 static int
4082 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4083 {
4084 int reg = regnum;
4085 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4086
4087 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4088 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4089
4090 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4091 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4092
4093 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4094 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4095
4096 if (reg < NUM_GREGS)
4097 return SIM_ARM_R0_REGNUM + reg;
4098 reg -= NUM_GREGS;
4099
4100 if (reg < NUM_FREGS)
4101 return SIM_ARM_FP0_REGNUM + reg;
4102 reg -= NUM_FREGS;
4103
4104 if (reg < NUM_SREGS)
4105 return SIM_ARM_FPS_REGNUM + reg;
4106 reg -= NUM_SREGS;
4107
4108 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4109 }
4110
4111 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4112 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4113 NULL if an error occurs. BUF is freed. */
4114
4115 static gdb_byte *
4116 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4117 int old_len, int new_len)
4118 {
4119 gdb_byte *new_buf;
4120 int bytes_to_read = new_len - old_len;
4121
4122 new_buf = (gdb_byte *) xmalloc (new_len);
4123 memcpy (new_buf + bytes_to_read, buf, old_len);
4124 xfree (buf);
4125 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4126 {
4127 xfree (new_buf);
4128 return NULL;
4129 }
4130 return new_buf;
4131 }
4132
4133 /* An IT block is at most the 2-byte IT instruction followed by four
4134 4-byte instructions.  The instruction we are adjusting may itself be
4135 the last of those four, so the furthest back we must search to find
4136 an IT block that affects it is 2 + 3 * 4 == 14 bytes. */
4137 #define MAX_IT_BLOCK_PREFIX 14
4138
4139 /* Use a quick scan if there are more than this many bytes of
4140 code. */
4141 #define IT_SCAN_THRESHOLD 32
4142
4143 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4144 A breakpoint in an IT block may not be hit, depending on the
4145 condition flags. */
4146 static CORE_ADDR
4147 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4148 {
4149 gdb_byte *buf;
4150 char map_type;
4151 CORE_ADDR boundary, func_start;
4152 int buf_len;
4153 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4154 int i, any, last_it, last_it_count;
4155
4156 /* If we are using BKPT breakpoints, none of this is necessary. */
4157 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4158 return bpaddr;
4159
4160 /* ARM mode does not have this problem. */
4161 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4162 return bpaddr;
4163
4164 /* We are setting a breakpoint in Thumb code that could potentially
4165 contain an IT block. The first step is to find how much Thumb
4166 code there is; we do not need to read outside of known Thumb
4167 sequences. */
4168 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4169 if (map_type == 0)
4170 /* Thumb-2 code must have mapping symbols to have a chance. */
4171 return bpaddr;
4172
4173 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4174
4175 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4176 && func_start > boundary)
4177 boundary = func_start;
4178
4179 /* Search for a candidate IT instruction. We have to do some fancy
4180 footwork to distinguish a real IT instruction from the second
4181 half of a 32-bit instruction, but there is no need for that if
4182 there's no candidate. */
4183 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4184 if (buf_len == 0)
4185 /* No room for an IT instruction. */
4186 return bpaddr;
4187
4188 buf = (gdb_byte *) xmalloc (buf_len);
4189 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4190 return bpaddr;
4191 any = 0;
4192 for (i = 0; i < buf_len; i += 2)
4193 {
4194 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4195 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4196 {
4197 any = 1;
4198 break;
4199 }
4200 }
4201
4202 if (any == 0)
4203 {
4204 xfree (buf);
4205 return bpaddr;
4206 }
4207
4208 /* OK, the code bytes before this instruction contain at least one
4209 halfword which resembles an IT instruction. We know that it's
4210 Thumb code, but there are still two possibilities. Either the
4211 halfword really is an IT instruction, or it is the second half of
4212 a 32-bit Thumb instruction. The only way we can tell is to
4213 scan forwards from a known instruction boundary. */
4214 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4215 {
4216 int definite;
4217
4218 /* There's a lot of code before this instruction. Start with an
4219 optimistic search; it's easy to recognize halfwords that can
4220 not be the start of a 32-bit instruction, and use that to
4221 lock on to the instruction boundaries. */
4222 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4223 if (buf == NULL)
4224 return bpaddr;
4225 buf_len = IT_SCAN_THRESHOLD;
4226
4227 definite = 0;
4228 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4229 {
4230 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4231 if (thumb_insn_size (inst1) == 2)
4232 {
4233 definite = 1;
4234 break;
4235 }
4236 }
4237
4238 /* At this point, if DEFINITE, BUF[I] is the first place we
4239 are sure that we know the instruction boundaries, and it is far
4240 enough from BPADDR that we could not miss an IT instruction
4241 affecting BPADDR. If ! DEFINITE, give up - start from a
4242 known boundary. */
4243 if (! definite)
4244 {
4245 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4246 bpaddr - boundary);
4247 if (buf == NULL)
4248 return bpaddr;
4249 buf_len = bpaddr - boundary;
4250 i = 0;
4251 }
4252 }
4253 else
4254 {
4255 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4256 if (buf == NULL)
4257 return bpaddr;
4258 buf_len = bpaddr - boundary;
4259 i = 0;
4260 }
4261
4262 /* Scan forwards. Find the last IT instruction before BPADDR. */
4263 last_it = -1;
4264 last_it_count = 0;
4265 while (i < buf_len)
4266 {
4267 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4268 last_it_count--;
4269 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4270 {
4271 last_it = i;
4272 if (inst1 & 0x0001)
4273 last_it_count = 4;
4274 else if (inst1 & 0x0002)
4275 last_it_count = 3;
4276 else if (inst1 & 0x0004)
4277 last_it_count = 2;
4278 else
4279 last_it_count = 1;
4280 }
4281 i += thumb_insn_size (inst1);
4282 }
4283
4284 xfree (buf);
4285
4286 if (last_it == -1)
4287 /* There wasn't really an IT instruction after all. */
4288 return bpaddr;
4289
4290 if (last_it_count < 1)
4291 /* It was too far away. */
4292 return bpaddr;
4293
4294 /* This really is a trouble spot. Move the breakpoint to the IT
4295 instruction. */
4296 return bpaddr - buf_len + last_it;
4297 }
4298
4299 /* ARM displaced stepping support.
4300
4301 Generally ARM displaced stepping works as follows:
4302
4303 1. When an instruction is to be single-stepped, it is first decoded by
4304 arm_process_displaced_insn. Depending on the type of instruction, it is
4305 then copied to a scratch location, possibly in a modified form. The
4306 copy_* set of functions performs such modification, as necessary. A
4307 breakpoint is placed after the modified instruction in the scratch space
4308 to return control to GDB. Note in particular that instructions which
4309 modify the PC will no longer do so after modification.
4310
4311 2. The instruction is single-stepped, by setting the PC to the scratch
4312 location address, and resuming. Control returns to GDB when the
4313 breakpoint is hit.
4314
4315 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4316 function used for the current instruction. This function's job is to
4317 put the CPU/memory state back to what it would have been if the
4318 instruction had been executed unmodified in its original location. */
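/* For example, an ARM `B<cond> <label>' is copied into the scratch area as
   a simple NOP (see arm_copy_b_bl_blx below); its cleanup routine,
   cleanup_branch, then writes the branch destination to the PC if the
   condition held.  */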
4319
4320 /* NOP instruction (mov r0, r0). */
4321 #define ARM_NOP 0xe1a00000
4322 #define THUMB_NOP 0x4600
4323
4324 /* Helper for register reads for displaced stepping. In particular, this
4325 returns the PC as it would be seen by the instruction at its original
4326 location. */
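/* For example, if the instruction being stepped originally sat at 0x8000,
   reading the PC through this helper yields 0x8008 in ARM mode and 0x8004
   in Thumb mode, just as it would have at the original location.  */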
4327
4328 ULONGEST
4329 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4330 int regno)
4331 {
4332 ULONGEST ret;
4333 CORE_ADDR from = dsc->insn_addr;
4334
4335 if (regno == ARM_PC_REGNUM)
4336 {
4337 /* Compute pipeline offset:
4338 - When executing an ARM instruction, PC reads as the address of the
4339 current instruction plus 8.
4340 - When executing a Thumb instruction, PC reads as the address of the
4341 current instruction plus 4. */
4342
4343 if (!dsc->is_thumb)
4344 from += 8;
4345 else
4346 from += 4;
4347
4348 if (debug_displaced)
4349 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4350 (unsigned long) from);
4351 return (ULONGEST) from;
4352 }
4353 else
4354 {
4355 regcache_cooked_read_unsigned (regs, regno, &ret);
4356 if (debug_displaced)
4357 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4358 regno, (unsigned long) ret);
4359 return ret;
4360 }
4361 }
4362
4363 static int
4364 displaced_in_arm_mode (struct regcache *regs)
4365 {
4366 ULONGEST ps;
4367 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4368
4369 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4370
4371 return (ps & t_bit) == 0;
4372 }
4373
4374 /* Write to the PC as from a branch instruction. */
4375
4376 static void
4377 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4378 ULONGEST val)
4379 {
4380 if (!dsc->is_thumb)
4381 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4382 architecture versions < 6. */
4383 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4384 val & ~(ULONGEST) 0x3);
4385 else
4386 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4387 val & ~(ULONGEST) 0x1);
4388 }
4389
4390 /* Write to the PC as from a branch-exchange instruction. */
4391
4392 static void
4393 bx_write_pc (struct regcache *regs, ULONGEST val)
4394 {
4395 ULONGEST ps;
4396 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4397
4398 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4399
4400 if ((val & 1) == 1)
4401 {
4402 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4403 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4404 }
4405 else if ((val & 2) == 0)
4406 {
4407 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4408 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4409 }
4410 else
4411 {
4412 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4413 mode, align dest to 4 bytes). */
4414 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4415 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4416 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4417 }
4418 }
4419
4420 /* Write to the PC as if from a load instruction. */
4421
4422 static void
4423 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4424 ULONGEST val)
4425 {
4426 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4427 bx_write_pc (regs, val);
4428 else
4429 branch_write_pc (regs, dsc, val);
4430 }
4431
4432 /* Write to the PC as if from an ALU instruction. */
4433
4434 static void
4435 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4436 ULONGEST val)
4437 {
4438 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4439 bx_write_pc (regs, val);
4440 else
4441 branch_write_pc (regs, dsc, val);
4442 }
4443
4444 /* Helper for writing to registers for displaced stepping. Writing to the PC
4445 has varying effects depending on the instruction which does the write:
4446 this is controlled by the WRITE_PC argument. */
4447
4448 void
4449 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4450 int regno, ULONGEST val, enum pc_write_style write_pc)
4451 {
4452 if (regno == ARM_PC_REGNUM)
4453 {
4454 if (debug_displaced)
4455 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4456 (unsigned long) val);
4457 switch (write_pc)
4458 {
4459 case BRANCH_WRITE_PC:
4460 branch_write_pc (regs, dsc, val);
4461 break;
4462
4463 case BX_WRITE_PC:
4464 bx_write_pc (regs, val);
4465 break;
4466
4467 case LOAD_WRITE_PC:
4468 load_write_pc (regs, dsc, val);
4469 break;
4470
4471 case ALU_WRITE_PC:
4472 alu_write_pc (regs, dsc, val);
4473 break;
4474
4475 case CANNOT_WRITE_PC:
4476 warning (_("Instruction wrote to PC in an unexpected way when "
4477 "single-stepping"));
4478 break;
4479
4480 default:
4481 internal_error (__FILE__, __LINE__,
4482 _("Invalid argument to displaced_write_reg"));
4483 }
4484
4485 dsc->wrote_to_pc = 1;
4486 }
4487 else
4488 {
4489 if (debug_displaced)
4490 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4491 regno, (unsigned long) val);
4492 regcache_cooked_write_unsigned (regs, regno, val);
4493 }
4494 }
4495
4496 /* This function is used to concisely determine if an instruction INSN
4497 references PC. Register fields of interest in INSN should have the
4498 corresponding fields of BITMASK set to 0b1111. The function
4499 returns 1 if any of these fields in INSN reference the PC
4500 (also 0b1111, r15), else it returns 0. */
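/* For example, arm_copy_preload below passes a BITMASK of 0x000f0000 so
   that only the Rn field (bits 16-19) is tested for a PC reference.  */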
4501
4502 static int
4503 insn_references_pc (uint32_t insn, uint32_t bitmask)
4504 {
4505 uint32_t lowbit = 1;
4506
4507 while (bitmask != 0)
4508 {
4509 uint32_t mask;
4510
4511 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4512 ;
4513
4514 if (!lowbit)
4515 break;
4516
4517 mask = lowbit * 0xf;
4518
4519 if ((insn & mask) == mask)
4520 return 1;
4521
4522 bitmask &= ~mask;
4523 }
4524
4525 return 0;
4526 }
4527
4528 /* The simplest copy function. Many instructions have the same effect no
4529 matter what address they are executed at: in those cases, use this. */
4530
4531 static int
4532 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4533 const char *iname, arm_displaced_step_closure *dsc)
4534 {
4535 if (debug_displaced)
4536 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4537 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4538 iname);
4539
4540 dsc->modinsn[0] = insn;
4541
4542 return 0;
4543 }
4544
4545 static int
4546 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4547 uint16_t insn2, const char *iname,
4548 arm_displaced_step_closure *dsc)
4549 {
4550 if (debug_displaced)
4551 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4552 "opcode/class '%s' unmodified\n", insn1, insn2,
4553 iname);
4554
4555 dsc->modinsn[0] = insn1;
4556 dsc->modinsn[1] = insn2;
4557 dsc->numinsns = 2;
4558
4559 return 0;
4560 }
4561
4562 /* Copy a 16-bit Thumb instruction (Thumb-1 or a 16-bit Thumb-2 encoding)
4563 without any modification. */
4564 static int
4565 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4566 const char *iname,
4567 arm_displaced_step_closure *dsc)
4568 {
4569 if (debug_displaced)
4570 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4571 "opcode/class '%s' unmodified\n", insn,
4572 iname);
4573
4574 dsc->modinsn[0] = insn;
4575
4576 return 0;
4577 }
4578
4579 /* Preload instructions with immediate offset. */
4580
4581 static void
4582 cleanup_preload (struct gdbarch *gdbarch,
4583 struct regcache *regs, arm_displaced_step_closure *dsc)
4584 {
4585 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4586 if (!dsc->u.preload.immed)
4587 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4588 }
4589
4590 static void
4591 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4592 arm_displaced_step_closure *dsc, unsigned int rn)
4593 {
4594 ULONGEST rn_val;
4595 /* Preload instructions:
4596
4597 {pli/pld} [rn, #+/-imm]
4598 ->
4599 {pli/pld} [r0, #+/-imm]. */
4600
4601 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4602 rn_val = displaced_read_reg (regs, dsc, rn);
4603 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4604 dsc->u.preload.immed = 1;
4605
4606 dsc->cleanup = &cleanup_preload;
4607 }
4608
4609 static int
4610 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4611 arm_displaced_step_closure *dsc)
4612 {
4613 unsigned int rn = bits (insn, 16, 19);
4614
4615 if (!insn_references_pc (insn, 0x000f0000ul))
4616 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4617
4618 if (debug_displaced)
4619 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4620 (unsigned long) insn);
4621
4622 dsc->modinsn[0] = insn & 0xfff0ffff;
4623
4624 install_preload (gdbarch, regs, dsc, rn);
4625
4626 return 0;
4627 }
4628
4629 static int
4630 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4631 struct regcache *regs, arm_displaced_step_closure *dsc)
4632 {
4633 unsigned int rn = bits (insn1, 0, 3);
4634 unsigned int u_bit = bit (insn1, 7);
4635 int imm12 = bits (insn2, 0, 11);
4636 ULONGEST pc_val;
4637
4638 if (rn != ARM_PC_REGNUM)
4639 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4640
4641 /* PC is only allowed as the base register in PLI (immediate, literal)
4642 Encoding T3 and PLD (literal) Encoding T1. */
4643 if (debug_displaced)
4644 fprintf_unfiltered (gdb_stdlog,
4645 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4646 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4647 imm12);
4648
4649 if (!u_bit)
4650 imm12 = -1 * imm12;
4651
4652 /* Rewrite instruction {pli/pld} PC imm12 into:
4653 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4654
4655 {pli/pld} [r0, r1]
4656
4657 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4658
4659 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4660 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4661
4662 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4663
4664 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4665 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4666 dsc->u.preload.immed = 0;
4667
4668 /* {pli/pld} [r0, r1] */
4669 dsc->modinsn[0] = insn1 & 0xfff0;
4670 dsc->modinsn[1] = 0xf001;
4671 dsc->numinsns = 2;
4672
4673 dsc->cleanup = &cleanup_preload;
4674 return 0;
4675 }
4676
4677 /* Preload instructions with register offset. */
4678
4679 static void
4680 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4681 arm_displaced_step_closure *dsc, unsigned int rn,
4682 unsigned int rm)
4683 {
4684 ULONGEST rn_val, rm_val;
4685
4686 /* Preload register-offset instructions:
4687
4688 {pli/pld} [rn, rm {, shift}]
4689 ->
4690 {pli/pld} [r0, r1 {, shift}]. */
4691
4692 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4693 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4694 rn_val = displaced_read_reg (regs, dsc, rn);
4695 rm_val = displaced_read_reg (regs, dsc, rm);
4696 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4697 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4698 dsc->u.preload.immed = 0;
4699
4700 dsc->cleanup = &cleanup_preload;
4701 }
4702
4703 static int
4704 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4705 struct regcache *regs,
4706 arm_displaced_step_closure *dsc)
4707 {
4708 unsigned int rn = bits (insn, 16, 19);
4709 unsigned int rm = bits (insn, 0, 3);
4710
4711
4712 if (!insn_references_pc (insn, 0x000f000ful))
4713 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4714
4715 if (debug_displaced)
4716 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4717 (unsigned long) insn);
4718
4719 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4720
4721 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4722 return 0;
4723 }
4724
4725 /* Copy/cleanup coprocessor load and store instructions. */
4726
4727 static void
4728 cleanup_copro_load_store (struct gdbarch *gdbarch,
4729 struct regcache *regs,
4730 arm_displaced_step_closure *dsc)
4731 {
4732 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4733
4734 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4735
4736 if (dsc->u.ldst.writeback)
4737 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4738 }
4739
4740 static void
4741 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4742 arm_displaced_step_closure *dsc,
4743 int writeback, unsigned int rn)
4744 {
4745 ULONGEST rn_val;
4746
4747 /* Coprocessor load/store instructions:
4748
4749 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4750 ->
4751 {stc/stc2} [r0, #+/-imm].
4752
4753 ldc/ldc2 are handled identically. */
4754
4755 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4756 rn_val = displaced_read_reg (regs, dsc, rn);
4757 /* PC should be 4-byte aligned. */
4758 rn_val = rn_val & 0xfffffffc;
4759 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4760
4761 dsc->u.ldst.writeback = writeback;
4762 dsc->u.ldst.rn = rn;
4763
4764 dsc->cleanup = &cleanup_copro_load_store;
4765 }
4766
4767 static int
4768 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4769 struct regcache *regs,
4770 arm_displaced_step_closure *dsc)
4771 {
4772 unsigned int rn = bits (insn, 16, 19);
4773
4774 if (!insn_references_pc (insn, 0x000f0000ul))
4775 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4776
4777 if (debug_displaced)
4778 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4779 "load/store insn %.8lx\n", (unsigned long) insn);
4780
4781 dsc->modinsn[0] = insn & 0xfff0ffff;
4782
4783 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4784
4785 return 0;
4786 }
4787
4788 static int
4789 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4790 uint16_t insn2, struct regcache *regs,
4791 arm_displaced_step_closure *dsc)
4792 {
4793 unsigned int rn = bits (insn1, 0, 3);
4794
4795 if (rn != ARM_PC_REGNUM)
4796 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4797 "copro load/store", dsc);
4798
4799 if (debug_displaced)
4800 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4801 "load/store insn %.4x%.4x\n", insn1, insn2);
4802
4803 dsc->modinsn[0] = insn1 & 0xfff0;
4804 dsc->modinsn[1] = insn2;
4805 dsc->numinsns = 2;
4806
4807 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4808 doesn't support writeback, so pass 0. */
4809 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4810
4811 return 0;
4812 }
4813
4814 /* Clean up branch instructions (actually perform the branch, by setting
4815 PC). */
4816
4817 static void
4818 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4819 arm_displaced_step_closure *dsc)
4820 {
4821 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4822 int branch_taken = condition_true (dsc->u.branch.cond, status);
4823 enum pc_write_style write_pc = dsc->u.branch.exchange
4824 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4825
4826 if (!branch_taken)
4827 return;
4828
4829 if (dsc->u.branch.link)
4830 {
4831 /* LR should hold the address of the instruction following the current
4832 one.  So that a later `bx lr' behaves correctly, bit 0 of the LR value
4833 is set to 1 when the current instruction is Thumb. */
4834 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4835
4836 if (dsc->is_thumb)
4837 next_insn_addr |= 0x1;
4838
4839 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4840 CANNOT_WRITE_PC);
4841 }
4842
4843 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4844 }
4845
4846 /* Copy B/BL/BLX instructions with immediate destinations. */
4847
4848 static void
4849 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4850 arm_displaced_step_closure *dsc,
4851 unsigned int cond, int exchange, int link, long offset)
4852 {
4853 /* Implement "BL<cond> <label>" as:
4854
4855 Preparation: cond <- instruction condition
4856 Insn: mov r0, r0 (nop)
4857 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4858
4859 B<cond> similar, but don't set r14 in cleanup. */
4860
4861 dsc->u.branch.cond = cond;
4862 dsc->u.branch.link = link;
4863 dsc->u.branch.exchange = exchange;
4864
4865 dsc->u.branch.dest = dsc->insn_addr;
4866 if (link && exchange)
4867 /* For BLX, offset is computed from the Align (PC, 4). */
4868 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4869
4870 if (dsc->is_thumb)
4871 dsc->u.branch.dest += 4 + offset;
4872 else
4873 dsc->u.branch.dest += 8 + offset;
4874
4875 dsc->cleanup = &cleanup_branch;
4876 }
4877 static int
4878 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4879 struct regcache *regs, arm_displaced_step_closure *dsc)
4880 {
4881 unsigned int cond = bits (insn, 28, 31);
4882 int exchange = (cond == 0xf);
4883 int link = exchange || bit (insn, 24);
4884 long offset;
4885
4886 if (debug_displaced)
4887 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4888 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4889 (unsigned long) insn);
4890 if (exchange)
4891 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4892 then arrange the switch into Thumb mode. */
4893 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4894 else
4895 offset = bits (insn, 0, 23) << 2;
4896
4897 if (bit (offset, 25))
4898 offset = offset | ~0x3ffffff;
4899
4900 dsc->modinsn[0] = ARM_NOP;
4901
4902 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4903 return 0;
4904 }
4905
4906 static int
4907 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4908 uint16_t insn2, struct regcache *regs,
4909 arm_displaced_step_closure *dsc)
4910 {
4911 int link = bit (insn2, 14);
4912 int exchange = link && !bit (insn2, 12);
4913 int cond = INST_AL;
4914 long offset = 0;
4915 int j1 = bit (insn2, 13);
4916 int j2 = bit (insn2, 11);
4917 int s = sbits (insn1, 10, 10);
4918 int i1 = !(j1 ^ bit (insn1, 10));
4919 int i2 = !(j2 ^ bit (insn1, 10));
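  /* In the Thumb-2 B/BL/BLX encodings, I1 = NOT(J1 XOR S) and
     I2 = NOT(J2 XOR S); S is extracted with sbits so that the
     (s << 24) / (s << 20) terms below sign-extend the offset.  */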
4920
4921 if (!link && !exchange) /* B */
4922 {
4923 offset = (bits (insn2, 0, 10) << 1);
4924 if (bit (insn2, 12)) /* Encoding T4 */
4925 {
4926 offset |= (bits (insn1, 0, 9) << 12)
4927 | (i2 << 22)
4928 | (i1 << 23)
4929 | (s << 24);
4930 cond = INST_AL;
4931 }
4932 else /* Encoding T3 */
4933 {
4934 offset |= (bits (insn1, 0, 5) << 12)
4935 | (j1 << 18)
4936 | (j2 << 19)
4937 | (s << 20);
4938 cond = bits (insn1, 6, 9);
4939 }
4940 }
4941 else
4942 {
4943 offset = (bits (insn1, 0, 9) << 12);
4944 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4945 offset |= exchange ?
4946 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4947 }
4948
4949 if (debug_displaced)
4950 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4951 "%.4x %.4x with offset %.8lx\n",
4952 link ? (exchange) ? "blx" : "bl" : "b",
4953 insn1, insn2, offset);
4954
4955 dsc->modinsn[0] = THUMB_NOP;
4956
4957 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4958 return 0;
4959 }
4960
4961 /* Copy B Thumb instructions. */
4962 static int
4963 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4964 arm_displaced_step_closure *dsc)
4965 {
4966 unsigned int cond = 0;
4967 int offset = 0;
4968 unsigned short bit_12_15 = bits (insn, 12, 15);
4969 CORE_ADDR from = dsc->insn_addr;
4970
4971 if (bit_12_15 == 0xd)
4972 {
4973 /* offset = SignExtend (imm8:0, 32) */
4974 offset = sbits ((insn << 1), 0, 8);
4975 cond = bits (insn, 8, 11);
4976 }
4977 else if (bit_12_15 == 0xe) /* Encoding T2 */
4978 {
4979 offset = sbits ((insn << 1), 0, 11);
4980 cond = INST_AL;
4981 }
4982
4983 if (debug_displaced)
4984 fprintf_unfiltered (gdb_stdlog,
4985 "displaced: copying b immediate insn %.4x "
4986 "with offset %d\n", insn, offset);
4987
4988 dsc->u.branch.cond = cond;
4989 dsc->u.branch.link = 0;
4990 dsc->u.branch.exchange = 0;
4991 dsc->u.branch.dest = from + 4 + offset;
4992
4993 dsc->modinsn[0] = THUMB_NOP;
4994
4995 dsc->cleanup = &cleanup_branch;
4996
4997 return 0;
4998 }
4999
5000 /* Copy BX/BLX with register-specified destinations. */
5001
5002 static void
5003 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5004 arm_displaced_step_closure *dsc, int link,
5005 unsigned int cond, unsigned int rm)
5006 {
5007 /* Implement "{BX,BLX}<cond> <reg>" as:
5008
5009 Preparation: cond <- instruction condition
5010 Insn: mov r0, r0 (nop)
5011 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5012
5013 Don't set r14 in cleanup for BX. */
5014
5015 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5016
5017 dsc->u.branch.cond = cond;
5018 dsc->u.branch.link = link;
5019
5020 dsc->u.branch.exchange = 1;
5021
5022 dsc->cleanup = &cleanup_branch;
5023 }
5024
5025 static int
5026 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5027 struct regcache *regs, arm_displaced_step_closure *dsc)
5028 {
5029 unsigned int cond = bits (insn, 28, 31);
5030 /* BX: x12xxx1x
5031 BLX: x12xxx3x. */
5032 int link = bit (insn, 5);
5033 unsigned int rm = bits (insn, 0, 3);
5034
5035 if (debug_displaced)
5036 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5037 (unsigned long) insn);
5038
5039 dsc->modinsn[0] = ARM_NOP;
5040
5041 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5042 return 0;
5043 }
5044
5045 static int
5046 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5047 struct regcache *regs,
5048 arm_displaced_step_closure *dsc)
5049 {
5050 int link = bit (insn, 7);
5051 unsigned int rm = bits (insn, 3, 6);
5052
5053 if (debug_displaced)
5054 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5055 (unsigned short) insn);
5056
5057 dsc->modinsn[0] = THUMB_NOP;
5058
5059 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5060
5061 return 0;
5062 }
5063
5064
5065 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5066
5067 static void
5068 cleanup_alu_imm (struct gdbarch *gdbarch,
5069 struct regcache *regs, arm_displaced_step_closure *dsc)
5070 {
5071 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5072 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5073 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5074 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5075 }
5076
5077 static int
5078 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5079 arm_displaced_step_closure *dsc)
5080 {
5081 unsigned int rn = bits (insn, 16, 19);
5082 unsigned int rd = bits (insn, 12, 15);
5083 unsigned int op = bits (insn, 21, 24);
5084 int is_mov = (op == 0xd);
5085 ULONGEST rd_val, rn_val;
5086
5087 if (!insn_references_pc (insn, 0x000ff000ul))
5088 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5089
5090 if (debug_displaced)
5091 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5092 "%.8lx\n", is_mov ? "move" : "ALU",
5093 (unsigned long) insn);
5094
5095 /* Instruction is of form:
5096
5097 <op><cond> rd, [rn,] #imm
5098
5099 Rewrite as:
5100
5101 Preparation: tmp1, tmp2 <- r0, r1;
5102 r0, r1 <- rd, rn
5103 Insn: <op><cond> r0, r1, #imm
5104 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5105 */
5106
5107 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5108 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5109 rn_val = displaced_read_reg (regs, dsc, rn);
5110 rd_val = displaced_read_reg (regs, dsc, rd);
5111 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5112 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5113 dsc->rd = rd;
5114
5115 if (is_mov)
5116 dsc->modinsn[0] = insn & 0xfff00fff;
5117 else
5118 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5119
5120 dsc->cleanup = &cleanup_alu_imm;
5121
5122 return 0;
5123 }
5124
5125 static int
5126 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5127 uint16_t insn2, struct regcache *regs,
5128 arm_displaced_step_closure *dsc)
5129 {
5130 unsigned int op = bits (insn1, 5, 8);
5131 unsigned int rn, rm, rd;
5132 ULONGEST rd_val, rn_val;
5133
5134 rn = bits (insn1, 0, 3); /* Rn */
5135 rm = bits (insn2, 0, 3); /* Rm */
5136 rd = bits (insn2, 8, 11); /* Rd */
5137
5138 /* This routine is only called for instruction MOV. */
5139 gdb_assert (op == 0x2 && rn == 0xf);
5140
5141 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5142 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5143
5144 if (debug_displaced)
5145 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5146 "ALU", insn1, insn2);
5147
5148 /* Instruction is of form:
5149
5150 <op><cond> rd, [rn,] #imm
5151
5152 Rewrite as:
5153
5154 Preparation: tmp1, tmp2 <- r0, r1;
5155 r0, r1 <- rd, rn
5156 Insn: <op><cond> r0, r1, #imm
5157 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5158 */
5159
5160 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5161 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5162 rn_val = displaced_read_reg (regs, dsc, rn);
5163 rd_val = displaced_read_reg (regs, dsc, rd);
5164 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5165 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5166 dsc->rd = rd;
5167
5168 dsc->modinsn[0] = insn1;
5169 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5170 dsc->numinsns = 2;
5171
5172 dsc->cleanup = &cleanup_alu_imm;
5173
5174 return 0;
5175 }
5176
5177 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5178
5179 static void
5180 cleanup_alu_reg (struct gdbarch *gdbarch,
5181 struct regcache *regs, arm_displaced_step_closure *dsc)
5182 {
5183 ULONGEST rd_val;
5184 int i;
5185
5186 rd_val = displaced_read_reg (regs, dsc, 0);
5187
5188 for (i = 0; i < 3; i++)
5189 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5190
5191 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5192 }
5193
5194 static void
5195 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5196 arm_displaced_step_closure *dsc,
5197 unsigned int rd, unsigned int rn, unsigned int rm)
5198 {
5199 ULONGEST rd_val, rn_val, rm_val;
5200
5201 /* Instruction is of form:
5202
5203 <op><cond> rd, [rn,] rm [, <shift>]
5204
5205 Rewrite as:
5206
5207 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5208 r0, r1, r2 <- rd, rn, rm
5209 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5210 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5211 */
5212
5213 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5214 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5215 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5216 rd_val = displaced_read_reg (regs, dsc, rd);
5217 rn_val = displaced_read_reg (regs, dsc, rn);
5218 rm_val = displaced_read_reg (regs, dsc, rm);
5219 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5220 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5221 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5222 dsc->rd = rd;
5223
5224 dsc->cleanup = &cleanup_alu_reg;
5225 }
5226
5227 static int
5228 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5229 arm_displaced_step_closure *dsc)
5230 {
5231 unsigned int op = bits (insn, 21, 24);
5232 int is_mov = (op == 0xd);
5233
5234 if (!insn_references_pc (insn, 0x000ff00ful))
5235 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5236
5237 if (debug_displaced)
5238 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5239 is_mov ? "move" : "ALU", (unsigned long) insn);
5240
5241 if (is_mov)
5242 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5243 else
5244 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5245
5246 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5247 bits (insn, 0, 3));
5248 return 0;
5249 }
5250
5251 static int
5252 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5253 struct regcache *regs,
5254 arm_displaced_step_closure *dsc)
5255 {
5256 unsigned rm, rd;
5257
5258 rm = bits (insn, 3, 6);
5259 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5260
5261 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5262 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5263
5264 if (debug_displaced)
5265 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5266 (unsigned short) insn);
5267
5268 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5269
5270 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5271
5272 return 0;
5273 }
5274
5275 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5276
5277 static void
5278 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5279 struct regcache *regs,
5280 arm_displaced_step_closure *dsc)
5281 {
5282 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5283 int i;
5284
5285 for (i = 0; i < 4; i++)
5286 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5287
5288 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5289 }
5290
5291 static void
5292 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5293 arm_displaced_step_closure *dsc,
5294 unsigned int rd, unsigned int rn, unsigned int rm,
5295 unsigned rs)
5296 {
5297 int i;
5298 ULONGEST rd_val, rn_val, rm_val, rs_val;
5299
5300 /* Instruction is of form:
5301
5302 <op><cond> rd, [rn,] rm, <shift> rs
5303
5304 Rewrite as:
5305
5306 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5307 r0, r1, r2, r3 <- rd, rn, rm, rs
5308 Insn: <op><cond> r0, r1, r2, <shift> r3
5309 Cleanup: tmp5 <- r0
5310 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5311 rd <- tmp5
5312 */
5313
5314 for (i = 0; i < 4; i++)
5315 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5316
5317 rd_val = displaced_read_reg (regs, dsc, rd);
5318 rn_val = displaced_read_reg (regs, dsc, rn);
5319 rm_val = displaced_read_reg (regs, dsc, rm);
5320 rs_val = displaced_read_reg (regs, dsc, rs);
5321 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5322 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5323 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5325 dsc->rd = rd;
5326 dsc->cleanup = &cleanup_alu_shifted_reg;
5327 }
5328
5329 static int
5330 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5331 struct regcache *regs,
5332 arm_displaced_step_closure *dsc)
5333 {
5334 unsigned int op = bits (insn, 21, 24);
5335 int is_mov = (op == 0xd);
5336 unsigned int rd, rn, rm, rs;
5337
5338 if (!insn_references_pc (insn, 0x000fff0ful))
5339 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5340
5341 if (debug_displaced)
5342 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5343 "%.8lx\n", is_mov ? "move" : "ALU",
5344 (unsigned long) insn);
5345
5346 rn = bits (insn, 16, 19);
5347 rm = bits (insn, 0, 3);
5348 rs = bits (insn, 8, 11);
5349 rd = bits (insn, 12, 15);
5350
5351 if (is_mov)
5352 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5353 else
5354 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5355
5356 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5357
5358 return 0;
5359 }
5360
5361 /* Clean up load instructions. */
5362
5363 static void
5364 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5365 arm_displaced_step_closure *dsc)
5366 {
5367 ULONGEST rt_val, rt_val2 = 0, rn_val;
5368
5369 rt_val = displaced_read_reg (regs, dsc, 0);
5370 if (dsc->u.ldst.xfersize == 8)
5371 rt_val2 = displaced_read_reg (regs, dsc, 1);
5372 rn_val = displaced_read_reg (regs, dsc, 2);
5373
5374 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5375 if (dsc->u.ldst.xfersize > 4)
5376 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5377 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5378 if (!dsc->u.ldst.immed)
5379 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5380
5381 /* Handle register writeback. */
5382 if (dsc->u.ldst.writeback)
5383 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5384 /* Put result in right place. */
5385 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5386 if (dsc->u.ldst.xfersize == 8)
5387 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5388 }
5389
5390 /* Clean up store instructions. */
5391
5392 static void
5393 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5394 arm_displaced_step_closure *dsc)
5395 {
5396 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5397
5398 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5399 if (dsc->u.ldst.xfersize > 4)
5400 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5401 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5402 if (!dsc->u.ldst.immed)
5403 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5404 if (!dsc->u.ldst.restore_r4)
5405 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5406
5407 /* Writeback. */
5408 if (dsc->u.ldst.writeback)
5409 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5410 }
5411
5412 /* Copy "extra" load/store instructions. These are halfword/doubleword
5413 transfers, which have a different encoding to byte/word transfers. */
5414
5415 static int
5416 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5417 struct regcache *regs, arm_displaced_step_closure *dsc)
5418 {
5419 unsigned int op1 = bits (insn, 20, 24);
5420 unsigned int op2 = bits (insn, 5, 6);
5421 unsigned int rt = bits (insn, 12, 15);
5422 unsigned int rn = bits (insn, 16, 19);
5423 unsigned int rm = bits (insn, 0, 3);
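  /* LOAD and BYTESIZE are indexed by the OPCODE computed below: LOAD says
     whether the instruction is a load (selecting cleanup_load rather than
     cleanup_store), and BYTESIZE gives the transfer size in bytes, 8 for
     the doubleword LDRD/STRD forms.  */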
5424 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5425 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5426 int immed = (op1 & 0x4) != 0;
5427 int opcode;
5428 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5429
5430 if (!insn_references_pc (insn, 0x000ff00ful))
5431 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5432
5433 if (debug_displaced)
5434 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5435 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5436 (unsigned long) insn);
5437
5438 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5439
5440 if (opcode < 0)
5441 internal_error (__FILE__, __LINE__,
5442 _("copy_extra_ld_st: instruction decode error"));
5443
5444 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5445 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5446 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5447 if (!immed)
5448 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5449
5450 rt_val = displaced_read_reg (regs, dsc, rt);
5451 if (bytesize[opcode] == 8)
5452 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5453 rn_val = displaced_read_reg (regs, dsc, rn);
5454 if (!immed)
5455 rm_val = displaced_read_reg (regs, dsc, rm);
5456
5457 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5458 if (bytesize[opcode] == 8)
5459 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5460 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5461 if (!immed)
5462 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5463
5464 dsc->rd = rt;
5465 dsc->u.ldst.xfersize = bytesize[opcode];
5466 dsc->u.ldst.rn = rn;
5467 dsc->u.ldst.immed = immed;
5468 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5469 dsc->u.ldst.restore_r4 = 0;
5470
5471 if (immed)
5472 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5473 ->
5474 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5475 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5476 else
5477 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5478 ->
5479 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5480 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5481
5482 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5483
5484 return 0;
5485 }
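
/* A worked example of the rewrite above, for a hypothetical
   "ldrh r5, [pc, #0x34]" (A32 encoding 0xe1df53b4), chosen because its base
   register is the PC and so it cannot be run out of line unmodified:

     op1 = 0x1d, op2 = 0x1  =>  opcode = 3, load[3] = 1, bytesize[3] = 2
     (insn & 0xfff00fff) | 0x20000  =>  0xe1d203b4, i.e. ldrh r0, [r2, #0x34]

   r2 has been seeded with the original PC value (from + 8) and r0 with the
   original r5, so the copied instruction loads exactly the halfword the
   original would have loaded; cleanup_load then moves the result from r0
   into r5 and restores the scratch registers.  */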
5486
5487 /* Copy byte/halfword/word loads and stores. */
5488
5489 static void
5490 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5491 arm_displaced_step_closure *dsc, int load,
5492 int immed, int writeback, int size, int usermode,
5493 int rt, int rm, int rn)
5494 {
5495 ULONGEST rt_val, rn_val, rm_val = 0;
5496
5497 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5498 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5499 if (!immed)
5500 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5501 if (!load)
5502 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5503
5504 rt_val = displaced_read_reg (regs, dsc, rt);
5505 rn_val = displaced_read_reg (regs, dsc, rn);
5506 if (!immed)
5507 rm_val = displaced_read_reg (regs, dsc, rm);
5508
5509 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5510 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5511 if (!immed)
5512 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5513 dsc->rd = rt;
5514 dsc->u.ldst.xfersize = size;
5515 dsc->u.ldst.rn = rn;
5516 dsc->u.ldst.immed = immed;
5517 dsc->u.ldst.writeback = writeback;
5518
5519 /* To write PC we can do:
5520
5521 Before this sequence of instructions:
5522 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5523 r2 is the Rn value got from displaced_read_reg.
5524
5525 Insn1: push {pc} Write address of STR instruction + offset on stack
5526 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5527 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5528 = addr(Insn1) + offset - addr(Insn3) - 8
5529 = offset - 16
5530 Insn4: add r4, r4, #8 r4 = offset - 8
5531 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5532 = from + offset
5533 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5534
5535 Otherwise we don't know what value to write for PC, since the offset is
5536 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5537 of this can be found in Section "Saving from r15" in
5538 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5539
5540 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5541 }
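
/* As a concrete illustration of the arithmetic above, suppose the original
   STR is at FROM = 0x10000 and the core happens to store PC + 12 (the offset
   is implementation-defined, which is exactly why it is measured at run time
   rather than assumed):

     r0 = from + 8 = 0x10008 (value read by displaced_read_reg)
     after Insn2: r4 = addr(Insn1) + 12
     after Insn3: r4 = 12 - 16 = -4 (pc reads as addr(Insn3) + 8)
     after Insn4: r4 = 4
     after Insn5: r0 = 0x10008 + 4 = 0x1000c = from + 12

   which is the value the original, non-displaced STR would have stored.  */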
5542
5543
5544 static int
5545 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5546 uint16_t insn2, struct regcache *regs,
5547 arm_displaced_step_closure *dsc, int size)
5548 {
5549 unsigned int u_bit = bit (insn1, 7);
5550 unsigned int rt = bits (insn2, 12, 15);
5551 int imm12 = bits (insn2, 0, 11);
5552 ULONGEST pc_val;
5553
5554 if (debug_displaced)
5555 fprintf_unfiltered (gdb_stdlog,
5556 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5557 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5558 imm12);
5559
5560 if (!u_bit)
5561 imm12 = -1 * imm12;
5562
5563 /* Rewrite instruction LDR Rt imm12 into:
5564
5565 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5566
5567 LDR R0, [R2, R3]
5568
5569 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5570
5571
5572 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5573 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5574 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5575
5576 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5577
5578 pc_val = pc_val & 0xfffffffc;
5579
5580 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5581 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5582
5583 dsc->rd = rt;
5584
5585 dsc->u.ldst.xfersize = size;
5586 dsc->u.ldst.immed = 0;
5587 dsc->u.ldst.writeback = 0;
5588 dsc->u.ldst.restore_r4 = 0;
5589
5590 /* LDR R0, [R2, R3] */
5591 dsc->modinsn[0] = 0xf852;
5592 dsc->modinsn[1] = 0x3;
5593 dsc->numinsns = 2;
5594
5595 dsc->cleanup = &cleanup_load;
5596
5597 return 0;
5598 }
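
/* For example, a hypothetical "ldr.w r1, [pc, #-8]" has U = 0, Rt = 1 and an
   encoded imm12 of 8, so imm12 above becomes -8.  r2 is seeded with
   Align(PC, 4), r3 with 0xfffffff8, and the substituted "ldr.w r0, [r2, r3]"
   (0xf852 0x0003) loads from Align(PC, 4) - 8, the same address as the
   original; cleanup_load then copies r0 into r1 and restores r0, r2 and
   r3.  */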
5599
5600 static int
5601 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5602 uint16_t insn2, struct regcache *regs,
5603 arm_displaced_step_closure *dsc,
5604 int writeback, int immed)
5605 {
5606 unsigned int rt = bits (insn2, 12, 15);
5607 unsigned int rn = bits (insn1, 0, 3);
5608 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5609 /* In LDR (register), there is also a register Rm, which is not allowed to
5610 be PC, so we don't have to check it. */
5611
5612 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5613 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5614 dsc);
5615
5616 if (debug_displaced)
5617 fprintf_unfiltered (gdb_stdlog,
5618 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5619 rt, rn, insn1, insn2);
5620
5621 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5622 0, rt, rm, rn);
5623
5624 dsc->u.ldst.restore_r4 = 0;
5625
5626 if (immed)
5627 /* ldr[b]<cond> rt, [rn, #imm], etc.
5628 ->
5629 ldr[b]<cond> r0, [r2, #imm]. */
5630 {
5631 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5632 dsc->modinsn[1] = insn2 & 0x0fff;
5633 }
5634 else
5635 /* ldr[b]<cond> rt, [rn, rm], etc.
5636 ->
5637 ldr[b]<cond> r0, [r2, r3]. */
5638 {
5639 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5640 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5641 }
5642
5643 dsc->numinsns = 2;
5644
5645 return 0;
5646 }
5647
5648
5649 static int
5650 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5651 struct regcache *regs,
5652 arm_displaced_step_closure *dsc,
5653 int load, int size, int usermode)
5654 {
5655 int immed = !bit (insn, 25);
5656 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5657 unsigned int rt = bits (insn, 12, 15);
5658 unsigned int rn = bits (insn, 16, 19);
5659 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5660
5661 if (!insn_references_pc (insn, 0x000ff00ful))
5662 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5663
5664 if (debug_displaced)
5665 fprintf_unfiltered (gdb_stdlog,
5666 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5667 load ? (size == 1 ? "ldrb" : "ldr")
5668 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5669 rt, rn,
5670 (unsigned long) insn);
5671
5672 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5673 usermode, rt, rm, rn);
5674
5675 if (load || rt != ARM_PC_REGNUM)
5676 {
5677 dsc->u.ldst.restore_r4 = 0;
5678
5679 if (immed)
5680 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5681 ->
5682 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5683 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5684 else
5685 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5686 ->
5687 {ldr,str}[b]<cond> r0, [r2, r3]. */
5688 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5689 }
5690 else
5691 {
5692 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5693 dsc->u.ldst.restore_r4 = 1;
5694 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5695 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5696 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5697 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5698 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5699
5700 /* As above. */
5701 if (immed)
5702 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5703 else
5704 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5705
5706 dsc->numinsns = 6;
5707 }
5708
5709 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5710
5711 return 0;
5712 }
5713
5714 /* Cleanup LDM instructions with fully-populated register list. This is an
5715 unfortunate corner case: it's impossible to implement correctly by modifying
5716 the instruction. The issue is as follows: we have an instruction,
5717
5718 ldm rN, {r0-r15}
5719
5720 which we must rewrite to avoid loading PC. A possible solution would be to
5721 do the load in two halves, something like (with suitable cleanup
5722 afterwards):
5723
5724 mov r8, rN
5725 ldm[id][ab] r8!, {r0-r7}
5726 str r7, <temp>
5727 ldm[id][ab] r8, {r7-r14}
5728 <bkpt>
5729
5730 but at present there's no suitable place for <temp>, since the scratch space
5731 is overwritten before the cleanup routine is called. For now, we simply
5732 emulate the instruction. */
5733
5734 static void
5735 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5736 arm_displaced_step_closure *dsc)
5737 {
5738 int inc = dsc->u.block.increment;
5739 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5740 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5741 uint32_t regmask = dsc->u.block.regmask;
5742 int regno = inc ? 0 : 15;
5743 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5744 int exception_return = dsc->u.block.load && dsc->u.block.user
5745 && (regmask & 0x8000) != 0;
5746 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5747 int do_transfer = condition_true (dsc->u.block.cond, status);
5748 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5749
5750 if (!do_transfer)
5751 return;
5752
5753 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5754 sensible we can do here. Complain loudly. */
5755 if (exception_return)
5756 error (_("Cannot single-step exception return"));
5757
5758 /* We don't handle any stores here for now. */
5759 gdb_assert (dsc->u.block.load != 0);
5760
5761 if (debug_displaced)
5762 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5763 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5764 dsc->u.block.increment ? "inc" : "dec",
5765 dsc->u.block.before ? "before" : "after");
5766
5767 while (regmask)
5768 {
5769 uint32_t memword;
5770
5771 if (inc)
5772 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5773 regno++;
5774 else
5775 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5776 regno--;
5777
5778 xfer_addr += bump_before;
5779
5780 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5781 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5782
5783 xfer_addr += bump_after;
5784
5785 regmask &= ~(1 << regno);
5786 }
5787
5788 if (dsc->u.block.writeback)
5789 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5790 CANNOT_WRITE_PC);
5791 }
5792
5793 /* Clean up an STM which included the PC in the register list. */
5794
5795 static void
5796 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5797 arm_displaced_step_closure *dsc)
5798 {
5799 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5800 int store_executed = condition_true (dsc->u.block.cond, status);
5801 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5802 CORE_ADDR stm_insn_addr;
5803 uint32_t pc_val;
5804 long offset;
5805 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5806
5807 /* If condition code fails, there's nothing else to do. */
5808 if (!store_executed)
5809 return;
5810
5811 if (dsc->u.block.increment)
5812 {
5813 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5814
5815 if (dsc->u.block.before)
5816 pc_stored_at += 4;
5817 }
5818 else
5819 {
5820 pc_stored_at = dsc->u.block.xfer_addr;
5821
5822 if (dsc->u.block.before)
5823 pc_stored_at -= 4;
5824 }
5825
5826 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5827 stm_insn_addr = dsc->scratch_base;
5828 offset = pc_val - stm_insn_addr;
5829
5830 if (debug_displaced)
5831 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5832 "STM instruction\n", offset);
5833
5834 /* Rewrite the stored PC to the proper value for the non-displaced original
5835 instruction. */
5836 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5837 dsc->insn_addr + offset);
5838 }
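
/* For instance, if the copied STM was executed at dsc->scratch_base = 0x2000
   and the word read back from pc_stored_at is 0x200c, the detected offset is
   12, so the slot is rewritten to dsc->insn_addr + 12, the value the
   original STM at insn_addr would have stored.  */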
5839
5840 /* Clean up an LDM which includes the PC in the register list. We clumped all
5841 the registers in the transferred list into a contiguous range r0...rX (to
5842 avoid loading PC directly and losing control of the debugged program), so we
5843 must undo that here. */
5844
5845 static void
5846 cleanup_block_load_pc (struct gdbarch *gdbarch,
5847 struct regcache *regs,
5848 arm_displaced_step_closure *dsc)
5849 {
5850 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5851 int load_executed = condition_true (dsc->u.block.cond, status);
5852 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5853 unsigned int regs_loaded = bitcount (mask);
5854 unsigned int num_to_shuffle = regs_loaded, clobbered;
5855
5856 /* The method employed here will fail if the register list is fully populated
5857 (we need to avoid loading PC directly). */
5858 gdb_assert (num_to_shuffle < 16);
5859
5860 if (!load_executed)
5861 return;
5862
5863 clobbered = (1 << num_to_shuffle) - 1;
5864
5865 while (num_to_shuffle > 0)
5866 {
5867 if ((mask & (1 << write_reg)) != 0)
5868 {
5869 unsigned int read_reg = num_to_shuffle - 1;
5870
5871 if (read_reg != write_reg)
5872 {
5873 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5874 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5875 if (debug_displaced)
5876 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5877 "loaded register r%d to r%d\n"), read_reg,
5878 write_reg);
5879 }
5880 else if (debug_displaced)
5881 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5882 "r%d already in the right place\n"),
5883 write_reg);
5884
5885 clobbered &= ~(1 << write_reg);
5886
5887 num_to_shuffle--;
5888 }
5889
5890 write_reg--;
5891 }
5892
5893 /* Restore any registers we scribbled over. */
5894 for (write_reg = 0; clobbered != 0; write_reg++)
5895 {
5896 if ((clobbered & (1 << write_reg)) != 0)
5897 {
5898 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5899 CANNOT_WRITE_PC);
5900 if (debug_displaced)
5901 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5902 "clobbered register r%d\n"), write_reg);
5903 clobbered &= ~(1 << write_reg);
5904 }
5905 }
5906
5907 /* Perform register writeback manually. */
5908 if (dsc->u.block.writeback)
5909 {
5910 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5911
5912 if (dsc->u.block.increment)
5913 new_rn_val += regs_loaded * 4;
5914 else
5915 new_rn_val -= regs_loaded * 4;
5916
5917 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5918 CANNOT_WRITE_PC);
5919 }
5920 }
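
/* For example, for an original "ldm r6, {r1, r5, pc}" the modified
   instruction loads the three transferred words into r0, r1 and r2 (see
   arm_copy_block_xfer below).  The loop above then walks downwards from r15:
   PC <- r2, r5 <- r1, r1 <- r0, after which only r0 and r2 remain marked as
   clobbered and are restored from dsc->tmp[].  With writeback, r6 would
   finally be set to its old value plus 12 by hand.  */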
5921
5922 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5923 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5924
5925 static int
5926 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5927 struct regcache *regs,
5928 arm_displaced_step_closure *dsc)
5929 {
5930 int load = bit (insn, 20);
5931 int user = bit (insn, 22);
5932 int increment = bit (insn, 23);
5933 int before = bit (insn, 24);
5934 int writeback = bit (insn, 21);
5935 int rn = bits (insn, 16, 19);
5936
5937 /* Block transfers which don't mention PC can be run directly
5938 out-of-line. */
5939 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5940 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5941
5942 if (rn == ARM_PC_REGNUM)
5943 {
5944 warning (_("displaced: Unpredictable LDM or STM with "
5945 "base register r15"));
5946 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5947 }
5948
5949 if (debug_displaced)
5950 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5951 "%.8lx\n", (unsigned long) insn);
5952
5953 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5954 dsc->u.block.rn = rn;
5955
5956 dsc->u.block.load = load;
5957 dsc->u.block.user = user;
5958 dsc->u.block.increment = increment;
5959 dsc->u.block.before = before;
5960 dsc->u.block.writeback = writeback;
5961 dsc->u.block.cond = bits (insn, 28, 31);
5962
5963 dsc->u.block.regmask = insn & 0xffff;
5964
5965 if (load)
5966 {
5967 if ((insn & 0xffff) == 0xffff)
5968 {
5969 /* LDM with a fully-populated register list. This case is
5970 particularly tricky. Implement for now by fully emulating the
5971 instruction (which might not behave perfectly in all cases, but
5972 these instructions should be rare enough for that not to matter
5973 too much). */
5974 dsc->modinsn[0] = ARM_NOP;
5975
5976 dsc->cleanup = &cleanup_block_load_all;
5977 }
5978 else
5979 {
5980 /* LDM of a list of registers which includes PC. Implement by
5981 rewriting the list of registers to be transferred into a
5982 contiguous chunk r0...rX before doing the transfer, then shuffling
5983 registers into the correct places in the cleanup routine. */
5984 unsigned int regmask = insn & 0xffff;
5985 unsigned int num_in_list = bitcount (regmask), new_regmask;
5986 unsigned int i;
5987
5988 for (i = 0; i < num_in_list; i++)
5989 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5990
5991 /* Writeback makes things complicated. We need to avoid clobbering
5992 the base register with one of the registers in our modified
5993 register list, but just using a different register can't work in
5994 all cases, e.g.:
5995
5996 ldm r14!, {r0-r13,pc}
5997
5998 which would need to be rewritten as:
5999
6000 ldm rN!, {r0-r14}
6001
6002 but that can't work, because there's no free register for N.
6003
6004 Solve this by turning off the writeback bit, and emulating
6005 writeback manually in the cleanup routine. */
6006
6007 if (writeback)
6008 insn &= ~(1 << 21);
6009
6010 new_regmask = (1 << num_in_list) - 1;
6011
6012 if (debug_displaced)
6013 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6014 "{..., pc}: original reg list %.4x, modified "
6015 "list %.4x\n"), rn, writeback ? "!" : "",
6016 (int) insn & 0xffff, new_regmask);
6017
6018 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6019
6020 dsc->cleanup = &cleanup_block_load_pc;
6021 }
6022 }
6023 else
6024 {
6025 /* STM of a list of registers which includes PC. Run the instruction
6026 as-is, but out of line: this will store the wrong value for the PC,
6027 so we must manually fix up the memory in the cleanup routine.
6028 Doing things this way has the advantage that we can auto-detect
6029 the offset of the PC write (which is architecture-dependent) in
6030 the cleanup routine. */
6031 dsc->modinsn[0] = insn;
6032
6033 dsc->cleanup = &cleanup_block_store_pc;
6034 }
6035
6036 return 0;
6037 }
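
/* To make the LDM-with-PC path concrete: a hypothetical
   "ldmia r6!, {r1, r5, pc}" (0xe8b68022) has three registers in the list, so
   the writeback bit is cleared and the copied instruction becomes
   0xe8960007, i.e. "ldmia r6, {r0, r1, r2}".  cleanup_block_load_pc (see the
   example above it) then shuffles r0-r2 into r1, r5 and the PC and performs
   the writeback of r6 manually.  */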
6038
6039 static int
6040 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6041 struct regcache *regs,
6042 arm_displaced_step_closure *dsc)
6043 {
6044 int rn = bits (insn1, 0, 3);
6045 int load = bit (insn1, 4);
6046 int writeback = bit (insn1, 5);
6047
6048 /* Block transfers which don't mention PC can be run directly
6049 out-of-line. */
6050 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6051 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6052
6053 if (rn == ARM_PC_REGNUM)
6054 {
6055 warning (_("displaced: Unpredictable LDM or STM with "
6056 "base register r15"));
6057 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6058 "unpredictable ldm/stm", dsc);
6059 }
6060
6061 if (debug_displaced)
6062 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6063 "%.4x%.4x\n", insn1, insn2);
6064
6065 /* Clear bit 13, since it should always be zero. */
6066 dsc->u.block.regmask = (insn2 & 0xdfff);
6067 dsc->u.block.rn = rn;
6068
6069 dsc->u.block.load = load;
6070 dsc->u.block.user = 0;
6071 dsc->u.block.increment = bit (insn1, 7);
6072 dsc->u.block.before = bit (insn1, 8);
6073 dsc->u.block.writeback = writeback;
6074 dsc->u.block.cond = INST_AL;
6075 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6076
6077 if (load)
6078 {
6079 if (dsc->u.block.regmask == 0xffff)
6080 {
6081 /* This case cannot happen: bit 13 was cleared above, so the mask is never 0xffff. */
6082 gdb_assert (0);
6083 }
6084 else
6085 {
6086 unsigned int regmask = dsc->u.block.regmask;
6087 unsigned int num_in_list = bitcount (regmask), new_regmask;
6088 unsigned int i;
6089
6090 for (i = 0; i < num_in_list; i++)
6091 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6092
6093 if (writeback)
6094 insn1 &= ~(1 << 5);
6095
6096 new_regmask = (1 << num_in_list) - 1;
6097
6098 if (debug_displaced)
6099 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6100 "{..., pc}: original reg list %.4x, modified "
6101 "list %.4x\n"), rn, writeback ? "!" : "",
6102 (int) dsc->u.block.regmask, new_regmask);
6103
6104 dsc->modinsn[0] = insn1;
6105 dsc->modinsn[1] = (new_regmask & 0xffff);
6106 dsc->numinsns = 2;
6107
6108 dsc->cleanup = &cleanup_block_load_pc;
6109 }
6110 }
6111 else
6112 {
6113 dsc->modinsn[0] = insn1;
6114 dsc->modinsn[1] = insn2;
6115 dsc->numinsns = 2;
6116 dsc->cleanup = &cleanup_block_store_pc;
6117 }
6118 return 0;
6119 }
6120
6121 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6122 This is used to avoid a dependency on BFD's bfd_endian enum. */
6123
6124 ULONGEST
6125 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6126 int byte_order)
6127 {
6128 return read_memory_unsigned_integer (memaddr, len,
6129 (enum bfd_endian) byte_order);
6130 }
6131
6132 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6133
6134 CORE_ADDR
6135 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6136 CORE_ADDR val)
6137 {
6138 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6139 }
6140
6141 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6142
6143 static CORE_ADDR
6144 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6145 {
6146 return 0;
6147 }
6148
6149 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6150
6151 int
6152 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6153 {
6154 return arm_is_thumb (self->regcache);
6155 }
6156
6157 /* arm_software_single_step() is called just before we want to resume the
6158 inferior, if we want to single-step it but there is no hardware or kernel
6159 single-step support.  We find the targets of the upcoming instruction
6160 and set breakpoints on them. */
6161
6162 std::vector<CORE_ADDR>
6163 arm_software_single_step (struct regcache *regcache)
6164 {
6165 struct gdbarch *gdbarch = regcache->arch ();
6166 struct arm_get_next_pcs next_pcs_ctx;
6167
6168 arm_get_next_pcs_ctor (&next_pcs_ctx,
6169 &arm_get_next_pcs_ops,
6170 gdbarch_byte_order (gdbarch),
6171 gdbarch_byte_order_for_code (gdbarch),
6172 0,
6173 regcache);
6174
6175 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6176
6177 for (CORE_ADDR &pc_ref : next_pcs)
6178 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6179
6180 return next_pcs;
6181 }
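
/* Note: this only takes effect if the gdbarch registers it, which for ARM is
   expected to happen in the gdbarch initialization code via
   set_gdbarch_software_single_step (gdbarch, arm_software_single_step).  */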
6182
6183 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6184 for Linux, where some SVC instructions must be treated specially. */
6185
6186 static void
6187 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6188 arm_displaced_step_closure *dsc)
6189 {
6190 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6191
6192 if (debug_displaced)
6193 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6194 "%.8lx\n", (unsigned long) resume_addr);
6195
6196 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6197 }
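
/* For example, a 2-byte Thumb "svc #0" at 0x10504 resumes at 0x10506, while
   a 4-byte ARM SVC at the same address resumes at 0x10508, since resume_addr
   is simply insn_addr + insn_size.  */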
6198
6199
6200 /* Common copy routine for svc instruction. */
6201
6202 static int
6203 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6204 arm_displaced_step_closure *dsc)
6205 {
6206 /* Preparation: none.
6207 Insn: unmodified svc.
6208 Cleanup: pc <- insn_addr + insn_size. */
6209
6210 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6211 instruction. */
6212 dsc->wrote_to_pc = 1;
6213
6214 /* Allow OS-specific code to override SVC handling. */
6215 if (dsc->u.svc.copy_svc_os)
6216 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6217 else
6218 {
6219 dsc->cleanup = &cleanup_svc;
6220 return 0;
6221 }
6222 }
6223
6224 static int
6225 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6226 struct regcache *regs, arm_displaced_step_closure *dsc)
6227 {
6228
6229 if (debug_displaced)
6230 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6231 (unsigned long) insn);
6232
6233 dsc->modinsn[0] = insn;
6234
6235 return install_svc (gdbarch, regs, dsc);
6236 }
6237
6238 static int
6239 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6240 struct regcache *regs, arm_displaced_step_closure *dsc)
6241 {
6242
6243 if (debug_displaced)
6244 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6245 insn);
6246
6247 dsc->modinsn[0] = insn;
6248
6249 return install_svc (gdbarch, regs, dsc);
6250 }
6251
6252 /* Copy undefined instructions. */
6253
6254 static int
6255 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6256 arm_displaced_step_closure *dsc)
6257 {
6258 if (debug_displaced)
6259 fprintf_unfiltered (gdb_stdlog,
6260 "displaced: copying undefined insn %.8lx\n",
6261 (unsigned long) insn);
6262
6263 dsc->modinsn[0] = insn;
6264
6265 return 0;
6266 }
6267
6268 static int
6269 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6270 arm_displaced_step_closure *dsc)
6271 {
6272
6273 if (debug_displaced)
6274 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6275 "%.4x %.4x\n", (unsigned short) insn1,
6276 (unsigned short) insn2);
6277
6278 dsc->modinsn[0] = insn1;
6279 dsc->modinsn[1] = insn2;
6280 dsc->numinsns = 2;
6281
6282 return 0;
6283 }
6284
6285 /* Copy unpredictable instructions. */
6286
6287 static int
6288 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6289 arm_displaced_step_closure *dsc)
6290 {
6291 if (debug_displaced)
6292 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6293 "%.8lx\n", (unsigned long) insn);
6294
6295 dsc->modinsn[0] = insn;
6296
6297 return 0;
6298 }
6299
6300 /* The decode_* functions are instruction decoding helpers. They mostly follow
6301 the presentation in the ARM ARM. */
6302
6303 static int
6304 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6305 struct regcache *regs,
6306 arm_displaced_step_closure *dsc)
6307 {
6308 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6309 unsigned int rn = bits (insn, 16, 19);
6310
6311 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6312 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6313 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6314 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6315 else if ((op1 & 0x60) == 0x20)
6316 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6317 else if ((op1 & 0x71) == 0x40)
6318 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6319 dsc);
6320 else if ((op1 & 0x77) == 0x41)
6321 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6322 else if ((op1 & 0x77) == 0x45)
6323 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6324 else if ((op1 & 0x77) == 0x51)
6325 {
6326 if (rn != 0xf)
6327 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6328 else
6329 return arm_copy_unpred (gdbarch, insn, dsc);
6330 }
6331 else if ((op1 & 0x77) == 0x55)
6332 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6333 else if (op1 == 0x57)
6334 switch (op2)
6335 {
6336 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6337 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6338 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6339 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6340 default: return arm_copy_unpred (gdbarch, insn, dsc);
6341 }
6342 else if ((op1 & 0x63) == 0x43)
6343 return arm_copy_unpred (gdbarch, insn, dsc);
6344 else if ((op2 & 0x1) == 0x0)
6345 switch (op1 & ~0x80)
6346 {
6347 case 0x61:
6348 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6349 case 0x65:
6350 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6351 case 0x71: case 0x75:
6352 /* pld/pldw reg. */
6353 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6354 case 0x63: case 0x67: case 0x73: case 0x77:
6355 return arm_copy_unpred (gdbarch, insn, dsc);
6356 default:
6357 return arm_copy_undef (gdbarch, insn, dsc);
6358 }
6359 else
6360 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6361 }
6362
6363 static int
6364 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6365 struct regcache *regs,
6366 arm_displaced_step_closure *dsc)
6367 {
6368 if (bit (insn, 27) == 0)
6369 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6370 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6371 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6372 {
6373 case 0x0: case 0x2:
6374 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6375
6376 case 0x1: case 0x3:
6377 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6378
6379 case 0x4: case 0x5: case 0x6: case 0x7:
6380 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6381
6382 case 0x8:
6383 switch ((insn & 0xe00000) >> 21)
6384 {
6385 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6386 /* stc/stc2. */
6387 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6388
6389 case 0x2:
6390 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6391
6392 default:
6393 return arm_copy_undef (gdbarch, insn, dsc);
6394 }
6395
6396 case 0x9:
6397 {
6398 int rn_f = (bits (insn, 16, 19) == 0xf);
6399 switch ((insn & 0xe00000) >> 21)
6400 {
6401 case 0x1: case 0x3:
6402 /* ldc/ldc2 imm (undefined for rn == pc). */
6403 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6404 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6405
6406 case 0x2:
6407 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6408
6409 case 0x4: case 0x5: case 0x6: case 0x7:
6410 /* ldc/ldc2 lit (undefined for rn != pc). */
6411 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6412 : arm_copy_undef (gdbarch, insn, dsc);
6413
6414 default:
6415 return arm_copy_undef (gdbarch, insn, dsc);
6416 }
6417 }
6418
6419 case 0xa:
6420 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6421
6422 case 0xb:
6423 if (bits (insn, 16, 19) == 0xf)
6424 /* ldc/ldc2 lit. */
6425 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6426 else
6427 return arm_copy_undef (gdbarch, insn, dsc);
6428
6429 case 0xc:
6430 if (bit (insn, 4))
6431 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6432 else
6433 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6434
6435 case 0xd:
6436 if (bit (insn, 4))
6437 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6438 else
6439 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6440
6441 default:
6442 return arm_copy_undef (gdbarch, insn, dsc);
6443 }
6444 }
6445
6446 /* Decode miscellaneous instructions in dp/misc encoding space. */
6447
6448 static int
6449 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6450 struct regcache *regs,
6451 arm_displaced_step_closure *dsc)
6452 {
6453 unsigned int op2 = bits (insn, 4, 6);
6454 unsigned int op = bits (insn, 21, 22);
6455
6456 switch (op2)
6457 {
6458 case 0x0:
6459 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6460
6461 case 0x1:
6462 if (op == 0x1) /* bx. */
6463 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6464 else if (op == 0x3)
6465 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6466 else
6467 return arm_copy_undef (gdbarch, insn, dsc);
6468
6469 case 0x2:
6470 if (op == 0x1)
6471 /* Not really supported. */
6472 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6473 else
6474 return arm_copy_undef (gdbarch, insn, dsc);
6475
6476 case 0x3:
6477 if (op == 0x1)
6478 return arm_copy_bx_blx_reg (gdbarch, insn,
6479 regs, dsc); /* blx register. */
6480 else
6481 return arm_copy_undef (gdbarch, insn, dsc);
6482
6483 case 0x5:
6484 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6485
6486 case 0x7:
6487 if (op == 0x1)
6488 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6489 else if (op == 0x3)
6490 /* Not really supported. */
6491 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6492 /* Fall through. */
6493
6494 default:
6495 return arm_copy_undef (gdbarch, insn, dsc);
6496 }
6497 }
6498
6499 static int
6500 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6501 struct regcache *regs,
6502 arm_displaced_step_closure *dsc)
6503 {
6504 if (bit (insn, 25))
6505 switch (bits (insn, 20, 24))
6506 {
6507 case 0x10:
6508 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6509
6510 case 0x14:
6511 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6512
6513 case 0x12: case 0x16:
6514 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6515
6516 default:
6517 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6518 }
6519 else
6520 {
6521 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6522
6523 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6524 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6525 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6526 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6527 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6528 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6529 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6530 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6531 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6532 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6533 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6534 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6535 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6536 /* 2nd arg means "unprivileged". */
6537 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6538 dsc);
6539 }
6540
6541 /* Should be unreachable. */
6542 return 1;
6543 }
6544
6545 static int
6546 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6547 struct regcache *regs,
6548 arm_displaced_step_closure *dsc)
6549 {
6550 int a = bit (insn, 25), b = bit (insn, 4);
6551 uint32_t op1 = bits (insn, 20, 24);
6552
6553 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6554 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6555 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6556 else if ((!a && (op1 & 0x17) == 0x02)
6557 || (a && (op1 & 0x17) == 0x02 && !b))
6558 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6559 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6560 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6561 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6562 else if ((!a && (op1 & 0x17) == 0x03)
6563 || (a && (op1 & 0x17) == 0x03 && !b))
6564 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6565 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6566 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6567 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6568 else if ((!a && (op1 & 0x17) == 0x06)
6569 || (a && (op1 & 0x17) == 0x06 && !b))
6570 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6571 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6572 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6573 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6574 else if ((!a && (op1 & 0x17) == 0x07)
6575 || (a && (op1 & 0x17) == 0x07 && !b))
6576 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6577
6578 /* Should be unreachable. */
6579 return 1;
6580 }
6581
6582 static int
6583 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6584 arm_displaced_step_closure *dsc)
6585 {
6586 switch (bits (insn, 20, 24))
6587 {
6588 case 0x00: case 0x01: case 0x02: case 0x03:
6589 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6590
6591 case 0x04: case 0x05: case 0x06: case 0x07:
6592 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6593
6594 case 0x08: case 0x09: case 0x0a: case 0x0b:
6595 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6596 return arm_copy_unmodified (gdbarch, insn,
6597 "decode/pack/unpack/saturate/reverse", dsc);
6598
6599 case 0x18:
6600 if (bits (insn, 5, 7) == 0) /* op2. */
6601 {
6602 if (bits (insn, 12, 15) == 0xf)
6603 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6604 else
6605 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6606 }
6607 else
6608 return arm_copy_undef (gdbarch, insn, dsc);
6609
6610 case 0x1a: case 0x1b:
6611 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6612 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6613 else
6614 return arm_copy_undef (gdbarch, insn, dsc);
6615
6616 case 0x1c: case 0x1d:
6617 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6618 {
6619 if (bits (insn, 0, 3) == 0xf)
6620 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6621 else
6622 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6623 }
6624 else
6625 return arm_copy_undef (gdbarch, insn, dsc);
6626
6627 case 0x1e: case 0x1f:
6628 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6629 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6630 else
6631 return arm_copy_undef (gdbarch, insn, dsc);
6632 }
6633
6634 /* Should be unreachable. */
6635 return 1;
6636 }
6637
6638 static int
6639 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6640 struct regcache *regs,
6641 arm_displaced_step_closure *dsc)
6642 {
6643 if (bit (insn, 25))
6644 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6645 else
6646 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6647 }
6648
6649 static int
6650 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6651 struct regcache *regs,
6652 arm_displaced_step_closure *dsc)
6653 {
6654 unsigned int opcode = bits (insn, 20, 24);
6655
6656 switch (opcode)
6657 {
6658 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6659 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6660
6661 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6662 case 0x12: case 0x16:
6663 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6664
6665 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6666 case 0x13: case 0x17:
6667 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6668
6669 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6670 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6671 /* Note: no writeback for these instructions. Bit 25 will always be
6672 zero though (via caller), so the following works OK. */
6673 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6674 }
6675
6676 /* Should be unreachable. */
6677 return 1;
6678 }
6679
6680 /* Decode shifted register instructions. */
6681
6682 static int
6683 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6684 uint16_t insn2, struct regcache *regs,
6685 arm_displaced_step_closure *dsc)
6686 {
6687 /* PC is only allowed to be used in the MOV instruction. */
6688
6689 unsigned int op = bits (insn1, 5, 8);
6690 unsigned int rn = bits (insn1, 0, 3);
6691
6692 if (op == 0x2 && rn == 0xf) /* MOV */
6693 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6694 else
6695 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6696 "dp (shift reg)", dsc);
6697 }
6698
6699
6700 /* Decode extension register load/store. Exactly the same as
6701 arm_decode_ext_reg_ld_st. */
6702
6703 static int
6704 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6705 uint16_t insn2, struct regcache *regs,
6706 arm_displaced_step_closure *dsc)
6707 {
6708 unsigned int opcode = bits (insn1, 4, 8);
6709
6710 switch (opcode)
6711 {
6712 case 0x04: case 0x05:
6713 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6714 "vfp/neon vmov", dsc);
6715
6716 case 0x08: case 0x0c: /* 01x00 */
6717 case 0x0a: case 0x0e: /* 01x10 */
6718 case 0x12: case 0x16: /* 10x10 */
6719 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6720 "vfp/neon vstm/vpush", dsc);
6721
6722 case 0x09: case 0x0d: /* 01x01 */
6723 case 0x0b: case 0x0f: /* 01x11 */
6724 case 0x13: case 0x17: /* 10x11 */
6725 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6726 "vfp/neon vldm/vpop", dsc);
6727
6728 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6729 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6730 "vstr", dsc);
6731 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6732 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6733 }
6734
6735 /* Should be unreachable. */
6736 return 1;
6737 }
6738
6739 static int
6740 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6741 struct regcache *regs, arm_displaced_step_closure *dsc)
6742 {
6743 unsigned int op1 = bits (insn, 20, 25);
6744 int op = bit (insn, 4);
6745 unsigned int coproc = bits (insn, 8, 11);
6746
6747 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6748 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6749 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6750 && (coproc & 0xe) != 0xa)
6751 /* stc/stc2. */
6752 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6753 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6754 && (coproc & 0xe) != 0xa)
6755 /* ldc/ldc2 imm/lit. */
6756 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6757 else if ((op1 & 0x3e) == 0x00)
6758 return arm_copy_undef (gdbarch, insn, dsc);
6759 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6760 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6761 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6762 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6763 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6764 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6765 else if ((op1 & 0x30) == 0x20 && !op)
6766 {
6767 if ((coproc & 0xe) == 0xa)
6768 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6769 else
6770 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6771 }
6772 else if ((op1 & 0x30) == 0x20 && op)
6773 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6774 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6775 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6776 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6777 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6778 else if ((op1 & 0x30) == 0x30)
6779 return arm_copy_svc (gdbarch, insn, regs, dsc);
6780 else
6781 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6782 }
6783
6784 static int
6785 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6786 uint16_t insn2, struct regcache *regs,
6787 arm_displaced_step_closure *dsc)
6788 {
6789 unsigned int coproc = bits (insn2, 8, 11);
6790 unsigned int bit_5_8 = bits (insn1, 5, 8);
6791 unsigned int bit_9 = bit (insn1, 9);
6792 unsigned int bit_4 = bit (insn1, 4);
6793
6794 if (bit_9 == 0)
6795 {
6796 if (bit_5_8 == 2)
6797 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6798 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6799 dsc);
6800 else if (bit_5_8 == 0) /* UNDEFINED. */
6801 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6802 else
6803 {
6804 /* coproc is 101x.  SIMD/VFP, ext registers load/store. */
6805 if ((coproc & 0xe) == 0xa)
6806 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6807 dsc);
6808 else /* coproc is not 101x. */
6809 {
6810 if (bit_4 == 0) /* STC/STC2. */
6811 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6812 "stc/stc2", dsc);
6813 else /* LDC/LDC2 {literal, immediate}. */
6814 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6815 regs, dsc);
6816 }
6817 }
6818 }
6819 else
6820 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6821
6822 return 0;
6823 }
6824
6825 static void
6826 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6827 arm_displaced_step_closure *dsc, int rd)
6828 {
6829 /* ADR Rd, #imm
6830
6831 Rewrite as:
6832
6833 Preparation: Rd <- PC
6834 Insn: ADD Rd, #imm
6835 Cleanup: Null.
6836 */
6837
6838 /* Rd <- PC */
6839 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6840 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6841 }
6842
6843 static int
6844 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6845 arm_displaced_step_closure *dsc,
6846 int rd, unsigned int imm)
6847 {
6848
6849 /* Encoding T2: ADDS Rd, #imm */
6850 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6851
6852 install_pc_relative (gdbarch, regs, dsc, rd);
6853
6854 return 0;
6855 }
6856
6857 static int
6858 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6859 struct regcache *regs,
6860 arm_displaced_step_closure *dsc)
6861 {
6862 unsigned int rd = bits (insn, 8, 10);
6863 unsigned int imm8 = bits (insn, 0, 7);
6864
6865 if (debug_displaced)
6866 fprintf_unfiltered (gdb_stdlog,
6867 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6868 rd, imm8, insn);
6869
6870 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6871 }
6872
6873 static int
6874 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6875 uint16_t insn2, struct regcache *regs,
6876 arm_displaced_step_closure *dsc)
6877 {
6878 unsigned int rd = bits (insn2, 8, 11);
6879 /* The immediate fields have the same layout in ADR, ADD and SUB, so we simply
6880 extract the raw immediate encoding rather than computing the immediate value.
6881 When generating the ADD or SUB instruction, we can then OR those fields
6882 directly into the new encoding. */
6883 unsigned int imm_3_8 = insn2 & 0x70ff;
6884 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6885
6886 if (debug_displaced)
6887 fprintf_unfiltered (gdb_stdlog,
6888 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6889 rd, imm_i, imm_3_8, insn1, insn2);
6890
6891 if (bit (insn1, 7)) /* ADR encoding T2 (SUB form). */
6892 {
6893 /* Rewrite as SUB (immediate) encoding T3: SUB Rd, Rd, #imm. */
6894 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6895 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6896 }
6897 else /* ADR encoding T3 (ADD form). */
6898 {
6899 /* Rewrite as ADD (immediate) encoding T3: ADD Rd, Rd, #imm. */
6900 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6901 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6902 }
6903 dsc->numinsns = 2;
6904
6905 install_pc_relative (gdbarch, regs, dsc, rd);
6906
6907 return 0;
6908 }
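
/* As an illustration of the field splicing, take a hypothetical
   "adr.w r5, <label>" with i = 0, imm3 = 0 and imm8 = 0x30, encoded as
   0xf20f 0x0530.  Bit 7 of insn1 is clear, so the ADD form is chosen:
   imm_i = 0, imm_3_8 = 0x0030, and the copied instruction becomes
   0xf105 0x0530, i.e. "add.w r5, r5, #0x30", executed after r5 has been
   seeded with the PC value by install_pc_relative.  */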
6909
6910 static int
6911 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6912 struct regcache *regs,
6913 arm_displaced_step_closure *dsc)
6914 {
6915 unsigned int rt = bits (insn1, 8, 10);
6916 unsigned int pc;
6917 int imm8 = (bits (insn1, 0, 7) << 2);
6918
6919 /* LDR Rd, [PC, #imm8]
6920
6921 Rewrite as:
6922
6923 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6924
6925 Insn: LDR R0, [R2, R3];
6926 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6927
6928 if (debug_displaced)
6929 fprintf_unfiltered (gdb_stdlog,
6930 "displaced: copying thumb ldr r%d [pc #%d]\n"
6931 , rt, imm8);
6932
6933 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6934 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6935 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6936 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6937 /* The assembler calculates the required value of the offset from the
6938 Align(PC,4) value of this instruction to the label. */
6939 pc = pc & 0xfffffffc;
6940
6941 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6942 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6943
6944 dsc->rd = rt;
6945 dsc->u.ldst.xfersize = 4;
6946 dsc->u.ldst.rn = 0;
6947 dsc->u.ldst.immed = 0;
6948 dsc->u.ldst.writeback = 0;
6949 dsc->u.ldst.restore_r4 = 0;
6950
6951 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6952
6953 dsc->cleanup = &cleanup_load;
6954
6955 return 0;
6956 }
6957
6958 /* Copy Thumb cbnz/cbz instruction. */
6959
6960 static int
6961 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6962 struct regcache *regs,
6963 arm_displaced_step_closure *dsc)
6964 {
6965 int non_zero = bit (insn1, 11);
6966 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6967 CORE_ADDR from = dsc->insn_addr;
6968 int rn = bits (insn1, 0, 2);
6969 int rn_val = displaced_read_reg (regs, dsc, rn);
6970
6971 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6972 /* CBNZ and CBZ do not affect the condition flags.  If the branch is taken,
6973 set the condition to INST_AL so that cleanup_branch knows the branch is taken;
6974 otherwise leave it false, and cleanup_branch will do nothing. */
6975 if (dsc->u.branch.cond)
6976 {
6977 dsc->u.branch.cond = INST_AL;
6978 dsc->u.branch.dest = from + 4 + imm5;
6979 }
6980 else
6981 dsc->u.branch.dest = from + 2;
6982
6983 dsc->u.branch.link = 0;
6984 dsc->u.branch.exchange = 0;
6985
6986 if (debug_displaced)
6987 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6988 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6989 rn, rn_val, insn1, dsc->u.branch.dest);
6990
6991 dsc->modinsn[0] = THUMB_NOP;
6992
6993 dsc->cleanup = &cleanup_branch;
6994 return 0;
6995 }
6996
6997 /* Copy Table Branch Byte/Halfword */
6998 static int
6999 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7000 uint16_t insn2, struct regcache *regs,
7001 arm_displaced_step_closure *dsc)
7002 {
7003 ULONGEST rn_val, rm_val;
7004 int is_tbh = bit (insn2, 4);
7005 CORE_ADDR halfwords = 0;
7006 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7007
7008 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7009 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7010
7011 if (is_tbh)
7012 {
7013 gdb_byte buf[2];
7014
7015 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7016 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7017 }
7018 else
7019 {
7020 gdb_byte buf[1];
7021
7022 target_read_memory (rn_val + rm_val, buf, 1);
7023 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7024 }
7025
7026 if (debug_displaced)
7027 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7028 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7029 (unsigned int) rn_val, (unsigned int) rm_val,
7030 (unsigned int) halfwords);
7031
7032 dsc->u.branch.cond = INST_AL;
7033 dsc->u.branch.link = 0;
7034 dsc->u.branch.exchange = 0;
7035 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7036
7037 dsc->cleanup = &cleanup_branch;
7038
7039 return 0;
7040 }
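
/* For example, a hypothetical "tbh [r1, r0, lsl #1]" at 0x8000 with
   r1 = 0x9000 and r0 = 3 reads the halfword at 0x9006; if that entry is
   0x21, the branch destination is 0x8000 + 4 + 2 * 0x21 = 0x8046, which
   cleanup_branch then writes into the PC.  */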
7041
7042 static void
7043 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7044 arm_displaced_step_closure *dsc)
7045 {
7046 /* PC <- r7 */
7047 int val = displaced_read_reg (regs, dsc, 7);
7048 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7049
7050 /* r7 <- r8 */
7051 val = displaced_read_reg (regs, dsc, 8);
7052 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7053
7054 /* r8 <- tmp[0] */
7055 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7056
7057 }
7058
7059 static int
7060 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7061 struct regcache *regs,
7062 arm_displaced_step_closure *dsc)
7063 {
7064 dsc->u.block.regmask = insn1 & 0x00ff;
7065
7066 /* Rewrite instruction POP {rX, rY, ..., rZ, PC} as follows:
7068
7069 (1) register list is full, that is, r0-r7 are used.
7070 Prepare: tmp[0] <- r8
7071
7072 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7073 MOV r8, r7; Move value of r7 to r8;
7074 POP {r7}; Store PC value into r7.
7075
7076 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7077
7078 (2) register list is not full, supposing there are N registers in
7079 register list (except PC, 0 <= N <= 7).
7080 Prepare: for each i, 0 - N, tmp[i] <- ri.
7081
7082 POP {r0, r1, ...., rN};
7083
7084 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7085 from tmp[] properly.
7086 */
7087 if (debug_displaced)
7088 fprintf_unfiltered (gdb_stdlog,
7089 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7090 dsc->u.block.regmask, insn1);
7091
7092 if (dsc->u.block.regmask == 0xff)
7093 {
7094 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7095
7096 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7097 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7098 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7099
7100 dsc->numinsns = 3;
7101 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7102 }
7103 else
7104 {
7105 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7106 unsigned int i;
7107 unsigned int new_regmask;
7108
7109 for (i = 0; i < num_in_list + 1; i++)
7110 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7111
7112 new_regmask = (1 << (num_in_list + 1)) - 1;
7113
7114 if (debug_displaced)
7115 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7116 "{..., pc}: original reg list %.4x,"
7117 " modified list %.4x\n"),
7118 (int) dsc->u.block.regmask, new_regmask);
7119
7120 dsc->u.block.regmask |= 0x8000;
7121 dsc->u.block.writeback = 0;
7122 dsc->u.block.cond = INST_AL;
7123
7124 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7125
7126 dsc->cleanup = &cleanup_block_load_pc;
7127 }
7128
7129 return 0;
7130 }
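
/* Worked example of case (2) above: a hypothetical "pop {r0, r2, pc}"
   (0xbd05) has regmask 0x05, so num_in_list = 2 and the copied instruction
   becomes 0xbc07, i.e. "pop {r0, r1, r2}".  The regmask recorded for cleanup
   is 0x8005, and cleanup_block_load_pc then writes PC <- r2, r2 <- r1,
   leaves r0 alone and restores r1 from dsc->tmp[1].  */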
7131
7132 static void
7133 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7134 struct regcache *regs,
7135 arm_displaced_step_closure *dsc)
7136 {
7137 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7138 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7139 int err = 0;
7140
7141 /* 16-bit thumb instructions. */
7142 switch (op_bit_12_15)
7143 {
7144 /* Shift (immediate), add, subtract, move and compare. */
7145 case 0: case 1: case 2: case 3:
7146 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7147 "shift/add/sub/mov/cmp",
7148 dsc);
7149 break;
7150 case 4:
7151 switch (op_bit_10_11)
7152 {
7153 case 0: /* Data-processing */
7154 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7155 "data-processing",
7156 dsc);
7157 break;
7158 case 1: /* Special data instructions and branch and exchange. */
7159 {
7160 unsigned short op = bits (insn1, 7, 9);
7161 if (op == 6 || op == 7) /* BX or BLX */
7162 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7163 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7164 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7165 else
7166 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7167 dsc);
7168 }
7169 break;
7170 default: /* LDR (literal) */
7171 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7172 }
7173 break;
7174 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7175 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7176 break;
7177 case 10:
7178 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7179 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7180 else /* Generate SP-relative address */
7181 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7182 break;
7183 case 11: /* Misc 16-bit instructions */
7184 {
7185 switch (bits (insn1, 8, 11))
7186 {
7187 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7188 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7189 break;
7190 case 12: case 13: /* POP */
7191 if (bit (insn1, 8)) /* PC is in register list. */
7192 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7193 else
7194 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7195 break;
7196 case 15: /* If-Then, and hints */
7197 if (bits (insn1, 0, 3))
7198 /* If-Then makes up to four following instructions conditional.  The
7199 IT instruction itself is not conditional, so handle it as an
7200 ordinary unmodified instruction. */
7201 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7202 dsc);
7203 else
7204 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7205 break;
7206 default:
7207 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7208 }
7209 }
7210 break;
7211 case 12:
7212 if (op_bit_10_11 < 2) /* Store multiple registers */
7213 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7214 else /* Load multiple registers */
7215 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7216 break;
7217 case 13: /* Conditional branch and supervisor call */
7218 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7219 err = thumb_copy_b (gdbarch, insn1, dsc);
7220 else
7221 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7222 break;
7223 case 14: /* Unconditional branch */
7224 err = thumb_copy_b (gdbarch, insn1, dsc);
7225 break;
7226 default:
7227 err = 1;
7228 }
7229
7230 if (err)
7231 internal_error (__FILE__, __LINE__,
7232 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7233 }
7234
7235 static int
7236 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7237 uint16_t insn1, uint16_t insn2,
7238 struct regcache *regs,
7239 arm_displaced_step_closure *dsc)
7240 {
7241 int rt = bits (insn2, 12, 15);
7242 int rn = bits (insn1, 0, 3);
7243 int op1 = bits (insn1, 7, 8);
7244
7245 switch (bits (insn1, 5, 6))
7246 {
7247 case 0: /* Load byte and memory hints */
7248 if (rt == 0xf) /* PLD/PLI */
7249 {
7250 if (rn == 0xf)
7251 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7252 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7253 else
7254 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7255 "pli/pld", dsc);
7256 }
7257 else
7258 {
7259 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7260 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7261 1);
7262 else
7263 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7264 "ldrb{reg, immediate}/ldrbt",
7265 dsc);
7266 }
7267
7268 break;
7269 case 1: /* Load halfword and memory hints. */
7270 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7271 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7272 "pld/unalloc memhint", dsc);
7273 else
7274 {
7275 if (rn == 0xf)
7276 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7277 2);
7278 else
7279 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7280 "ldrh/ldrht", dsc);
7281 }
7282 break;
7283 case 2: /* Load word */
7284 {
7285 int insn2_bit_8_11 = bits (insn2, 8, 11);
7286
7287 if (rn == 0xf)
7288 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7289 else if (op1 == 0x1) /* Encoding T3 */
7290 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7291 0, 1);
7292 else /* op1 == 0x0 */
7293 {
7294 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7295 /* LDR (immediate) */
7296 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7297 dsc, bit (insn2, 8), 1);
7298 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7299 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7300 "ldrt", dsc);
7301 else
7302 /* LDR (register) */
7303 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7304 dsc, 0, 0);
7305 }
7306 break;
7307 }
7308 default:
7309 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7310 break;
7311 }
7312 return 0;
7313 }
7314
7315 static void
7316 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7317 uint16_t insn2, struct regcache *regs,
7318 arm_displaced_step_closure *dsc)
7319 {
7320 int err = 0;
7321 unsigned short op = bit (insn2, 15);
7322 unsigned int op1 = bits (insn1, 11, 12);
7323
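/* Bits 12-11 of the first halfword select the major group of 32-bit
Thumb encodings: 01 covers load/store multiple, load/store dual or
exclusive, table branch, data-processing (shifted register) and
coprocessor instructions; 10 covers the data-processing immediate
forms and branches/miscellaneous control; 11 covers loads/stores,
data-processing (register), multiplies and coprocessor instructions.
A value of 00 would indicate a 16-bit instruction. */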
7324 switch (op1)
7325 {
7326 case 1:
7327 {
7328 switch (bits (insn1, 9, 10))
7329 {
7330 case 0:
7331 if (bit (insn1, 6))
7332 {
7333 /* Load/store {dual, exclusive}, table branch. */
7334 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7335 && bits (insn2, 5, 7) == 0)
7336 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7337 dsc);
7338 else
7339 /* PC is not allowed to be used in load/store {dual, exclusive}
7340 instructions. */
7341 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7342 "load/store dual/ex", dsc);
7343 }
7344 else /* load/store multiple */
7345 {
7346 switch (bits (insn1, 7, 8))
7347 {
7348 case 0: case 3: /* SRS, RFE */
7349 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7350 "srs/rfe", dsc);
7351 break;
7352 case 1: case 2: /* LDM/STM/PUSH/POP */
7353 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7354 break;
7355 }
7356 }
7357 break;
7358
7359 case 1:
7360 /* Data-processing (shift register). */
7361 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7362 dsc);
7363 break;
7364 default: /* Coprocessor instructions. */
7365 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7366 break;
7367 }
7368 break;
7369 }
7370 case 2: /* op1 = 2 */
7371 if (op) /* Branch and misc control. */
7372 {
7373 if (bit (insn2, 14) /* BLX/BL */
7374 || bit (insn2, 12) /* Unconditional branch */
7375 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7376 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7377 else
7378 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7379 "misc ctrl", dsc);
7380 }
7381 else
7382 {
7383 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7384 {
7385 int dp_op = bits (insn1, 4, 8);
7386 int rn = bits (insn1, 0, 3);
7387 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7388 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7389 regs, dsc);
7390 else
7391 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7392 "dp/pb", dsc);
7393 }
7394 else /* Data processing (modified immediate) */
7395 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7396 "dp/mi", dsc);
7397 }
7398 break;
7399 case 3: /* op1 = 3 */
7400 switch (bits (insn1, 9, 10))
7401 {
7402 case 0:
7403 if (bit (insn1, 4))
7404 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7405 regs, dsc);
7406 else /* NEON Load/Store and Store single data item */
7407 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7408 "neon elt/struct load/store",
7409 dsc);
7410 break;
7411 case 1: /* op1 = 3, bits (9, 10) == 1 */
7412 switch (bits (insn1, 7, 8))
7413 {
7414 case 0: case 1: /* Data processing (register) */
7415 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7416 "dp(reg)", dsc);
7417 break;
7418 case 2: /* Multiply and absolute difference */
7419 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7420 "mul/mua/diff", dsc);
7421 break;
7422 case 3: /* Long multiply and divide */
7423 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7424 "lmul/lmua", dsc);
7425 break;
7426 }
7427 break;
7428 default: /* Coprocessor instructions */
7429 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7430 break;
7431 }
7432 break;
7433 default:
7434 err = 1;
7435 }
7436
7437 if (err)
7438 internal_error (__FILE__, __LINE__,
7439 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7440
7441 }
7442
7443 static void
7444 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7445 struct regcache *regs,
7446 arm_displaced_step_closure *dsc)
7447 {
7448 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7449 uint16_t insn1
7450 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7451
7452 if (debug_displaced)
7453 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7454 "at %.8lx\n", insn1, (unsigned long) from);
7455
7456 dsc->is_thumb = 1;
7457 dsc->insn_size = thumb_insn_size (insn1);
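/* A Thumb instruction occupies 32 bits when the top five bits of its
first halfword are 0b11101, 0b11110 or 0b11111; anything else is a
16-bit instruction. */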
7458 if (thumb_insn_size (insn1) == 4)
7459 {
7460 uint16_t insn2
7461 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7462 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7463 }
7464 else
7465 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7466 }
7467
7468 void
7469 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7470 CORE_ADDR to, struct regcache *regs,
7471 arm_displaced_step_closure *dsc)
7472 {
7473 int err = 0;
7474 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7475 uint32_t insn;
7476
7477 /* Most displaced instructions use a 1-instruction scratch space, so set this
7478 here and override below if/when necessary. */
7479 dsc->numinsns = 1;
7480 dsc->insn_addr = from;
7481 dsc->scratch_base = to;
7482 dsc->cleanup = NULL;
7483 dsc->wrote_to_pc = 0;
7484
7485 if (!displaced_in_arm_mode (regs))
7486 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7487
7488 dsc->is_thumb = 0;
7489 dsc->insn_size = 4;
7490 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7491 if (debug_displaced)
7492 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7493 "at %.8lx\n", (unsigned long) insn,
7494 (unsigned long) from);
7495
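/* Instructions with the condition field 0xf live in the unconditional
space. For the rest, the switch key below is built from bits 27-25
of the instruction (the major opcode) in the top three bits of the
key, with bit 4 of the instruction as the least significant bit. */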
7496 if ((insn & 0xf0000000) == 0xf0000000)
7497 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7498 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7499 {
7500 case 0x0: case 0x1: case 0x2: case 0x3:
7501 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7502 break;
7503
7504 case 0x4: case 0x5: case 0x6:
7505 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7506 break;
7507
7508 case 0x7:
7509 err = arm_decode_media (gdbarch, insn, dsc);
7510 break;
7511
7512 case 0x8: case 0x9: case 0xa: case 0xb:
7513 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7514 break;
7515
7516 case 0xc: case 0xd: case 0xe: case 0xf:
7517 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7518 break;
7519 }
7520
7521 if (err)
7522 internal_error (__FILE__, __LINE__,
7523 _("arm_process_displaced_insn: Instruction decode error"));
7524 }
7525
7526 /* Actually set up the scratch space for a displaced instruction. */
7527
7528 void
7529 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7530 CORE_ADDR to, arm_displaced_step_closure *dsc)
7531 {
7532 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7533 unsigned int i, len, offset;
7534 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7535 int size = dsc->is_thumb? 2 : 4;
7536 const gdb_byte *bkp_insn;
7537
7538 offset = 0;
7539 /* Poke modified instruction(s). */
7540 for (i = 0; i < dsc->numinsns; i++)
7541 {
7542 if (debug_displaced)
7543 {
7544 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7545 if (size == 4)
7546 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7547 dsc->modinsn[i]);
7548 else if (size == 2)
7549 fprintf_unfiltered (gdb_stdlog, "%.4x",
7550 (unsigned short)dsc->modinsn[i]);
7551
7552 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7553 (unsigned long) to + offset);
7554
7555 }
7556 write_memory_unsigned_integer (to + offset, size,
7557 byte_order_for_code,
7558 dsc->modinsn[i]);
7559 offset += size;
7560 }
7561
7562 /* Choose the correct breakpoint instruction. */
7563 if (dsc->is_thumb)
7564 {
7565 bkp_insn = tdep->thumb_breakpoint;
7566 len = tdep->thumb_breakpoint_size;
7567 }
7568 else
7569 {
7570 bkp_insn = tdep->arm_breakpoint;
7571 len = tdep->arm_breakpoint_size;
7572 }
7573
7574 /* Put breakpoint afterwards. */
7575 write_memory (to + offset, bkp_insn, len);
7576
7577 if (debug_displaced)
7578 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7579 paddress (gdbarch, from), paddress (gdbarch, to));
7580 }
7581
7582 /* Entry point for cleaning things up after a displaced instruction has been
7583 single-stepped. */
7584
7585 void
7586 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7587 struct displaced_step_closure *dsc_,
7588 CORE_ADDR from, CORE_ADDR to,
7589 struct regcache *regs)
7590 {
7591 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7592
7593 if (dsc->cleanup)
7594 dsc->cleanup (gdbarch, regs, dsc);
7595
7596 if (!dsc->wrote_to_pc)
7597 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7598 dsc->insn_addr + dsc->insn_size);
7599
7600 }
7601
7602 #include "bfd-in2.h"
7603 #include "libcoff.h"
7604
7605 static int
7606 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7607 {
7608 gdb_disassembler *di
7609 = static_cast<gdb_disassembler *>(info->application_data);
7610 struct gdbarch *gdbarch = di->arch ();
7611
7612 if (arm_pc_is_thumb (gdbarch, memaddr))
7613 {
7614 static asymbol *asym;
7615 static combined_entry_type ce;
7616 static struct coff_symbol_struct csym;
7617 static struct bfd fake_bfd;
7618 static bfd_target fake_target;
7619
7620 if (csym.native == NULL)
7621 {
7622 /* Create a fake symbol vector containing a Thumb symbol.
7623 This is solely so that the code in print_insn_little_arm()
7624 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7625 the presence of a Thumb symbol and switch to decoding
7626 Thumb instructions. */
7627
7628 fake_target.flavour = bfd_target_coff_flavour;
7629 fake_bfd.xvec = &fake_target;
7630 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7631 csym.native = &ce;
7632 csym.symbol.the_bfd = &fake_bfd;
7633 csym.symbol.name = "fake";
7634 asym = (asymbol *) & csym;
7635 }
7636
7637 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7638 info->symbols = &asym;
7639 }
7640 else
7641 info->symbols = NULL;
7642
7643 /* GDB is able to get bfd_mach from exec_bfd, so info->mach is
7644 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7645 opcodes/arm-dis.c:print_insn would reset info->mach, which would trigger
7646 the assert on the mismatch between info->mach and bfd_get_mach (exec_bfd)
7647 in default_print_insn. */
7648 if (exec_bfd != NULL)
7649 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7650
7651 return default_print_insn (memaddr, info);
7652 }
7653
7654 /* The following define instruction sequences that will cause ARM
7655 CPUs to take an undefined instruction trap. These are used to
7656 signal a breakpoint to GDB.
7657
7658 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7659 modes. A different instruction is required for each mode. The ARM
7660 CPUs can also be big or little endian. Thus four different
7661 instructions are needed to support all cases.
7662
7663 Note: ARMv4 defines several new instructions that will take the
7664 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7665 not in fact add the new instructions. The new undefined
7666 instructions in ARMv4 are all instructions that had no defined
7667 behaviour in earlier chips. There is no guarantee that they will
7668 raise an exception; they may be treated as NOPs. In practice, it
7669 may only be safe to rely on instructions matching:
7670
7671 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7672 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7673 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7674
7675 Even this may only be true if the condition predicate is true. The
7676 following use a condition predicate of ALWAYS so it is always TRUE.
7677
7678 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7679 and NetBSD all use a software interrupt rather than an undefined
7680 instruction to force a trap. This can be handled by the
7681 abi-specific code during establishment of the gdbarch vector. */
7682
7683 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7684 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7685 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7686 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7687
7688 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7689 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7690 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7691 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
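/* Worked example (added for clarity): both ARM byte sequences above
encode the word 0xE7FFDEFE, which has cond == AL (0b1110),
bits [27:25] == 0b011 and bit [4] == 1, so it matches the
undefined-instruction pattern described above. The Thumb sequence is
the halfword 0xBEBE, i.e. BKPT #0xbe. */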
7692
7693 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7694
7695 static int
7696 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7697 {
7698 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7699 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7700
7701 if (arm_pc_is_thumb (gdbarch, *pcptr))
7702 {
7703 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7704
7705 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7706 check whether we are replacing a 32-bit instruction. */
7707 if (tdep->thumb2_breakpoint != NULL)
7708 {
7709 gdb_byte buf[2];
7710
7711 if (target_read_memory (*pcptr, buf, 2) == 0)
7712 {
7713 unsigned short inst1;
7714
7715 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7716 if (thumb_insn_size (inst1) == 4)
7717 return ARM_BP_KIND_THUMB2;
7718 }
7719 }
7720
7721 return ARM_BP_KIND_THUMB;
7722 }
7723 else
7724 return ARM_BP_KIND_ARM;
7725
7726 }
7727
7728 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7729
7730 static const gdb_byte *
7731 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7732 {
7733 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7734
7735 switch (kind)
7736 {
7737 case ARM_BP_KIND_ARM:
7738 *size = tdep->arm_breakpoint_size;
7739 return tdep->arm_breakpoint;
7740 case ARM_BP_KIND_THUMB:
7741 *size = tdep->thumb_breakpoint_size;
7742 return tdep->thumb_breakpoint;
7743 case ARM_BP_KIND_THUMB2:
7744 *size = tdep->thumb2_breakpoint_size;
7745 return tdep->thumb2_breakpoint;
7746 default:
7747 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7748 }
7749 }
7750
7751 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7752
7753 static int
7754 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7755 struct regcache *regcache,
7756 CORE_ADDR *pcptr)
7757 {
7758 gdb_byte buf[4];
7759
7760 /* Check that the memory pointed to by PC is readable. */
7761 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7762 {
7763 struct arm_get_next_pcs next_pcs_ctx;
7764
7765 arm_get_next_pcs_ctor (&next_pcs_ctx,
7766 &arm_get_next_pcs_ops,
7767 gdbarch_byte_order (gdbarch),
7768 gdbarch_byte_order_for_code (gdbarch),
7769 0,
7770 regcache);
7771
7772 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7773
7774 /* If *PCPTR is one of the possible next PCs of the current
7775 instruction, use the software single-step computation and get
7776 the Thumb mode from the destination address. */
7777 for (CORE_ADDR pc : next_pcs)
7778 {
7779 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7780 {
7781 if (IS_THUMB_ADDR (pc))
7782 {
7783 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7784 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7785 }
7786 else
7787 return ARM_BP_KIND_ARM;
7788 }
7789 }
7790 }
7791
7792 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7793 }
7794
7795 /* Extract from REGS, which contains the (raw) register state, a
7796 function return value of type TYPE, and copy that, in virtual
7797 format, into VALBUF. */
7798
7799 static void
7800 arm_extract_return_value (struct type *type, struct regcache *regs,
7801 gdb_byte *valbuf)
7802 {
7803 struct gdbarch *gdbarch = regs->arch ();
7804 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7805
7806 if (TYPE_CODE_FLT == TYPE_CODE (type))
7807 {
7808 switch (gdbarch_tdep (gdbarch)->fp_model)
7809 {
7810 case ARM_FLOAT_FPA:
7811 {
7812 /* The value is in register F0 in internal format. We need to
7813 extract the raw value and then convert it to the desired
7814 internal type. */
7815 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7816
7817 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7818 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7819 valbuf, type);
7820 }
7821 break;
7822
7823 case ARM_FLOAT_SOFT_FPA:
7824 case ARM_FLOAT_SOFT_VFP:
7825 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7826 not using the VFP ABI code. */
7827 case ARM_FLOAT_VFP:
7828 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7829 if (TYPE_LENGTH (type) > 4)
7830 regs->cooked_read (ARM_A1_REGNUM + 1,
7831 valbuf + ARM_INT_REGISTER_SIZE);
7832 break;
7833
7834 default:
7835 internal_error (__FILE__, __LINE__,
7836 _("arm_extract_return_value: "
7837 "Floating point model not supported"));
7838 break;
7839 }
7840 }
7841 else if (TYPE_CODE (type) == TYPE_CODE_INT
7842 || TYPE_CODE (type) == TYPE_CODE_CHAR
7843 || TYPE_CODE (type) == TYPE_CODE_BOOL
7844 || TYPE_CODE (type) == TYPE_CODE_PTR
7845 || TYPE_IS_REFERENCE (type)
7846 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7847 {
7848 /* If the type is a plain integer, then the access is
7849 straightforward. Otherwise we have to play around a bit
7850 more. */
7851 int len = TYPE_LENGTH (type);
7852 int regno = ARM_A1_REGNUM;
7853 ULONGEST tmp;
7854
7855 while (len > 0)
7856 {
7857 /* By using store_unsigned_integer we avoid having to do
7858 anything special for small big-endian values. */
7859 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7860 store_unsigned_integer (valbuf,
7861 (len > ARM_INT_REGISTER_SIZE
7862 ? ARM_INT_REGISTER_SIZE : len),
7863 byte_order, tmp);
7864 len -= ARM_INT_REGISTER_SIZE;
7865 valbuf += ARM_INT_REGISTER_SIZE;
7866 }
7867 }
7868 else
7869 {
7870 /* For a structure or union the behaviour is as if the value had
7871 been stored to word-aligned memory and then loaded into
7872 registers with 32-bit load instruction(s). */
7873 int len = TYPE_LENGTH (type);
7874 int regno = ARM_A1_REGNUM;
7875 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7876
7877 while (len > 0)
7878 {
7879 regs->cooked_read (regno++, tmpbuf);
7880 memcpy (valbuf, tmpbuf,
7881 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7882 len -= ARM_INT_REGISTER_SIZE;
7883 valbuf += ARM_INT_REGISTER_SIZE;
7884 }
7885 }
7886 }
7887
7888
7889 /* Will a function return an aggregate type in memory or in a
7890 register? Return 0 if an aggregate type can be returned in a
7891 register, 1 if it must be returned in memory. */
7892
7893 static int
7894 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7895 {
7896 enum type_code code;
7897
7898 type = check_typedef (type);
7899
7900 /* Simple, non-aggregate types (i.e. not including vectors and
7901 complex) are always returned in a register (or registers). */
7902 code = TYPE_CODE (type);
7903 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7904 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7905 return 0;
7906
7907 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7908 {
7909 /* Vector values should be returned using ARM registers if they
7910 are not over 16 bytes. */
7911 return (TYPE_LENGTH (type) > 16);
7912 }
7913
7914 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7915 {
7916 /* The AAPCS says all aggregates not larger than a word are returned
7917 in a register. */
7918 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7919 return 0;
7920
7921 return 1;
7922 }
7923 else
7924 {
7925 int nRc;
7926
7927 /* All aggregate types that won't fit in a register must be returned
7928 in memory. */
7929 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
7930 return 1;
7931
7932 /* In the ARM ABI, "integer" like aggregate types are returned in
7933 registers. For an aggregate type to be integer like, its size
7934 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7935 offset of each addressable subfield must be zero. Note that bit
7936 fields are not addressable, and all addressable subfields of
7937 unions always start at offset zero.
7938
7939 This function is based on the behaviour of GCC 2.95.1.
7940 See: gcc/arm.c: arm_return_in_memory() for details.
7941
7942 Note: All versions of GCC before GCC 2.95.2 do not set up the
7943 parameters correctly for a function returning the following
7944 structure: struct { float f;}; This should be returned in memory,
7945 not a register. Richard Earnshaw sent me a patch, but I do not
7946 know of any way to detect if a function like the above has been
7947 compiled with the correct calling convention. */
7948
7949 /* Assume all other aggregate types can be returned in a register.
7950 Run a check for structures, unions and arrays. */
7951 nRc = 0;
7952
7953 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7954 {
7955 int i;
7956 /* Need to check if this struct/union is "integer" like. For
7957 this to be true, its size must be less than or equal to
7958 ARM_INT_REGISTER_SIZE and the offset of each addressable
7959 subfield must be zero. Note that bit fields are not
7960 addressable, and unions always start at offset zero. If any
7961 of the subfields is a floating point type, the struct/union
7962 cannot be an integer type. */
7963
7964 /* For each field in the object, check:
7965 1) Is it FP? --> yes, nRc = 1;
7966 2) Is it addressable (bitpos != 0) and
7967 not packed (bitsize == 0)?
7968 --> yes, nRc = 1
7969 */
7970
7971 for (i = 0; i < TYPE_NFIELDS (type); i++)
7972 {
7973 enum type_code field_type_code;
7974
7975 field_type_code
7976 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7977 i)));
7978
7979 /* Is it a floating point type field? */
7980 if (field_type_code == TYPE_CODE_FLT)
7981 {
7982 nRc = 1;
7983 break;
7984 }
7985
7986 /* If bitpos != 0, then we have to care about it. */
7987 if (TYPE_FIELD_BITPOS (type, i) != 0)
7988 {
7989 /* Bitfields are not addressable. If the field bitsize is
7990 zero, then the field is not packed. Hence it cannot be
7991 a bitfield or any other packed type. */
7992 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7993 {
7994 nRc = 1;
7995 break;
7996 }
7997 }
7998 }
7999 }
8000
8001 return nRc;
8002 }
8003 }
8004
8005 /* Write into appropriate registers a function return value of type
8006 TYPE, given in virtual format. */
8007
8008 static void
8009 arm_store_return_value (struct type *type, struct regcache *regs,
8010 const gdb_byte *valbuf)
8011 {
8012 struct gdbarch *gdbarch = regs->arch ();
8013 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8014
8015 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8016 {
8017 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8018
8019 switch (gdbarch_tdep (gdbarch)->fp_model)
8020 {
8021 case ARM_FLOAT_FPA:
8022
8023 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8024 regs->cooked_write (ARM_F0_REGNUM, buf);
8025 break;
8026
8027 case ARM_FLOAT_SOFT_FPA:
8028 case ARM_FLOAT_SOFT_VFP:
8029 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8030 not using the VFP ABI code. */
8031 case ARM_FLOAT_VFP:
8032 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8033 if (TYPE_LENGTH (type) > 4)
8034 regs->cooked_write (ARM_A1_REGNUM + 1,
8035 valbuf + ARM_INT_REGISTER_SIZE);
8036 break;
8037
8038 default:
8039 internal_error (__FILE__, __LINE__,
8040 _("arm_store_return_value: Floating "
8041 "point model not supported"));
8042 break;
8043 }
8044 }
8045 else if (TYPE_CODE (type) == TYPE_CODE_INT
8046 || TYPE_CODE (type) == TYPE_CODE_CHAR
8047 || TYPE_CODE (type) == TYPE_CODE_BOOL
8048 || TYPE_CODE (type) == TYPE_CODE_PTR
8049 || TYPE_IS_REFERENCE (type)
8050 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8051 {
8052 if (TYPE_LENGTH (type) <= 4)
8053 {
8054 /* Values of one word or less are zero/sign-extended and
8055 returned in r0. */
8056 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8057 LONGEST val = unpack_long (type, valbuf);
8058
8059 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8060 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8061 }
8062 else
8063 {
8064 /* Integral values greater than one word are stored in consecutive
8065 registers starting with r0. This will always be a multiple of
8066 the register size. */
8067 int len = TYPE_LENGTH (type);
8068 int regno = ARM_A1_REGNUM;
8069
8070 while (len > 0)
8071 {
8072 regs->cooked_write (regno++, valbuf);
8073 len -= ARM_INT_REGISTER_SIZE;
8074 valbuf += ARM_INT_REGISTER_SIZE;
8075 }
8076 }
8077 }
8078 else
8079 {
8080 /* For a structure or union the behaviour is as if the value had
8081 been stored to word-aligned memory and then loaded into
8082 registers with 32-bit load instruction(s). */
8083 int len = TYPE_LENGTH (type);
8084 int regno = ARM_A1_REGNUM;
8085 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8086
8087 while (len > 0)
8088 {
8089 memcpy (tmpbuf, valbuf,
8090 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8091 regs->cooked_write (regno++, tmpbuf);
8092 len -= ARM_INT_REGISTER_SIZE;
8093 valbuf += ARM_INT_REGISTER_SIZE;
8094 }
8095 }
8096 }
8097
8098
8099 /* Handle function return values. */
8100
8101 static enum return_value_convention
8102 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8103 struct type *valtype, struct regcache *regcache,
8104 gdb_byte *readbuf, const gdb_byte *writebuf)
8105 {
8106 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8107 struct type *func_type = function ? value_type (function) : NULL;
8108 enum arm_vfp_cprc_base_type vfp_base_type;
8109 int vfp_base_count;
8110
8111 if (arm_vfp_abi_for_function (gdbarch, func_type)
8112 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8113 {
8114 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8115 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8116 int i;
8117 for (i = 0; i < vfp_base_count; i++)
8118 {
8119 if (reg_char == 'q')
8120 {
8121 if (writebuf)
8122 arm_neon_quad_write (gdbarch, regcache, i,
8123 writebuf + i * unit_length);
8124
8125 if (readbuf)
8126 arm_neon_quad_read (gdbarch, regcache, i,
8127 readbuf + i * unit_length);
8128 }
8129 else
8130 {
8131 char name_buf[4];
8132 int regnum;
8133
8134 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8135 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8136 strlen (name_buf));
8137 if (writebuf)
8138 regcache->cooked_write (regnum, writebuf + i * unit_length);
8139 if (readbuf)
8140 regcache->cooked_read (regnum, readbuf + i * unit_length);
8141 }
8142 }
8143 return RETURN_VALUE_REGISTER_CONVENTION;
8144 }
8145
8146 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8147 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8148 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8149 {
8150 if (tdep->struct_return == pcc_struct_return
8151 || arm_return_in_memory (gdbarch, valtype))
8152 return RETURN_VALUE_STRUCT_CONVENTION;
8153 }
8154 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8155 {
8156 if (arm_return_in_memory (gdbarch, valtype))
8157 return RETURN_VALUE_STRUCT_CONVENTION;
8158 }
8159
8160 if (writebuf)
8161 arm_store_return_value (valtype, regcache, writebuf);
8162
8163 if (readbuf)
8164 arm_extract_return_value (valtype, regcache, readbuf);
8165
8166 return RETURN_VALUE_REGISTER_CONVENTION;
8167 }
8168
8169
8170 static int
8171 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8172 {
8173 struct gdbarch *gdbarch = get_frame_arch (frame);
8174 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8175 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8176 CORE_ADDR jb_addr;
8177 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8178
8179 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8180
8181 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8182 ARM_INT_REGISTER_SIZE))
8183 return 0;
8184
8185 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8186 return 1;
8187 }
8188 /* A call to cmse secure entry function "foo" at "a" is modified by
8189 GNU ld as "b".
8190 a) bl xxxx <foo>
8191
8192 <foo>
8193 xxxx:
8194
8195 b) bl yyyy <__acle_se_foo>
8196
8197 section .gnu.sgstubs:
8198 <foo>
8199 yyyy: sg // secure gateway
8200 b.w xxxx <__acle_se_foo> // original_branch_dest
8201
8202 <__acle_se_foo>
8203 xxxx:
8204
8205 When control is at "b", the PC contains "yyyy" (the sg address), which is a
8206 trampoline and does not exist in the source code. This function returns the
8207 target PC "xxxx". For more details please refer to section 5.4
8208 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8209 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8210 document on www.developer.arm.com. */
8211
8212 static CORE_ADDR
8213 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8214 {
8215 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8216 char *target_name = (char *) alloca (target_len);
8217 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8218
8219 struct bound_minimal_symbol minsym
8220 = lookup_minimal_symbol (target_name, NULL, objfile);
8221
8222 if (minsym.minsym != nullptr)
8223 return BMSYMBOL_VALUE_ADDRESS (minsym);
8224
8225 return 0;
8226 }
8227
8228 /* Return true when SEC points to ".gnu.sgstubs" section. */
8229
8230 static bool
8231 arm_is_sgstubs_section (struct obj_section *sec)
8232 {
8233 return (sec != nullptr
8234 && sec->the_bfd_section != nullptr
8235 && sec->the_bfd_section->name != nullptr
8236 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8237 }
8238
8239 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8240 return the target PC. Otherwise return 0. */
8241
8242 CORE_ADDR
8243 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8244 {
8245 const char *name;
8246 int namelen;
8247 CORE_ADDR start_addr;
8248
8249 /* Find the starting address and name of the function containing the PC. */
8250 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8251 {
8252 /* Trampoline 'bx reg' doesn't belong to any function. Do the
8253 check here. */
8254 start_addr = arm_skip_bx_reg (frame, pc);
8255 if (start_addr != 0)
8256 return start_addr;
8257
8258 return 0;
8259 }
8260
8261 /* If PC is in a Thumb call or return stub, return the address of the
8262 target PC, which is in a register. The thunk functions are called
8263 _call_via_xx, where x is the register name. The possible names
8264 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8265 functions, named __ARM_call_via_r[0-7]. */
8266 if (startswith (name, "_call_via_")
8267 || startswith (name, "__ARM_call_via_"))
8268 {
8269 /* Use the name suffix to determine which register contains the
8270 target PC. */
8271 static const char *table[15] =
8272 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8273 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8274 };
8275 int regno;
8276 int offset = strlen (name) - 2;
8277
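/* Every name in the table above is two characters long, so the suffix
always starts two characters from the end; e.g. (illustrative)
"_call_via_ip" yields "ip", i.e. r12. */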
8278 for (regno = 0; regno <= 14; regno++)
8279 if (strcmp (&name[offset], table[regno]) == 0)
8280 return get_frame_register_unsigned (frame, regno);
8281 }
8282
8283 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8284 non-interworking calls to foo. We could decode the stubs
8285 to find the target but it's easier to use the symbol table. */
8286 namelen = strlen (name);
8287 if (name[0] == '_' && name[1] == '_'
8288 && ((namelen > 2 + strlen ("_from_thumb")
8289 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8290 || (namelen > 2 + strlen ("_from_arm")
8291 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8292 {
8293 char *target_name;
8294 int target_len = namelen - 2;
8295 struct bound_minimal_symbol minsym;
8296 struct objfile *objfile;
8297 struct obj_section *sec;
8298
8299 if (name[namelen - 1] == 'b')
8300 target_len -= strlen ("_from_thumb");
8301 else
8302 target_len -= strlen ("_from_arm");
8303
8304 target_name = (char *) alloca (target_len + 1);
8305 memcpy (target_name, name + 2, target_len);
8306 target_name[target_len] = '\0';
8307
8308 sec = find_pc_section (pc);
8309 objfile = (sec == NULL) ? NULL : sec->objfile;
8310 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8311 if (minsym.minsym != NULL)
8312 return BMSYMBOL_VALUE_ADDRESS (minsym);
8313 else
8314 return 0;
8315 }
8316
8317 struct obj_section *section = find_pc_section (pc);
8318
8319 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8320 if (arm_is_sgstubs_section (section))
8321 return arm_skip_cmse_entry (pc, name, section->objfile);
8322
8323 return 0; /* not a stub */
8324 }
8325
8326 static void
8327 set_arm_command (const char *args, int from_tty)
8328 {
8329 printf_unfiltered (_("\
8330 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8331 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8332 }
8333
8334 static void
8335 show_arm_command (const char *args, int from_tty)
8336 {
8337 cmd_show_list (showarmcmdlist, from_tty, "");
8338 }
8339
8340 static void
8341 arm_update_current_architecture (void)
8342 {
8343 struct gdbarch_info info;
8344
8345 /* If the current architecture is not ARM, we have nothing to do. */
8346 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8347 return;
8348
8349 /* Update the architecture. */
8350 gdbarch_info_init (&info);
8351
8352 if (!gdbarch_update_p (info))
8353 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8354 }
8355
8356 static void
8357 set_fp_model_sfunc (const char *args, int from_tty,
8358 struct cmd_list_element *c)
8359 {
8360 int fp_model;
8361
8362 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8363 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8364 {
8365 arm_fp_model = (enum arm_float_model) fp_model;
8366 break;
8367 }
8368
8369 if (fp_model == ARM_FLOAT_LAST)
8370 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8371 current_fp_model);
8372
8373 arm_update_current_architecture ();
8374 }
8375
8376 static void
8377 show_fp_model (struct ui_file *file, int from_tty,
8378 struct cmd_list_element *c, const char *value)
8379 {
8380 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8381
8382 if (arm_fp_model == ARM_FLOAT_AUTO
8383 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8384 fprintf_filtered (file, _("\
8385 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8386 fp_model_strings[tdep->fp_model]);
8387 else
8388 fprintf_filtered (file, _("\
8389 The current ARM floating point model is \"%s\".\n"),
8390 fp_model_strings[arm_fp_model]);
8391 }
8392
8393 static void
8394 arm_set_abi (const char *args, int from_tty,
8395 struct cmd_list_element *c)
8396 {
8397 int arm_abi;
8398
8399 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8400 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8401 {
8402 arm_abi_global = (enum arm_abi_kind) arm_abi;
8403 break;
8404 }
8405
8406 if (arm_abi == ARM_ABI_LAST)
8407 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8408 arm_abi_string);
8409
8410 arm_update_current_architecture ();
8411 }
8412
8413 static void
8414 arm_show_abi (struct ui_file *file, int from_tty,
8415 struct cmd_list_element *c, const char *value)
8416 {
8417 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8418
8419 if (arm_abi_global == ARM_ABI_AUTO
8420 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8421 fprintf_filtered (file, _("\
8422 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8423 arm_abi_strings[tdep->arm_abi]);
8424 else
8425 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8426 arm_abi_string);
8427 }
8428
8429 static void
8430 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8431 struct cmd_list_element *c, const char *value)
8432 {
8433 fprintf_filtered (file,
8434 _("The current execution mode assumed "
8435 "(when symbols are unavailable) is \"%s\".\n"),
8436 arm_fallback_mode_string);
8437 }
8438
8439 static void
8440 arm_show_force_mode (struct ui_file *file, int from_tty,
8441 struct cmd_list_element *c, const char *value)
8442 {
8443 fprintf_filtered (file,
8444 _("The current execution mode assumed "
8445 "(even when symbols are available) is \"%s\".\n"),
8446 arm_force_mode_string);
8447 }
8448
8449 /* If the user changes the register disassembly style used for info
8450 register and other commands, we have to also switch the style used
8451 in opcodes for disassembly output. This function is run in the "set
8452 arm disassembly" command, and does that. */
8453
8454 static void
8455 set_disassembly_style_sfunc (const char *args, int from_tty,
8456 struct cmd_list_element *c)
8457 {
8458 /* Convert the short style name into the long style name (e.g. reg-names-*)
8459 before calling the generic set_disassembler_options() function. */
8460 std::string long_name = std::string ("reg-names-") + disassembly_style;
8461 set_disassembler_options (&long_name[0]);
8462 }
8463
8464 static void
8465 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8466 struct cmd_list_element *c, const char *value)
8467 {
8468 struct gdbarch *gdbarch = get_current_arch ();
8469 char *options = get_disassembler_options (gdbarch);
8470 const char *style = "";
8471 int len = 0;
8472 const char *opt;
8473
8474 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8475 if (CONST_STRNEQ (opt, "reg-names-"))
8476 {
8477 style = &opt[strlen ("reg-names-")];
8478 len = strcspn (style, ",");
8479 }
8480
8481 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8482 }
8483 \f
8484 /* Return the ARM register name corresponding to register I. */
8485 static const char *
8486 arm_register_name (struct gdbarch *gdbarch, int i)
8487 {
8488 const int num_regs = gdbarch_num_regs (gdbarch);
8489
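/* Pseudo registers follow the raw registers: when present, s0-s31
occupy regnums [num_regs, num_regs + 32) and q0-q15 occupy
[num_regs + 32, num_regs + 48). */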
8490 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8491 && i >= num_regs && i < num_regs + 32)
8492 {
8493 static const char *const vfp_pseudo_names[] = {
8494 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8495 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8496 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8497 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8498 };
8499
8500 return vfp_pseudo_names[i - num_regs];
8501 }
8502
8503 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8504 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8505 {
8506 static const char *const neon_pseudo_names[] = {
8507 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8508 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8509 };
8510
8511 return neon_pseudo_names[i - num_regs - 32];
8512 }
8513
8514 if (i >= ARRAY_SIZE (arm_register_names))
8515 /* These registers are only supported on targets which supply
8516 an XML description. */
8517 return "";
8518
8519 return arm_register_names[i];
8520 }
8521
8522 /* Test whether the coff symbol specific value corresponds to a Thumb
8523 function. */
8524
8525 static int
8526 coff_sym_is_thumb (int val)
8527 {
8528 return (val == C_THUMBEXT
8529 || val == C_THUMBSTAT
8530 || val == C_THUMBEXTFUNC
8531 || val == C_THUMBSTATFUNC
8532 || val == C_THUMBLABEL);
8533 }
8534
8535 /* arm_coff_make_msymbol_special()
8536 arm_elf_make_msymbol_special()
8537
8538 These functions test whether the COFF or ELF symbol corresponds to
8539 an address in thumb code, and set a "special" bit in a minimal
8540 symbol to indicate that it does. */
8541
8542 static void
8543 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8544 {
8545 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8546
8547 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8548 == ST_BRANCH_TO_THUMB)
8549 MSYMBOL_SET_SPECIAL (msym);
8550 }
8551
8552 static void
8553 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8554 {
8555 if (coff_sym_is_thumb (val))
8556 MSYMBOL_SET_SPECIAL (msym);
8557 }
8558
8559 static void
8560 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8561 asymbol *sym)
8562 {
8563 const char *name = bfd_asymbol_name (sym);
8564 struct arm_per_bfd *data;
8565 struct arm_mapping_symbol new_map_sym;
8566
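/* Per the ARM ELF specification, $a marks the start of a sequence of
ARM instructions, $t the start of Thumb instructions and $d the
start of data; only these three mapping symbols are recorded. */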
8567 gdb_assert (name[0] == '$');
8568 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8569 return;
8570
8571 data = arm_bfd_data_key.get (objfile->obfd);
8572 if (data == NULL)
8573 data = arm_bfd_data_key.emplace (objfile->obfd,
8574 objfile->obfd->section_count);
8575 arm_mapping_symbol_vec &map
8576 = data->section_maps[bfd_asymbol_section (sym)->index];
8577
8578 new_map_sym.value = sym->value;
8579 new_map_sym.type = name[1];
8580
8581 /* Insert at the end, the vector will be sorted on first use. */
8582 map.push_back (new_map_sym);
8583 }
8584
8585 static void
8586 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8587 {
8588 struct gdbarch *gdbarch = regcache->arch ();
8589 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8590
8591 /* If necessary, set the T bit. */
8592 if (arm_apcs_32)
8593 {
8594 ULONGEST val, t_bit;
8595 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8596 t_bit = arm_psr_thumb_bit (gdbarch);
8597 if (arm_pc_is_thumb (gdbarch, pc))
8598 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8599 val | t_bit);
8600 else
8601 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8602 val & ~t_bit);
8603 }
8604 }
8605
8606 /* Read the contents of a NEON quad register, by reading from two
8607 double registers. This is used to implement the quad pseudo
8608 registers, and for argument passing in case the quad registers are
8609 missing; vectors are passed in quad registers when using the VFP
8610 ABI, even if a NEON unit is not present. REGNUM is the index of
8611 the quad register, in [0, 15]. */
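/* For example (illustrative), q1 overlaps d2 and d3: d2 holds the
least significant 64 bits of the value. On a big-endian target
those bits occupy the high-addressed half of BUF, hence the OFFSET
computation below. */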
8612
8613 static enum register_status
8614 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8615 int regnum, gdb_byte *buf)
8616 {
8617 char name_buf[4];
8618 gdb_byte reg_buf[8];
8619 int offset, double_regnum;
8620 enum register_status status;
8621
8622 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8623 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8624 strlen (name_buf));
8625
8626 /* d0 is always the least significant half of q0. */
8627 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8628 offset = 8;
8629 else
8630 offset = 0;
8631
8632 status = regcache->raw_read (double_regnum, reg_buf);
8633 if (status != REG_VALID)
8634 return status;
8635 memcpy (buf + offset, reg_buf, 8);
8636
8637 offset = 8 - offset;
8638 status = regcache->raw_read (double_regnum + 1, reg_buf);
8639 if (status != REG_VALID)
8640 return status;
8641 memcpy (buf + offset, reg_buf, 8);
8642
8643 return REG_VALID;
8644 }
8645
8646 static enum register_status
8647 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8648 int regnum, gdb_byte *buf)
8649 {
8650 const int num_regs = gdbarch_num_regs (gdbarch);
8651 char name_buf[4];
8652 gdb_byte reg_buf[8];
8653 int offset, double_regnum;
8654
8655 gdb_assert (regnum >= num_regs);
8656 regnum -= num_regs;
8657
8658 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8659 /* Quad-precision register. */
8660 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8661 else
8662 {
8663 enum register_status status;
8664
8665 /* Single-precision register. */
8666 gdb_assert (regnum < 32);
8667
8668 /* s0 is always the least significant half of d0. */
8669 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8670 offset = (regnum & 1) ? 0 : 4;
8671 else
8672 offset = (regnum & 1) ? 4 : 0;
8673
8674 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8675 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8676 strlen (name_buf));
8677
8678 status = regcache->raw_read (double_regnum, reg_buf);
8679 if (status == REG_VALID)
8680 memcpy (buf, reg_buf + offset, 4);
8681 return status;
8682 }
8683 }
8684
8685 /* Store the contents of BUF to a NEON quad register, by writing to
8686 two double registers. This is used to implement the quad pseudo
8687 registers, and for argument passing in case the quad registers are
8688 missing; vectors are passed in quad registers when using the VFP
8689 ABI, even if a NEON unit is not present. REGNUM is the index
8690 of the quad register, in [0, 15]. */
8691
8692 static void
8693 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8694 int regnum, const gdb_byte *buf)
8695 {
8696 char name_buf[4];
8697 int offset, double_regnum;
8698
8699 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8700 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8701 strlen (name_buf));
8702
8703 /* d0 is always the least significant half of q0. */
8704 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8705 offset = 8;
8706 else
8707 offset = 0;
8708
8709 regcache->raw_write (double_regnum, buf + offset);
8710 offset = 8 - offset;
8711 regcache->raw_write (double_regnum + 1, buf + offset);
8712 }
8713
8714 static void
8715 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8716 int regnum, const gdb_byte *buf)
8717 {
8718 const int num_regs = gdbarch_num_regs (gdbarch);
8719 char name_buf[4];
8720 gdb_byte reg_buf[8];
8721 int offset, double_regnum;
8722
8723 gdb_assert (regnum >= num_regs);
8724 regnum -= num_regs;
8725
8726 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8727 /* Quad-precision register. */
8728 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8729 else
8730 {
8731 /* Single-precision register. */
8732 gdb_assert (regnum < 32);
8733
8734 /* s0 is always the least significant half of d0. */
8735 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8736 offset = (regnum & 1) ? 0 : 4;
8737 else
8738 offset = (regnum & 1) ? 4 : 0;
8739
8740 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8741 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8742 strlen (name_buf));
8743
8744 regcache->raw_read (double_regnum, reg_buf);
8745 memcpy (reg_buf + offset, buf, 4);
8746 regcache->raw_write (double_regnum, reg_buf);
8747 }
8748 }
8749
8750 static struct value *
8751 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8752 {
8753 const int *reg_p = (const int *) baton;
8754 return value_of_register (*reg_p, frame);
8755 }
8756 \f
8757 static enum gdb_osabi
8758 arm_elf_osabi_sniffer (bfd *abfd)
8759 {
8760 unsigned int elfosabi;
8761 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8762
8763 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8764
8765 if (elfosabi == ELFOSABI_ARM)
8766 /* GNU tools use this value. Check note sections in this case,
8767 as well. */
8768 bfd_map_over_sections (abfd,
8769 generic_elf_osabi_sniff_abi_tag_sections,
8770 &osabi);
8771
8772 /* Anything else will be handled by the generic ELF sniffer. */
8773 return osabi;
8774 }
8775
8776 static int
8777 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8778 struct reggroup *group)
8779 {
8780 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8781 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8782 all_reggroup, of course. */
8783 if (regnum == ARM_FPS_REGNUM)
8784 return (group == float_reggroup
8785 || group == save_reggroup
8786 || group == restore_reggroup
8787 || group == all_reggroup);
8788 else
8789 return default_register_reggroup_p (gdbarch, regnum, group);
8790 }
8791
8792 /* For backward-compatibility we allow two 'g' packet lengths with
8793 the remote protocol depending on whether FPA registers are
8794 supplied. M-profile targets do not have FPA registers, but some
8795 stubs already exist in the wild which use a 'g' packet which
8796 supplies them albeit with dummy values. The packet format which
8797 includes FPA registers should be considered deprecated for
8798 M-profile targets. */
8799
8800 static void
8801 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8802 {
8803 if (gdbarch_tdep (gdbarch)->is_m)
8804 {
8805 const target_desc *tdesc;
8806
8807 /* If we know from the executable this is an M-profile target,
8808 cater for remote targets whose register set layout is the
8809 same as the FPA layout. */
8810 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8811 register_remote_g_packet_guess (gdbarch,
8812 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8813 tdesc);
8814
8815 /* The regular M-profile layout. */
8816 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8817 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8818 tdesc);
8819
8820 /* M-profile plus M4F VFP. */
8821 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8822 register_remote_g_packet_guess (gdbarch,
8823 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8824 tdesc);
8825 }
8826
8827 /* Otherwise we don't have a useful guess. */
8828 }
8829
8830 /* Implement the code_of_frame_writable gdbarch method. */
8831
8832 static int
8833 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8834 {
8835 if (gdbarch_tdep (gdbarch)->is_m
8836 && get_frame_type (frame) == SIGTRAMP_FRAME)
8837 {
8838 /* M-profile exception frames return to some magic PCs, which
8839 aren't writable at all. */
8840 return 0;
8841 }
8842 else
8843 return 1;
8844 }
8845
8846 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8847 to be postfixed by a version (e.g. armv7hl). */
8848
8849 static const char *
8850 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8851 {
8852 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8853 return "arm(v[^- ]*)?";
8854 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8855 }
8856
8857 /* Initialize the current architecture based on INFO. If possible,
8858 re-use an architecture from ARCHES, which is a list of
8859 architectures already created during this debugging session.
8860
8861 Called e.g. at program startup, when reading a core file, and when
8862 reading a binary file. */
8863
8864 static struct gdbarch *
8865 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8866 {
8867 struct gdbarch_tdep *tdep;
8868 struct gdbarch *gdbarch;
8869 struct gdbarch_list *best_arch;
8870 enum arm_abi_kind arm_abi = arm_abi_global;
8871 enum arm_float_model fp_model = arm_fp_model;
8872 struct tdesc_arch_data *tdesc_data = NULL;
8873 int i, is_m = 0;
8874 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8875 int have_wmmx_registers = 0;
8876 int have_neon = 0;
8877 int have_fpa_registers = 1;
8878 const struct target_desc *tdesc = info.target_desc;
8879
8880 /* If we have an object to base this architecture on, try to determine
8881 its ABI. */
8882
8883 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8884 {
8885 int ei_osabi, e_flags;
8886
8887 switch (bfd_get_flavour (info.abfd))
8888 {
8889 case bfd_target_coff_flavour:
8890 /* Assume it's an old APCS-style ABI. */
8891 /* XXX WinCE? */
8892 arm_abi = ARM_ABI_APCS;
8893 break;
8894
8895 case bfd_target_elf_flavour:
8896 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8897 e_flags = elf_elfheader (info.abfd)->e_flags;
8898
8899 if (ei_osabi == ELFOSABI_ARM)
8900 {
8901 /* GNU tools used to use this value, but do not for EABI
8902 objects. There's nowhere to tag an EABI version
8903 anyway, so assume APCS. */
8904 arm_abi = ARM_ABI_APCS;
8905 }
8906 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8907 {
8908 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8909
8910 switch (eabi_ver)
8911 {
8912 case EF_ARM_EABI_UNKNOWN:
8913 /* Assume GNU tools. */
8914 arm_abi = ARM_ABI_APCS;
8915 break;
8916
8917 case EF_ARM_EABI_VER4:
8918 case EF_ARM_EABI_VER5:
8919 arm_abi = ARM_ABI_AAPCS;
8920 /* EABI binaries default to VFP float ordering.
8921 They may also contain build attributes that can
8922 be used to identify if the VFP argument-passing
8923 ABI is in use. */
8924 if (fp_model == ARM_FLOAT_AUTO)
8925 {
8926 #ifdef HAVE_ELF
8927 switch (bfd_elf_get_obj_attr_int (info.abfd,
8928 OBJ_ATTR_PROC,
8929 Tag_ABI_VFP_args))
8930 {
8931 case AEABI_VFP_args_base:
8932 /* "The user intended FP parameter/result
8933 passing to conform to AAPCS, base
8934 variant". */
8935 fp_model = ARM_FLOAT_SOFT_VFP;
8936 break;
8937 case AEABI_VFP_args_vfp:
8938 /* "The user intended FP parameter/result
8939 passing to conform to AAPCS, VFP
8940 variant". */
8941 fp_model = ARM_FLOAT_VFP;
8942 break;
8943 case AEABI_VFP_args_toolchain:
8944 /* "The user intended FP parameter/result
8945 passing to conform to tool chain-specific
8946 conventions" - we don't know any such
8947 conventions, so leave it as "auto". */
8948 break;
8949 case AEABI_VFP_args_compatible:
8950 /* "Code is compatible with both the base
8951 and VFP variants; the user did not permit
8952 non-variadic functions to pass FP
8953 parameters/results" - leave it as
8954 "auto". */
8955 break;
8956 default:
8957 /* Attribute value not mentioned in the
8958 November 2012 ABI, so leave it as
8959 "auto". */
8960 break;
8961 }
8962 #else
8963 fp_model = ARM_FLOAT_SOFT_VFP;
8964 #endif
8965 }
8966 break;
8967
8968 default:
8969 /* Leave it as "auto". */
8970 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8971 break;
8972 }
8973
8974 #ifdef HAVE_ELF
8975 /* Detect M-profile programs. This only works if the
8976 executable file includes build attributes; GCC does
8977 copy them to the executable, but e.g. RealView does
8978 not. */
8979 int attr_arch
8980 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8981 Tag_CPU_arch);
8982 int attr_profile
8983 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8984 Tag_CPU_arch_profile);
8985
8986 /* GCC specifies the profile for v6-M; RealView only
8987 specifies the profile for architectures starting with
8988 V7 (as opposed to architectures with a tag
8989 numerically greater than TAG_CPU_ARCH_V7). */
8990 if (!tdesc_has_registers (tdesc)
8991 && (attr_arch == TAG_CPU_ARCH_V6_M
8992 || attr_arch == TAG_CPU_ARCH_V6S_M
8993 || attr_profile == 'M'))
8994 is_m = 1;
8995 #endif
8996 }
8997
8998 if (fp_model == ARM_FLOAT_AUTO)
8999 {
9000 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9001 {
9002 case 0:
9003 /* Leave it as "auto". Strictly speaking this case
9004 means FPA, but almost nobody uses that now, and
9005 many toolchains fail to set the appropriate bits
9006 for the floating-point model they use. */
9007 break;
9008 case EF_ARM_SOFT_FLOAT:
9009 fp_model = ARM_FLOAT_SOFT_FPA;
9010 break;
9011 case EF_ARM_VFP_FLOAT:
9012 fp_model = ARM_FLOAT_VFP;
9013 break;
9014 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9015 fp_model = ARM_FLOAT_SOFT_VFP;
9016 break;
9017 }
9018 }
9019
9020 if (e_flags & EF_ARM_BE8)
9021 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9022
9023 break;
9024
9025 default:
9026 /* Leave it as "auto". */
9027 break;
9028 }
9029 }
9030
9031 /* Check any target description for validity. */
9032 if (tdesc_has_registers (tdesc))
9033 {
9034 /* For most registers we require GDB's default names; but also allow
9035 the numeric names for sp / lr / pc, as a convenience. */
9036 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9037 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9038 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9039
9040 const struct tdesc_feature *feature;
9041 int valid_p;
9042
9043 feature = tdesc_find_feature (tdesc,
9044 "org.gnu.gdb.arm.core");
9045 if (feature == NULL)
9046 {
9047 feature = tdesc_find_feature (tdesc,
9048 "org.gnu.gdb.arm.m-profile");
9049 if (feature == NULL)
9050 return NULL;
9051 else
9052 is_m = 1;
9053 }
9054
9055 tdesc_data = tdesc_data_alloc ();
9056
9057 valid_p = 1;
9058 for (i = 0; i < ARM_SP_REGNUM; i++)
9059 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9060 arm_register_names[i]);
9061 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9062 ARM_SP_REGNUM,
9063 arm_sp_names);
9064 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9065 ARM_LR_REGNUM,
9066 arm_lr_names);
9067 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9068 ARM_PC_REGNUM,
9069 arm_pc_names);
9070 if (is_m)
9071 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9072 ARM_PS_REGNUM, "xpsr");
9073 else
9074 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9075 ARM_PS_REGNUM, "cpsr");
9076
9077 if (!valid_p)
9078 {
9079 tdesc_data_cleanup (tdesc_data);
9080 return NULL;
9081 }
9082
9083 feature = tdesc_find_feature (tdesc,
9084 "org.gnu.gdb.arm.fpa");
9085 if (feature != NULL)
9086 {
9087 valid_p = 1;
9088 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9089 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9090 arm_register_names[i]);
9091 if (!valid_p)
9092 {
9093 tdesc_data_cleanup (tdesc_data);
9094 return NULL;
9095 }
9096 }
9097 else
9098 have_fpa_registers = 0;
9099
9100 feature = tdesc_find_feature (tdesc,
9101 "org.gnu.gdb.xscale.iwmmxt");
9102 if (feature != NULL)
9103 {
9104 static const char *const iwmmxt_names[] = {
9105 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9106 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9107 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9108 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9109 };
9110
9111 valid_p = 1;
9112 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9113 valid_p
9114 &= tdesc_numbered_register (feature, tdesc_data, i,
9115 iwmmxt_names[i - ARM_WR0_REGNUM]);
9116
9117 /* Check for the control registers, but do not fail if they
9118 are missing. */
9119 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9120 tdesc_numbered_register (feature, tdesc_data, i,
9121 iwmmxt_names[i - ARM_WR0_REGNUM]);
9122
9123 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9124 valid_p
9125 &= tdesc_numbered_register (feature, tdesc_data, i,
9126 iwmmxt_names[i - ARM_WR0_REGNUM]);
9127
9128 if (!valid_p)
9129 {
9130 tdesc_data_cleanup (tdesc_data);
9131 return NULL;
9132 }
9133
9134 have_wmmx_registers = 1;
9135 }
9136
9137 /* If we have a VFP unit, check whether the single precision registers
9138 are present. If not, then we will synthesize them as pseudo
9139 registers. */
9140 feature = tdesc_find_feature (tdesc,
9141 "org.gnu.gdb.arm.vfp");
9142 if (feature != NULL)
9143 {
9144 static const char *const vfp_double_names[] = {
9145 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9146 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9147 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9148 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9149 };
9150
9151 /* Require the double precision registers. There must be either
9152 16 or 32. */
9153 valid_p = 1;
9154 for (i = 0; i < 32; i++)
9155 {
9156 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9157 ARM_D0_REGNUM + i,
9158 vfp_double_names[i]);
9159 if (!valid_p)
9160 break;
9161 }
9162 if (!valid_p && i == 16)
9163 valid_p = 1;
9164
9165 /* Also require FPSCR. */
9166 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9167 ARM_FPSCR_REGNUM, "fpscr");
9168 if (!valid_p)
9169 {
9170 tdesc_data_cleanup (tdesc_data);
9171 return NULL;
9172 }
9173
9174 if (tdesc_unnumbered_register (feature, "s0") == 0)
9175 have_vfp_pseudos = 1;
9176
9177 vfp_register_count = i;
9178
9179 /* If we have VFP, also check for NEON. The architecture allows
9180 NEON without VFP (integer vector operations only), but GDB
9181 does not support that. */
9182 feature = tdesc_find_feature (tdesc,
9183 "org.gnu.gdb.arm.neon");
9184 if (feature != NULL)
9185 {
9186 /* NEON requires 32 double-precision registers. */
9187 if (i != 32)
9188 {
9189 tdesc_data_cleanup (tdesc_data);
9190 return NULL;
9191 }
9192
9193 /* If there are quad registers defined by the stub, use
9194 their type; otherwise (normally) provide them with
9195 the default type. */
9196 if (tdesc_unnumbered_register (feature, "q0") == 0)
9197 have_neon_pseudos = 1;
9198
9199 have_neon = 1;
9200 }
9201 }
9202 }
9203
9204 /* If there is already a candidate, use it. */
9205 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9206 best_arch != NULL;
9207 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9208 {
9209 if (arm_abi != ARM_ABI_AUTO
9210 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9211 continue;
9212
9213 if (fp_model != ARM_FLOAT_AUTO
9214 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9215 continue;
9216
9217 /* There are various other properties in tdep that we do not
9218 need to check here: those derived from a target description,
9219 since gdbarches with a different target description are
9220 automatically disqualified. */
9221
9222 /* Do check is_m, though, since it might come from the binary. */
9223 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9224 continue;
9225
9226 /* Found a match. */
9227 break;
9228 }
9229
9230 if (best_arch != NULL)
9231 {
9232 if (tdesc_data != NULL)
9233 tdesc_data_cleanup (tdesc_data);
9234 return best_arch->gdbarch;
9235 }
9236
9237 tdep = XCNEW (struct gdbarch_tdep);
9238 gdbarch = gdbarch_alloc (&info, tdep);
9239
9240 /* Record additional information about the architecture we are defining.
9241 These are gdbarch discriminators, like the OSABI. */
9242 tdep->arm_abi = arm_abi;
9243 tdep->fp_model = fp_model;
9244 tdep->is_m = is_m;
9245 tdep->have_fpa_registers = have_fpa_registers;
9246 tdep->have_wmmx_registers = have_wmmx_registers;
9247 gdb_assert (vfp_register_count == 0
9248 || vfp_register_count == 16
9249 || vfp_register_count == 32);
9250 tdep->vfp_register_count = vfp_register_count;
9251 tdep->have_vfp_pseudos = have_vfp_pseudos;
9252 tdep->have_neon_pseudos = have_neon_pseudos;
9253 tdep->have_neon = have_neon;
9254
9255 arm_register_g_packet_guesses (gdbarch);
9256
9257 /* Breakpoints. */
9258 switch (info.byte_order_for_code)
9259 {
9260 case BFD_ENDIAN_BIG:
9261 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9262 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9263 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9264 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9265
9266 break;
9267
9268 case BFD_ENDIAN_LITTLE:
9269 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9270 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9271 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9272 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9273
9274 break;
9275
9276 default:
9277 internal_error (__FILE__, __LINE__,
9278 _("arm_gdbarch_init: bad byte order for float format"));
9279 }
9280
9281 /* On ARM targets char defaults to unsigned. */
9282 set_gdbarch_char_signed (gdbarch, 0);
9283
9284 /* wchar_t is unsigned under the AAPCS. */
9285 if (tdep->arm_abi == ARM_ABI_AAPCS)
9286 set_gdbarch_wchar_signed (gdbarch, 0);
9287 else
9288 set_gdbarch_wchar_signed (gdbarch, 1);
9289
9290 /* Compute type alignment. */
9291 set_gdbarch_type_align (gdbarch, arm_type_align);
9292
9293 /* Note: for displaced stepping, this includes the breakpoint, and one word
9294 of additional scratch space. This setting isn't used for anything beside
9295 displaced stepping at present. */
9296 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9297
9298 /* This should be low enough for everything. */
9299 tdep->lowest_pc = 0x20;
9300 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9301
9302 /* The default, for both APCS and AAPCS, is to return small
9303 structures in registers. */
9304 tdep->struct_return = reg_struct_return;
9305
9306 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9307 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9308
9309 if (is_m)
9310 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9311
9312 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9313
9314 frame_base_set_default (gdbarch, &arm_normal_base);
9315
9316 /* Address manipulation. */
9317 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9318
9319 /* Advance PC across function entry code. */
9320 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9321
9322 /* Detect whether PC is at a point where the stack has been destroyed. */
9323 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9324
9325 /* Skip trampolines. */
9326 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9327
9328 /* The stack grows downward. */
9329 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9330
9331 /* Breakpoint manipulation. */
9332 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9333 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9334 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9335 arm_breakpoint_kind_from_current_state);
9336
9337 /* Information about registers, etc. */
9338 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9339 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9340 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9341 set_gdbarch_register_type (gdbarch, arm_register_type);
9342 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9343
9344 /* This "info float" is FPA-specific. Use the generic version if we
9345 do not have FPA. */
9346 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9347 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9348
9349 /* Internal <-> external register number maps. */
9350 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9351 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9352
9353 set_gdbarch_register_name (gdbarch, arm_register_name);
9354
9355 /* Returning results. */
9356 set_gdbarch_return_value (gdbarch, arm_return_value);
9357
9358 /* Disassembly. */
9359 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9360
9361 /* Minsymbol frobbing. */
9362 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9363 set_gdbarch_coff_make_msymbol_special (gdbarch,
9364 arm_coff_make_msymbol_special);
9365 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9366
9367 /* Thumb-2 IT block support. */
9368 set_gdbarch_adjust_breakpoint_address (gdbarch,
9369 arm_adjust_breakpoint_address);
9370
9371 /* Virtual tables. */
9372 set_gdbarch_vbit_in_delta (gdbarch, 1);
9373
9374 /* Hook in the ABI-specific overrides, if they have been registered. */
9375 gdbarch_init_osabi (info, gdbarch);
9376
9377 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9378
9379 /* Add some default predicates. */
9380 if (is_m)
9381 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9382 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9383 dwarf2_append_unwinders (gdbarch);
9384 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9385 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9386 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9387
9388 /* Now we have tuned the configuration, set a few final things,
9389 based on what the OS ABI has told us. */
9390
9391 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9392 binaries are always marked. */
9393 if (tdep->arm_abi == ARM_ABI_AUTO)
9394 tdep->arm_abi = ARM_ABI_APCS;
9395
9396 /* Watchpoints are not steppable. */
9397 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9398
9399 /* We used to default to FPA for generic ARM, but almost nobody
9400 uses that now, and we now provide a way for the user to force
9401 the model. So default to the most useful variant. */
9402 if (tdep->fp_model == ARM_FLOAT_AUTO)
9403 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9404
9405 if (tdep->jb_pc >= 0)
9406 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9407
9408 /* Floating point sizes and format. */
9409 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9410 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9411 {
9412 set_gdbarch_double_format
9413 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9414 set_gdbarch_long_double_format
9415 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9416 }
9417 else
9418 {
9419 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9420 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9421 }
9422
9423 if (have_vfp_pseudos)
9424 {
9425 /* NOTE: These are the only pseudo registers used by
9426 the ARM target at the moment. If more are added, a
9427 little more care in numbering will be needed. */
9428
9429 int num_pseudos = 32;
9430 if (have_neon_pseudos)
9431 num_pseudos += 16;
9432 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9433 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9434 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9435 }
9436
9437 if (tdesc_data)
9438 {
9439 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9440
9441 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9442
9443 /* Override tdesc_register_type to adjust the types of VFP
9444 registers for NEON. */
9445 set_gdbarch_register_type (gdbarch, arm_register_type);
9446 }
9447
9448 /* Add standard register aliases. We add aliases even for those
9449 names which are used by the current architecture - it's simpler,
9450 and does no harm, since nothing ever lists user registers. */
9451 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9452 user_reg_add (gdbarch, arm_register_aliases[i].name,
9453 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9454
9455 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9456 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9457
9458 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9459
9460 return gdbarch;
9461 }
9462
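/* Implement the "dump_tdep" gdbarch hook (registered with
   gdbarch_register in _initialize_arm_tdep below); it is reached via
   "maint print architecture" and prints the ARM-specific fields of
   the gdbarch_tdep structure.  */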
9463 static void
9464 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9465 {
9466 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9467
9468 if (tdep == NULL)
9469 return;
9470
9471 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9472 (int) tdep->fp_model);
9473 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9474 (int) tdep->have_fpa_registers);
9475 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9476 (int) tdep->have_wmmx_registers);
9477 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9478 (int) tdep->vfp_register_count);
9479 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9480 (int) tdep->have_vfp_pseudos);
9481 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9482 (int) tdep->have_neon_pseudos);
9483 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9484 (int) tdep->have_neon);
9485 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9486 (unsigned long) tdep->lowest_pc);
9487 }
9488
9489 #if GDB_SELF_TEST
9490 namespace selftests
9491 {
9492 static void arm_record_test (void);
9493 }
9494 #endif
9495
9496 void _initialize_arm_tdep ();
9497 void
9498 _initialize_arm_tdep ()
9499 {
9500 long length;
9501 int i, j;
9502 char regdesc[1024], *rdptr = regdesc;
9503 size_t rest = sizeof (regdesc);
9504
9505 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9506
9507 /* Add ourselves to objfile event chain. */
9508 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9509
9510 /* Register an ELF OS ABI sniffer for ARM binaries. */
9511 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9512 bfd_target_elf_flavour,
9513 arm_elf_osabi_sniffer);
9514
9515 /* Add root prefix command for all "set arm"/"show arm" commands. */
9516 add_prefix_cmd ("arm", no_class, set_arm_command,
9517 _("Various ARM-specific commands."),
9518 &setarmcmdlist, "set arm ", 0, &setlist);
9519
9520 add_prefix_cmd ("arm", no_class, show_arm_command,
9521 _("Various ARM-specific commands."),
9522 &showarmcmdlist, "show arm ", 0, &showlist);
9523
9524
9525 arm_disassembler_options = xstrdup ("reg-names-std");
9526 const disasm_options_t *disasm_options
9527 = &disassembler_options_arm ()->options;
9528 int num_disassembly_styles = 0;
9529 for (i = 0; disasm_options->name[i] != NULL; i++)
9530 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9531 num_disassembly_styles++;
9532
9533 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9534 valid_disassembly_styles = XNEWVEC (const char *,
9535 num_disassembly_styles + 1);
9536 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9537 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9538 {
9539 size_t offset = strlen ("reg-names-");
9540 const char *style = disasm_options->name[i];
9541 valid_disassembly_styles[j++] = &style[offset];
9542 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9543 disasm_options->description[i]);
9544 rdptr += length;
9545 rest -= length;
9546 }
9547 /* Mark the end of valid options. */
9548 valid_disassembly_styles[num_disassembly_styles] = NULL;
9549
9550 /* Create the help text. */
9551 std::string helptext = string_printf ("%s%s%s",
9552 _("The valid values are:\n"),
9553 regdesc,
9554 _("The default is \"std\"."));
9555
9556 add_setshow_enum_cmd("disassembler", no_class,
9557 valid_disassembly_styles, &disassembly_style,
9558 _("Set the disassembly style."),
9559 _("Show the disassembly style."),
9560 helptext.c_str (),
9561 set_disassembly_style_sfunc,
9562 show_disassembly_style_sfunc,
9563 &setarmcmdlist, &showarmcmdlist);
9564
9565 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9566 _("Set usage of ARM 32-bit mode."),
9567 _("Show usage of ARM 32-bit mode."),
9568 _("When off, a 26-bit PC will be used."),
9569 NULL,
9570 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9571 mode is %s. */
9572 &setarmcmdlist, &showarmcmdlist);
9573
9574 /* Add a command to allow the user to force the FPU model. */
9575 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9576 _("Set the floating point type."),
9577 _("Show the floating point type."),
9578 _("auto - Determine the FP typefrom the OS-ABI.\n\
9579 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9580 fpa - FPA co-processor (GCC compiled).\n\
9581 softvfp - Software FP with pure-endian doubles.\n\
9582 vfp - VFP co-processor."),
9583 set_fp_model_sfunc, show_fp_model,
9584 &setarmcmdlist, &showarmcmdlist);
9585
9586 /* Add a command to allow the user to force the ABI. */
9587 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9588 _("Set the ABI."),
9589 _("Show the ABI."),
9590 NULL, arm_set_abi, arm_show_abi,
9591 &setarmcmdlist, &showarmcmdlist);
9592
9593 /* Add two commands to allow the user to force the assumed
9594 execution mode. */
9595 add_setshow_enum_cmd ("fallback-mode", class_support,
9596 arm_mode_strings, &arm_fallback_mode_string,
9597 _("Set the mode assumed when symbols are unavailable."),
9598 _("Show the mode assumed when symbols are unavailable."),
9599 NULL, NULL, arm_show_fallback_mode,
9600 &setarmcmdlist, &showarmcmdlist);
9601 add_setshow_enum_cmd ("force-mode", class_support,
9602 arm_mode_strings, &arm_force_mode_string,
9603 _("Set the mode assumed even when symbols are available."),
9604 _("Show the mode assumed even when symbols are available."),
9605 NULL, NULL, arm_show_force_mode,
9606 &setarmcmdlist, &showarmcmdlist);
9607
9608 /* Debugging flag. */
9609 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9610 _("Set ARM debugging."),
9611 _("Show ARM debugging."),
9612 _("When on, arm-specific debugging is enabled."),
9613 NULL,
9614 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9615 &setdebuglist, &showdebuglist);
9616
9617 #if GDB_SELF_TEST
9618 selftests::register_test ("arm-record", selftests::arm_record_test);
9619 #endif
9620
9621 }
9622
9623 /* ARM-reversible process record data structures. */
9624
9625 #define ARM_INSN_SIZE_BYTES 4
9626 #define THUMB_INSN_SIZE_BYTES 2
9627 #define THUMB2_INSN_SIZE_BYTES 4
9628
9629
9630 /* Position of the bit within a 32-bit ARM instruction
9631 that defines whether the instruction is a load or store. */
9632 #define INSN_S_L_BIT_NUM 20
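/* For example, bit (insn, INSN_S_L_BIT_NUM) is 1 for the load forms
   (LDR, LDRB, ...) and 0 for the store forms (STR, STRB, ...); see its
   uses in arm_record_ld_st_imm_offset and arm_record_ld_st_reg_offset
   below.  */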
9633
9634 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9635 do \
9636 { \
9637 unsigned int reg_len = LENGTH; \
9638 if (reg_len) \
9639 { \
9640 REGS = XNEWVEC (uint32_t, reg_len); \
9641 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9642 } \
9643 } \
9644 while (0)
9645
9646 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9647 do \
9648 { \
9649 unsigned int mem_len = LENGTH; \
9650 if (mem_len) \
9651 { \
9652 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9653 memcpy(&MEMS->len, &RECORD_BUF[0], \
9654 sizeof(struct arm_mem_r) * LENGTH); \
9655 } \
9656 } \
9657 while (0)
9658
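/* A minimal usage sketch (mirroring the record functions below): a
   decoder fills local record_buf[] / record_buf_mem[] arrays and sets
   reg_rec_count / mem_rec_count, then copies them into heap storage
   attached to the insn_decode_record:

     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);  */
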
9659 /* Check whether the insn has already been recorded, i.e. whether any register or memory records exist for it (boolean expression). */
9660 #define INSN_RECORDED(ARM_RECORD) \
9661 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9662
9663 /* ARM memory record structure. */
9664 struct arm_mem_r
9665 {
9666 uint32_t len; /* Record length. */
9667 uint32_t addr; /* Memory address. */
9668 };
9669
9670 /* An ARM instruction record contains the opcode and execution state
9671 of the current insn (filled in before entry to decode_insn ()), and
9672 the lists of to-be-modified registers and memory blocks (filled in
9673 on return from decode_insn ()). */
9674
9675 typedef struct insn_decode_record_t
9676 {
9677 struct gdbarch *gdbarch;
9678 struct regcache *regcache;
9679 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9680 uint32_t arm_insn; /* Should accommodate thumb. */
9681 uint32_t cond; /* Condition code. */
9682 uint32_t opcode; /* Insn opcode. */
9683 uint32_t decode; /* Insn decode bits. */
9684 uint32_t mem_rec_count; /* Number of memory records. */
9685 uint32_t reg_rec_count; /* Number of register records. */
9686 uint32_t *arm_regs; /* Registers to be saved for this record. */
9687 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9688 } insn_decode_record;
9689
9690
9691 /* Check the ARM SBZ (should-be-zero) and SBO (should-be-one) mandatory fields. */
9692
9693 static int
9694 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9695 {
9696 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9697
9698 if (!len)
9699 return 1;
9700
9701 if (!sbo)
9702 ones = ~ones;
9703
9704 while (ones)
9705 {
9706 if (!(ones & sbo))
9707 {
9708 return 0;
9709 }
9710 ones = ones >> 1;
9711 }
9712 return 1;
9713 }
9714
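/* For example, the call sbo_sbz (insn, 9, 12, 1) used below is intended
   to verify that the 12 bits starting at bit 8 of INSN are all ones
   (SBO); passing 0 as SBO checks should-be-zero (SBZ) bits instead.
   BIT_NUM is 1-based, hence the "bit_num - 1" above.  */
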
9715 enum arm_record_result
9716 {
9717 ARM_RECORD_SUCCESS = 0,
9718 ARM_RECORD_FAILURE = 1
9719 };
9720
9721 typedef enum
9722 {
9723 ARM_RECORD_STRH=1,
9724 ARM_RECORD_STRD
9725 } arm_record_strx_t;
9726
9727 typedef enum
9728 {
9729 ARM_RECORD=1,
9730 THUMB_RECORD,
9731 THUMB2_RECORD
9732 } record_type_t;
9733
9734
9735 static int
9736 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9737 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9738 {
9739
9740 struct regcache *reg_cache = arm_insn_r->regcache;
9741 ULONGEST u_regval[2] = {0};
9742
9743 uint32_t reg_src1 = 0, reg_src2 = 0;
9744 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9745
9746 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9747 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9748
9749 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9750 {
9751 /* 1) Handle misc store, immediate offset. */
9752 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9753 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9754 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9755 regcache_raw_read_unsigned (reg_cache, reg_src1,
9756 &u_regval[0]);
9757 if (ARM_PC_REGNUM == reg_src1)
9758 {
9759 /* If R15 was used as Rn, the effective value is the current PC+8. */
9760 u_regval[0] = u_regval[0] + 8;
9761 }
9762 offset_8 = (immed_high << 4) | immed_low;
9763 /* Calculate target store address. */
9764 if (14 == arm_insn_r->opcode)
9765 {
9766 tgt_mem_addr = u_regval[0] + offset_8;
9767 }
9768 else
9769 {
9770 tgt_mem_addr = u_regval[0] - offset_8;
9771 }
9772 if (ARM_RECORD_STRH == str_type)
9773 {
9774 record_buf_mem[0] = 2;
9775 record_buf_mem[1] = tgt_mem_addr;
9776 arm_insn_r->mem_rec_count = 1;
9777 }
9778 else if (ARM_RECORD_STRD == str_type)
9779 {
9780 record_buf_mem[0] = 4;
9781 record_buf_mem[1] = tgt_mem_addr;
9782 record_buf_mem[2] = 4;
9783 record_buf_mem[3] = tgt_mem_addr + 4;
9784 arm_insn_r->mem_rec_count = 2;
9785 }
9786 }
9787 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9788 {
9789 /* 2) Store, register offset. */
9790 /* Get Rm. */
9791 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9792 /* Get Rn. */
9793 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9794 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9795 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9796 if (15 == reg_src2)
9797 {
9798 /* If R15 was used as Rn, the effective value is the current PC+8. */
9799 u_regval[0] = u_regval[0] + 8;
9800 }
9801 /* Calculate target store address, Rn +/- Rm, register offset. */
9802 if (12 == arm_insn_r->opcode)
9803 {
9804 tgt_mem_addr = u_regval[0] + u_regval[1];
9805 }
9806 else
9807 {
9808 tgt_mem_addr = u_regval[1] - u_regval[0];
9809 }
9810 if (ARM_RECORD_STRH == str_type)
9811 {
9812 record_buf_mem[0] = 2;
9813 record_buf_mem[1] = tgt_mem_addr;
9814 arm_insn_r->mem_rec_count = 1;
9815 }
9816 else if (ARM_RECORD_STRD == str_type)
9817 {
9818 record_buf_mem[0] = 4;
9819 record_buf_mem[1] = tgt_mem_addr;
9820 record_buf_mem[2] = 4;
9821 record_buf_mem[3] = tgt_mem_addr + 4;
9822 arm_insn_r->mem_rec_count = 2;
9823 }
9824 }
9825 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9826 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9827 {
9828 /* 3) Store, immediate pre-indexed. */
9829 /* 5) Store, immediate post-indexed. */
9830 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9831 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9832 offset_8 = (immed_high << 4) | immed_low;
9833 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9834 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9835 /* Calculate target store address, Rn +/- Rm, register offset. */
9836 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9837 {
9838 tgt_mem_addr = u_regval[0] + offset_8;
9839 }
9840 else
9841 {
9842 tgt_mem_addr = u_regval[0] - offset_8;
9843 }
9844 if (ARM_RECORD_STRH == str_type)
9845 {
9846 record_buf_mem[0] = 2;
9847 record_buf_mem[1] = tgt_mem_addr;
9848 arm_insn_r->mem_rec_count = 1;
9849 }
9850 else if (ARM_RECORD_STRD == str_type)
9851 {
9852 record_buf_mem[0] = 4;
9853 record_buf_mem[1] = tgt_mem_addr;
9854 record_buf_mem[2] = 4;
9855 record_buf_mem[3] = tgt_mem_addr + 4;
9856 arm_insn_r->mem_rec_count = 2;
9857 }
9858 /* Record Rn also as it changes. */
9859 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9860 arm_insn_r->reg_rec_count = 1;
9861 }
9862 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9863 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9864 {
9865 /* 4) Store, register pre-indexed. */
9866 /* 6) Store, register post-indexed. */
9867 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9868 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9869 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9870 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9871 /* Calculate target store address, Rn +/- Rm, register offset. */
9872 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9873 {
9874 tgt_mem_addr = u_regval[0] + u_regval[1];
9875 }
9876 else
9877 {
9878 tgt_mem_addr = u_regval[1] - u_regval[0];
9879 }
9880 if (ARM_RECORD_STRH == str_type)
9881 {
9882 record_buf_mem[0] = 2;
9883 record_buf_mem[1] = tgt_mem_addr;
9884 arm_insn_r->mem_rec_count = 1;
9885 }
9886 else if (ARM_RECORD_STRD == str_type)
9887 {
9888 record_buf_mem[0] = 4;
9889 record_buf_mem[1] = tgt_mem_addr;
9890 record_buf_mem[2] = 4;
9891 record_buf_mem[3] = tgt_mem_addr + 4;
9892 arm_insn_r->mem_rec_count = 2;
9893 }
9894 /* Record Rn also as it changes. */
9895 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9896 arm_insn_r->reg_rec_count = 1;
9897 }
9898 return 0;
9899 }
9900
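/* Worked example (a sketch, not exhaustive): for a STRH with positive
   immediate offset (opcode field 14 above), the target address is
   Rn + ((immed_high << 4) | immed_low) and a single 2-byte memory
   block is recorded there (record_buf_mem = {2, tgt_mem_addr},
   mem_rec_count = 1); the STRD variants record two 4-byte blocks.  */
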
9901 /* Handling ARM extension space insns. */
9902
9903 static int
9904 arm_record_extension_space (insn_decode_record *arm_insn_r)
9905 {
9906 int ret = 0; /* Return value: -1: record failure; 0: success. */
9907 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9908 uint32_t record_buf[8], record_buf_mem[8];
9909 uint32_t reg_src1 = 0;
9910 struct regcache *reg_cache = arm_insn_r->regcache;
9911 ULONGEST u_regval = 0;
9912
9913 gdb_assert (!INSN_RECORDED(arm_insn_r));
9914 /* Handle unconditional insn extension space. */
9915
9916 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9917 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9918 if (arm_insn_r->cond)
9919 {
9920 /* PLD has no effect on architectural state, it just affects
9921 the caches. */
9922 if (5 == ((opcode1 & 0xE0) >> 5))
9923 {
9924 /* BLX(1) */
9925 record_buf[0] = ARM_PS_REGNUM;
9926 record_buf[1] = ARM_LR_REGNUM;
9927 arm_insn_r->reg_rec_count = 2;
9928 }
9929 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9930 }
9931
9932
9933 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9934 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9935 {
9936 ret = -1;
9937 /* Undefined instruction on ARM V5; need to handle if later
9938 versions define it. */
9939 }
9940
9941 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9942 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9943 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9944
9945 /* Handle arithmetic insn extension space. */
9946 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9947 && !INSN_RECORDED(arm_insn_r))
9948 {
9949 /* Handle MLA(S) and MUL(S). */
9950 if (in_inclusive_range (insn_op1, 0U, 3U))
9951 {
9952 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9953 record_buf[1] = ARM_PS_REGNUM;
9954 arm_insn_r->reg_rec_count = 2;
9955 }
9956 else if (in_inclusive_range (insn_op1, 4U, 15U))
9957 {
9958 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9959 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9960 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9961 record_buf[2] = ARM_PS_REGNUM;
9962 arm_insn_r->reg_rec_count = 3;
9963 }
9964 }
9965
9966 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9967 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9968 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9969
9970 /* Handle control insn extension space. */
9971
9972 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9973 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9974 {
9975 if (!bit (arm_insn_r->arm_insn,25))
9976 {
9977 if (!bits (arm_insn_r->arm_insn, 4, 7))
9978 {
9979 if ((0 == insn_op1) || (2 == insn_op1))
9980 {
9981 /* MRS. */
9982 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9983 arm_insn_r->reg_rec_count = 1;
9984 }
9985 else if (1 == insn_op1)
9986 {
9987 /* CPSR is going to be changed. */
9988 record_buf[0] = ARM_PS_REGNUM;
9989 arm_insn_r->reg_rec_count = 1;
9990 }
9991 else if (3 == insn_op1)
9992 {
9993 /* SPSR is going to be changed. */
9994 /* We need to get SPSR value, which is yet to be done. */
9995 return -1;
9996 }
9997 }
9998 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9999 {
10000 if (1 == insn_op1)
10001 {
10002 /* BX. */
10003 record_buf[0] = ARM_PS_REGNUM;
10004 arm_insn_r->reg_rec_count = 1;
10005 }
10006 else if (3 == insn_op1)
10007 {
10008 /* CLZ. */
10009 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10010 arm_insn_r->reg_rec_count = 1;
10011 }
10012 }
10013 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10014 {
10015 /* BLX. */
10016 record_buf[0] = ARM_PS_REGNUM;
10017 record_buf[1] = ARM_LR_REGNUM;
10018 arm_insn_r->reg_rec_count = 2;
10019 }
10020 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10021 {
10022 /* QADD, QSUB, QDADD, QDSUB */
10023 record_buf[0] = ARM_PS_REGNUM;
10024 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10025 arm_insn_r->reg_rec_count = 2;
10026 }
10027 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10028 {
10029 /* BKPT. */
10030 record_buf[0] = ARM_PS_REGNUM;
10031 record_buf[1] = ARM_LR_REGNUM;
10032 arm_insn_r->reg_rec_count = 2;
10033
10034 /* Save SPSR also; how? */
10035 return -1;
10036 }
10037 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10038 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10039 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10040 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10041 )
10042 {
10043 if (0 == insn_op1 || 1 == insn_op1)
10044 {
10045 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10046 /* We don't do the optimization for SMULW<y>, where only
10047 Rd is needed. */
10048 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10049 record_buf[1] = ARM_PS_REGNUM;
10050 arm_insn_r->reg_rec_count = 2;
10051 }
10052 else if (2 == insn_op1)
10053 {
10054 /* SMLAL<x><y>. */
10055 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10056 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10057 arm_insn_r->reg_rec_count = 2;
10058 }
10059 else if (3 == insn_op1)
10060 {
10061 /* SMUL<x><y>. */
10062 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10063 arm_insn_r->reg_rec_count = 1;
10064 }
10065 }
10066 }
10067 else
10068 {
10069 /* MSR : immediate form. */
10070 if (1 == insn_op1)
10071 {
10072 /* CPSR is going to be changed. */
10073 record_buf[0] = ARM_PS_REGNUM;
10074 arm_insn_r->reg_rec_count = 1;
10075 }
10076 else if (3 == insn_op1)
10077 {
10078 /* SPSR is going to be changed. */
10079 /* We need to get SPSR value, which is yet to be done. */
10080 return -1;
10081 }
10082 }
10083 }
10084
10085 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10086 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10087 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10088
10089 /* Handle load/store insn extension space. */
10090
10091 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10092 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10093 && !INSN_RECORDED(arm_insn_r))
10094 {
10095 /* SWP/SWPB. */
10096 if (0 == insn_op1)
10097 {
10098 /* This insn changes a register and memory as well. */
10099 /* SWP or SWPB insn. */
10100 /* Get memory address given by Rn. */
10101 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10102 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10103 /* SWP insn?  It swaps a word. */
10104 if (8 == arm_insn_r->opcode)
10105 {
10106 record_buf_mem[0] = 4;
10107 }
10108 else
10109 {
10110 /* SWPB insn swaps only a byte. */
10111 record_buf_mem[0] = 1;
10112 }
10113 record_buf_mem[1] = u_regval;
10114 arm_insn_r->mem_rec_count = 1;
10115 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10116 arm_insn_r->reg_rec_count = 1;
10117 }
10118 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10119 {
10120 /* STRH. */
10121 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10122 ARM_RECORD_STRH);
10123 }
10124 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10125 {
10126 /* LDRD. */
10127 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10128 record_buf[1] = record_buf[0] + 1;
10129 arm_insn_r->reg_rec_count = 2;
10130 }
10131 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10132 {
10133 /* STRD. */
10134 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10135 ARM_RECORD_STRD);
10136 }
10137 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10138 {
10139 /* LDRH, LDRSB, LDRSH. */
10140 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10141 arm_insn_r->reg_rec_count = 1;
10142 }
10143
10144 }
10145
10146 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10147 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10148 && !INSN_RECORDED(arm_insn_r))
10149 {
10150 ret = -1;
10151 /* Handle coprocessor insn extension space. */
10152 }
10153
10154 /* To be done for ARMv5 and later; as of now we return -1. */
10155 if (-1 == ret)
10156 return ret;
10157
10158 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10159 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10160
10161 return ret;
10162 }
10163
10164 /* Handling opcode 000 insns. */
10165
10166 static int
10167 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10168 {
10169 struct regcache *reg_cache = arm_insn_r->regcache;
10170 uint32_t record_buf[8], record_buf_mem[8];
10171 ULONGEST u_regval[2] = {0};
10172
10173 uint32_t reg_src1 = 0;
10174 uint32_t opcode1 = 0;
10175
10176 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10177 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10178 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10179
10180 if (!((opcode1 & 0x19) == 0x10))
10181 {
10182 /* Data-processing (register) and data-processing (register-shifted
10183 register). */
10184 /* In all 11 shifter-operand modes, the insn modifies the destination
10185 register, which is specified by bits 12-15. */
10186 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10187 record_buf[1] = ARM_PS_REGNUM;
10188 arm_insn_r->reg_rec_count = 2;
10189 }
10190 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10191 {
10192 /* Miscellaneous instructions */
10193
10194 if (3 == arm_insn_r->decode && 0x12 == opcode1
10195 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10196 {
10197 /* Handle BLX, branch and link/exchange. */
10198 if (9 == arm_insn_r->opcode)
10199 {
10200 /* The instruction set state is chosen by copying bit[0] of Rm into
10201 the T bit of CPSR, and R14 stores the return address. */
10202 record_buf[0] = ARM_PS_REGNUM;
10203 record_buf[1] = ARM_LR_REGNUM;
10204 arm_insn_r->reg_rec_count = 2;
10205 }
10206 }
10207 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10208 {
10209 /* Handle enhanced software breakpoint insn, BKPT. */
10210 /* CPSR is changed so that execution continues in ARM state, with
10211 normal interrupts disabled, entering abort mode. */
10212 /* The PC is set according to the high vector configuration. */
10213 /* If the user hits the breakpoint and then reverses execution, we
10214 need to go back with the previous CPSR and
10215 Program Counter. */
10216 record_buf[0] = ARM_PS_REGNUM;
10217 record_buf[1] = ARM_LR_REGNUM;
10218 arm_insn_r->reg_rec_count = 2;
10219
10220 /* Save SPSR also; how? */
10221 return -1;
10222 }
10223 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10224 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10225 {
10226 /* Handle BX, branch and exchange. */
10227 /* The T bit of CPSR is set from bit[0] of Rm. */
10228 record_buf[0] = ARM_PS_REGNUM;
10229 arm_insn_r->reg_rec_count = 1;
10230 }
10231 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10232 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10233 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10234 {
10235 /* Count leading zeros: CLZ. */
10236 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10237 arm_insn_r->reg_rec_count = 1;
10238 }
10239 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10240 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10241 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10242 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10243 {
10244 /* Handle MRS insn. */
10245 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10246 arm_insn_r->reg_rec_count = 1;
10247 }
10248 }
10249 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10250 {
10251 /* Multiply and multiply-accumulate */
10252
10253 /* Handle multiply instructions. */
10254 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10255 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10256 {
10257 /* Handle MLA and MUL. */
10258 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10259 record_buf[1] = ARM_PS_REGNUM;
10260 arm_insn_r->reg_rec_count = 2;
10261 }
10262 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10263 {
10264 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10265 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10266 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10267 record_buf[2] = ARM_PS_REGNUM;
10268 arm_insn_r->reg_rec_count = 3;
10269 }
10270 }
10271 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10272 {
10273 /* Synchronization primitives */
10274
10275 /* Handling SWP, SWPB. */
10276 /* This insn changes a register and memory as well. */
10277 /* SWP or SWPB insn. */
10278
10279 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10280 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10281 /* SWP insn?  It swaps a word. */
10282 if (8 == arm_insn_r->opcode)
10283 {
10284 record_buf_mem[0] = 4;
10285 }
10286 else
10287 {
10288 /* SWPB insn swaps only a byte. */
10289 record_buf_mem[0] = 1;
10290 }
10291 record_buf_mem[1] = u_regval[0];
10292 arm_insn_r->mem_rec_count = 1;
10293 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10294 arm_insn_r->reg_rec_count = 1;
10295 }
10296 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10297 || 15 == arm_insn_r->decode)
10298 {
10299 if ((opcode1 & 0x12) == 2)
10300 {
10301 /* Extra load/store (unprivileged) */
10302 return -1;
10303 }
10304 else
10305 {
10306 /* Extra load/store */
10307 switch (bits (arm_insn_r->arm_insn, 5, 6))
10308 {
10309 case 1:
10310 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10311 {
10312 /* STRH (register), STRH (immediate) */
10313 arm_record_strx (arm_insn_r, &record_buf[0],
10314 &record_buf_mem[0], ARM_RECORD_STRH);
10315 }
10316 else if ((opcode1 & 0x05) == 0x1)
10317 {
10318 /* LDRH (register) */
10319 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10320 arm_insn_r->reg_rec_count = 1;
10321
10322 if (bit (arm_insn_r->arm_insn, 21))
10323 {
10324 /* Write back to Rn. */
10325 record_buf[arm_insn_r->reg_rec_count++]
10326 = bits (arm_insn_r->arm_insn, 16, 19);
10327 }
10328 }
10329 else if ((opcode1 & 0x05) == 0x5)
10330 {
10331 /* LDRH (immediate), LDRH (literal) */
10332 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10333
10334 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10335 arm_insn_r->reg_rec_count = 1;
10336
10337 if (rn != 15)
10338 {
10339 /* LDRH (immediate) */
10340 if (bit (arm_insn_r->arm_insn, 21))
10341 {
10342 /* Write back to Rn. */
10343 record_buf[arm_insn_r->reg_rec_count++] = rn;
10344 }
10345 }
10346 }
10347 else
10348 return -1;
10349 break;
10350 case 2:
10351 if ((opcode1 & 0x05) == 0x0)
10352 {
10353 /* LDRD (register) */
10354 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10355 record_buf[1] = record_buf[0] + 1;
10356 arm_insn_r->reg_rec_count = 2;
10357
10358 if (bit (arm_insn_r->arm_insn, 21))
10359 {
10360 /* Write back to Rn. */
10361 record_buf[arm_insn_r->reg_rec_count++]
10362 = bits (arm_insn_r->arm_insn, 16, 19);
10363 }
10364 }
10365 else if ((opcode1 & 0x05) == 0x1)
10366 {
10367 /* LDRSB (register) */
10368 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10369 arm_insn_r->reg_rec_count = 1;
10370
10371 if (bit (arm_insn_r->arm_insn, 21))
10372 {
10373 /* Write back to Rn. */
10374 record_buf[arm_insn_r->reg_rec_count++]
10375 = bits (arm_insn_r->arm_insn, 16, 19);
10376 }
10377 }
10378 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10379 {
10380 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10381 LDRSB (literal) */
10382 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10383
10384 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10385 arm_insn_r->reg_rec_count = 1;
10386
10387 if (rn != 15)
10388 {
10389 /* LDRD (immediate), LDRSB (immediate) */
10390 if (bit (arm_insn_r->arm_insn, 21))
10391 {
10392 /* Write back to Rn. */
10393 record_buf[arm_insn_r->reg_rec_count++] = rn;
10394 }
10395 }
10396 }
10397 else
10398 return -1;
10399 break;
10400 case 3:
10401 if ((opcode1 & 0x05) == 0x0)
10402 {
10403 /* STRD (register) */
10404 arm_record_strx (arm_insn_r, &record_buf[0],
10405 &record_buf_mem[0], ARM_RECORD_STRD);
10406 }
10407 else if ((opcode1 & 0x05) == 0x1)
10408 {
10409 /* LDRSH (register) */
10410 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10411 arm_insn_r->reg_rec_count = 1;
10412
10413 if (bit (arm_insn_r->arm_insn, 21))
10414 {
10415 /* Write back to Rn. */
10416 record_buf[arm_insn_r->reg_rec_count++]
10417 = bits (arm_insn_r->arm_insn, 16, 19);
10418 }
10419 }
10420 else if ((opcode1 & 0x05) == 0x4)
10421 {
10422 /* STRD (immediate) */
10423 arm_record_strx (arm_insn_r, &record_buf[0],
10424 &record_buf_mem[0], ARM_RECORD_STRD);
10425 }
10426 else if ((opcode1 & 0x05) == 0x5)
10427 {
10428 /* LDRSH (immediate), LDRSH (literal) */
10429 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10430 arm_insn_r->reg_rec_count = 1;
10431
10432 if (bit (arm_insn_r->arm_insn, 21))
10433 {
10434 /* Write back to Rn. */
10435 record_buf[arm_insn_r->reg_rec_count++]
10436 = bits (arm_insn_r->arm_insn, 16, 19);
10437 }
10438 }
10439 else
10440 return -1;
10441 break;
10442 default:
10443 return -1;
10444 }
10445 }
10446 }
10447 else
10448 {
10449 return -1;
10450 }
10451
10452 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10453 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10454 return 0;
10455 }
10456
10457 /* Handling opcode 001 insns. */
10458
10459 static int
10460 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10461 {
10462 uint32_t record_buf[8], record_buf_mem[8];
10463
10464 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10465 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10466
10467 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10468 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10469 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10470 )
10471 {
10472 /* Handle MSR insn. */
10473 if (9 == arm_insn_r->opcode)
10474 {
10475 /* CPSR is going to be changed. */
10476 record_buf[0] = ARM_PS_REGNUM;
10477 arm_insn_r->reg_rec_count = 1;
10478 }
10479 else
10480 {
10481 /* SPSR is going to be changed. */
10482 }
10483 }
10484 else if (arm_insn_r->opcode <= 15)
10485 {
10486 /* Normal data processing insns. */
10487 /* In all 11 shifter-operand modes, the insn modifies the destination
10488 register, which is specified by bits 12-15. */
10489 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10490 record_buf[1] = ARM_PS_REGNUM;
10491 arm_insn_r->reg_rec_count = 2;
10492 }
10493 else
10494 {
10495 return -1;
10496 }
10497
10498 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10499 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10500 return 0;
10501 }
10502
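/* Worked example (a sketch): for MOV R0, #1 the opcode field (bits
   21-24) is 13, so the "normal data processing" branch above records
   the destination register R0 (bits 12-15) and, conservatively, CPSR,
   giving reg_rec_count = 2 and no memory records.  */
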
10503 static int
10504 arm_record_media (insn_decode_record *arm_insn_r)
10505 {
10506 uint32_t record_buf[8];
10507
10508 switch (bits (arm_insn_r->arm_insn, 22, 24))
10509 {
10510 case 0:
10511 /* Parallel addition and subtraction, signed */
10512 case 1:
10513 /* Parallel addition and subtraction, unsigned */
10514 case 2:
10515 case 3:
10516 /* Packing, unpacking, saturation and reversal */
10517 {
10518 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10519
10520 record_buf[arm_insn_r->reg_rec_count++] = rd;
10521 }
10522 break;
10523
10524 case 4:
10525 case 5:
10526 /* Signed multiplies */
10527 {
10528 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10529 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10530
10531 record_buf[arm_insn_r->reg_rec_count++] = rd;
10532 if (op1 == 0x0)
10533 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10534 else if (op1 == 0x4)
10535 record_buf[arm_insn_r->reg_rec_count++]
10536 = bits (arm_insn_r->arm_insn, 12, 15);
10537 }
10538 break;
10539
10540 case 6:
10541 {
10542 if (bit (arm_insn_r->arm_insn, 21)
10543 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10544 {
10545 /* SBFX */
10546 record_buf[arm_insn_r->reg_rec_count++]
10547 = bits (arm_insn_r->arm_insn, 12, 15);
10548 }
10549 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10550 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10551 {
10552 /* USAD8 and USADA8 */
10553 record_buf[arm_insn_r->reg_rec_count++]
10554 = bits (arm_insn_r->arm_insn, 16, 19);
10555 }
10556 }
10557 break;
10558
10559 case 7:
10560 {
10561 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10562 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10563 {
10564 /* Permanently UNDEFINED */
10565 return -1;
10566 }
10567 else
10568 {
10569 /* BFC, BFI and UBFX */
10570 record_buf[arm_insn_r->reg_rec_count++]
10571 = bits (arm_insn_r->arm_insn, 12, 15);
10572 }
10573 }
10574 break;
10575
10576 default:
10577 return -1;
10578 }
10579
10580 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10581
10582 return 0;
10583 }
10584
10585 /* Handle ARM mode instructions with opcode 010. */
10586
10587 static int
10588 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10589 {
10590 struct regcache *reg_cache = arm_insn_r->regcache;
10591
10592 uint32_t reg_base, reg_dest;
10593 uint32_t offset_12, tgt_mem_addr;
10594 uint32_t record_buf[8], record_buf_mem[8];
10595 unsigned char wback;
10596 ULONGEST u_regval;
10597
10598 /* Calculate wback. */
10599 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10600 || (bit (arm_insn_r->arm_insn, 21) == 1);
10601
10602 arm_insn_r->reg_rec_count = 0;
10603 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10604
10605 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10606 {
10607 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10608 and LDRT. */
10609
10610 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10611 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10612
10613 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10614 precedes an LDR instruction having R15 as reg_dest, it
10615 emulates a branch and link instruction, and hence we need to save
10616 CPSR and PC as well. */
10617 if (ARM_PC_REGNUM == reg_dest)
10618 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10619
10620 /* If wback is true, also save the base register, which is going to be
10621 written to. */
10622 if (wback)
10623 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10624 }
10625 else
10626 {
10627 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10628
10629 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10630 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10631
10632 /* Handle bit U. */
10633 if (bit (arm_insn_r->arm_insn, 23))
10634 {
10635 /* U == 1: Add the offset. */
10636 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10637 }
10638 else
10639 {
10640 /* U == 0: Subtract the offset. */
10641 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10642 }
10643
10644 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10645 bytes. */
10646 if (bit (arm_insn_r->arm_insn, 22))
10647 {
10648 /* STRB and STRBT: 1 byte. */
10649 record_buf_mem[0] = 1;
10650 }
10651 else
10652 {
10653 /* STR and STRT: 4 bytes. */
10654 record_buf_mem[0] = 4;
10655 }
10656
10657 /* Handle bit P. */
10658 if (bit (arm_insn_r->arm_insn, 24))
10659 record_buf_mem[1] = tgt_mem_addr;
10660 else
10661 record_buf_mem[1] = (uint32_t) u_regval;
10662
10663 arm_insn_r->mem_rec_count = 1;
10664
10665 /* If wback is true, also save the base register, which is going to be
10666 written to. */
10667 if (wback)
10668 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10669 }
10670
10671 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10672 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10673 return 0;
10674 }
10675
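/* Worked example (a sketch): for STR R1, [R2, #8] (P=1, U=1, W=0) the
   insn is a store, wback is false, and a single 4-byte memory block at
   R2 + 8 is recorded (record_buf_mem = {4, tgt_mem_addr}) with no
   register records; for LDR R1, [R2, #8] only R1 is recorded.  */
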
10676 /* Handling opcode 011 insns. */
10677
10678 static int
10679 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10680 {
10681 struct regcache *reg_cache = arm_insn_r->regcache;
10682
10683 uint32_t shift_imm = 0;
10684 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10685 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10686 uint32_t record_buf[8], record_buf_mem[8];
10687
10688 LONGEST s_word;
10689 ULONGEST u_regval[2];
10690
10691 if (bit (arm_insn_r->arm_insn, 4))
10692 return arm_record_media (arm_insn_r);
10693
10694 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10695 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10696
10697 /* Handle enhanced store insns and the LDRD DSP insn; the ordering
10698 below follows the addressing modes of the store insns, starting
10699 with STRH. */
10700
10701 /* LDR or STR? */
10702 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10703 {
10704 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10705 /* The LDR insn is capable of doing branching: if
10706 MOV LR, PC is followed by an LDR insn with R15 as the destination,
10707 the pair emulates a branch and link insn, and hence we
10708 need to save CPSR and PC as well. */
10709 if (15 != reg_dest)
10710 {
10711 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10712 arm_insn_r->reg_rec_count = 1;
10713 }
10714 else
10715 {
10716 record_buf[0] = reg_dest;
10717 record_buf[1] = ARM_PS_REGNUM;
10718 arm_insn_r->reg_rec_count = 2;
10719 }
10720 }
10721 else
10722 {
10723 if (! bits (arm_insn_r->arm_insn, 4, 11))
10724 {
10725 /* Store insn, register offset and register pre-indexed,
10726 register post-indexed. */
10727 /* Get Rm. */
10728 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10729 /* Get Rn. */
10730 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10731 regcache_raw_read_unsigned (reg_cache, reg_src1,
10732 &u_regval[0]);
10733 regcache_raw_read_unsigned (reg_cache, reg_src2,
10734 &u_regval[1]);
10735 if (15 == reg_src2)
10736 {
10737 /* If R15 was used as Rn, the effective value is the current PC+8. */
10738 /* Pre-indexed mode doesn't reach here; illegal insn. */
10739 u_regval[0] = u_regval[0] + 8;
10740 }
10741 /* Calculate target store address, Rn +/- Rm, register offset. */
10742 /* U == 1. */
10743 if (bit (arm_insn_r->arm_insn, 23))
10744 {
10745 tgt_mem_addr = u_regval[0] + u_regval[1];
10746 }
10747 else
10748 {
10749 tgt_mem_addr = u_regval[1] - u_regval[0];
10750 }
10751
10752 switch (arm_insn_r->opcode)
10753 {
10754 /* STR. */
10755 case 8:
10756 case 12:
10757 /* STR. */
10758 case 9:
10759 case 13:
10760 /* STRT. */
10761 case 1:
10762 case 5:
10763 /* STR. */
10764 case 0:
10765 case 4:
10766 record_buf_mem[0] = 4;
10767 break;
10768
10769 /* STRB. */
10770 case 10:
10771 case 14:
10772 /* STRB. */
10773 case 11:
10774 case 15:
10775 /* STRBT. */
10776 case 3:
10777 case 7:
10778 /* STRB. */
10779 case 2:
10780 case 6:
10781 record_buf_mem[0] = 1;
10782 break;
10783
10784 default:
10785 gdb_assert_not_reached ("no decoding pattern found");
10786 break;
10787 }
10788 record_buf_mem[1] = tgt_mem_addr;
10789 arm_insn_r->mem_rec_count = 1;
10790
10791 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10792 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10793 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10794 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10795 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10796 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10797 )
10798 {
10799 /* Rn is going to be changed in pre-indexed mode and
10800 post-indexed mode as well. */
10801 record_buf[0] = reg_src2;
10802 arm_insn_r->reg_rec_count = 1;
10803 }
10804 }
10805 else
10806 {
10807 /* Store insn, scaled register offset; scaled pre-indexed. */
10808 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10809 /* Get Rm. */
10810 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10811 /* Get Rn. */
10812 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10813 /* Get shift_imm. */
10814 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10815 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10816 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10817 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10818 /* Offset_12 temporarily holds the shift type from bits 5-6. */
10819 switch (offset_12)
10820 {
10821 case 0:
10822 /* LSL: offset_12 now becomes the index value. */
10823 offset_12 = u_regval[0] << shift_imm;
10824 break;
10825
10826 case 1:
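/* LSR: a shift amount of zero encodes LSR #32, which yields zero. */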
10827 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10828 break;
10829
10830 case 2:
10831 if (!shift_imm)
10832 {
10833 if (bit (u_regval[0], 31))
10834 {
10835 offset_12 = 0xFFFFFFFF;
10836 }
10837 else
10838 {
10839 offset_12 = 0;
10840 }
10841 }
10842 else
10843 {
10844 /* ASR: arithmetic shift right. */
10845 offset_12 = s_word >> shift_imm;
10846 }
10847 break;
10848
10849 case 3:
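/* ROR, or RRX when shift_imm is zero. */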
10850 if (!shift_imm)
10851 {
10852 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10853 &u_regval[1]);
10854 /* RRX: the C flag (CPSR bit 29) becomes bit 31 of the result. */
10855 offset_12 = ((bit (u_regval[1], 29) << 31)
10856 | (u_regval[0] >> 1));
10857 }
10858 else
10859 {
10860 offset_12 = (u_regval[0] >> shift_imm)
10861 | (u_regval[0] <<
10862 (32 - shift_imm));
10863 }
10864 break;
10865
10866 default:
10867 gdb_assert_not_reached ("no decoding pattern found");
10868 break;
10869 }
10870
10871 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10872 /* bit U set. */
10873 if (bit (arm_insn_r->arm_insn, 23))
10874 {
10875 tgt_mem_addr = u_regval[1] + offset_12;
10876 }
10877 else
10878 {
10879 tgt_mem_addr = u_regval[1] - offset_12;
10880 }
10881
10882 switch (arm_insn_r->opcode)
10883 {
10884 /* STR. */
10885 case 8:
10886 case 12:
10887 /* STR. */
10888 case 9:
10889 case 13:
10890 /* STRT. */
10891 case 1:
10892 case 5:
10893 /* STR. */
10894 case 0:
10895 case 4:
10896 record_buf_mem[0] = 4;
10897 break;
10898
10899 /* STRB. */
10900 case 10:
10901 case 14:
10902 /* STRB. */
10903 case 11:
10904 case 15:
10905 /* STRBT. */
10906 case 3:
10907 case 7:
10908 /* STRB. */
10909 case 2:
10910 case 6:
10911 record_buf_mem[0] = 1;
10912 break;
10913
10914 default:
10915 gdb_assert_not_reached ("no decoding pattern found");
10916 break;
10917 }
10918 record_buf_mem[1] = tgt_mem_addr;
10919 arm_insn_r->mem_rec_count = 1;
10920
10921 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10922 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10923 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10924 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10925 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10926 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10927 )
10928 {
10929 /* Rn is going to be changed in scaled register pre-indexed
10930 mode and in scaled post-indexed mode. */
10931 record_buf[0] = reg_src2;
10932 arm_insn_r->reg_rec_count = 1;
10933 }
10934 }
10935 }
10936
10937 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10938 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10939 return 0;
10940 }
10941
10942 /* Handle ARM mode instructions with opcode 100. */
10943
10944 static int
10945 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10946 {
10947 struct regcache *reg_cache = arm_insn_r->regcache;
10948 uint32_t register_count = 0, register_bits;
10949 uint32_t reg_base, addr_mode;
10950 uint32_t record_buf[24], record_buf_mem[48];
10951 uint32_t wback;
10952 ULONGEST u_regval;
10953
10954 /* Fetch the list of registers. */
10955 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10956 arm_insn_r->reg_rec_count = 0;
10957
10958 /* Fetch the base register that contains the address the data is
10959 loaded from or stored to. */
10960 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10961
10962 /* Calculate wback. */
10963 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
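/* The W bit (bit 21) requests write-back of the updated base address
   to Rn, so Rn must be recorded as well. */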
10964
10965 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10966 {
10967 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10968
10969 /* Find out which registers are going to be loaded from memory. */
10970 while (register_bits)
10971 {
10972 if (register_bits & 0x00000001)
10973 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10974 register_bits = register_bits >> 1;
10975 register_count++;
10976 }
10977
10978
10979 /* If wback is true, also save the base register, which is going to be
10980 written to. */
10981 if (wback)
10982 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10983
10984 /* Save the CPSR register. */
10985 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10986 }
10987 else
10988 {
10989 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10990
10991 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10992
10993 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10994
10995 /* Find out how many registers are going to be stored to memory. */
10996 while (register_bits)
10997 {
10998 if (register_bits & 0x00000001)
10999 register_count++;
11000 register_bits = register_bits >> 1;
11001 }
11002
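/* record_buf_mem[1] will hold the lowest address written by the store
   multiple, derived from the addressing mode below; record_buf_mem[0]
   holds the total number of bytes stored. */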
11003 switch (addr_mode)
11004 {
11005 /* STMDA (STMED): Decrement after. */
11006 case 0:
11007 record_buf_mem[1] = (uint32_t) u_regval
11008 - register_count * ARM_INT_REGISTER_SIZE + 4;
11009 break;
11010 /* STM (STMIA, STMEA): Increment after. */
11011 case 1:
11012 record_buf_mem[1] = (uint32_t) u_regval;
11013 break;
11014 /* STMDB (STMFD): Decrement before. */
11015 case 2:
11016 record_buf_mem[1] = (uint32_t) u_regval
11017 - register_count * ARM_INT_REGISTER_SIZE;
11018 break;
11019 /* STMIB (STMFA): Increment before. */
11020 case 3:
11021 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11022 break;
11023 default:
11024 gdb_assert_not_reached ("no decoding pattern found");
11025 break;
11026 }
11027
11028 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11029 arm_insn_r->mem_rec_count = 1;
11030
11031 /* If wback is true, also save the base register, which is going to be
11032 written to. */
11033 if (wback)
11034 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11035 }
11036
11037 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11038 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11039 return 0;
11040 }
11041
11042 /* Handling opcode 101 insns. */
11043
11044 static int
11045 arm_record_b_bl (insn_decode_record *arm_insn_r)
11046 {
11047 uint32_t record_buf[8];
11048
11049 /* Handle B, BL, BLX(1) insns. */
11050 /* B simply branches, so we do nothing here. */
11051 /* Note: BLX(1) doesn't fall here; it falls into the
11052 extension space instead. */
11053 if (bit (arm_insn_r->arm_insn, 24))
11054 {
11055 record_buf[0] = ARM_LR_REGNUM;
11056 arm_insn_r->reg_rec_count = 1;
11057 }
11058
11059 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11060
11061 return 0;
11062 }
11063
11064 static int
11065 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11066 {
11067 printf_unfiltered (_("Process record does not support instruction "
11068 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11069 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11070
11071 return -1;
11072 }
11073
11074 /* Record handler for vector data transfer instructions. */
11075
11076 static int
11077 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11078 {
11079 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11080 uint32_t record_buf[4];
11081
11082 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11083 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11084 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11085 bit_l = bit (arm_insn_r->arm_insn, 20);
11086 bit_c = bit (arm_insn_r->arm_insn, 8);
11087
11088 /* Handle VMOV instruction. */
11089 if (bit_l && bit_c)
11090 {
11091 record_buf[0] = reg_t;
11092 arm_insn_r->reg_rec_count = 1;
11093 }
11094 else if (bit_l && !bit_c)
11095 {
11096 /* Handle VMOV instruction. */
11097 if (bits_a == 0x00)
11098 {
11099 record_buf[0] = reg_t;
11100 arm_insn_r->reg_rec_count = 1;
11101 }
11102 /* Handle VMRS instruction. */
11103 else if (bits_a == 0x07)
11104 {
11105 if (reg_t == 15)
11106 reg_t = ARM_PS_REGNUM;
11107
11108 record_buf[0] = reg_t;
11109 arm_insn_r->reg_rec_count = 1;
11110 }
11111 }
11112 else if (!bit_l && !bit_c)
11113 {
11114 /* Handle VMOV instruction. */
11115 if (bits_a == 0x00)
11116 {
11117 record_buf[0] = ARM_D0_REGNUM + reg_v;
11118
11119 arm_insn_r->reg_rec_count = 1;
11120 }
11121 /* Handle VMSR instruction. */
11122 else if (bits_a == 0x07)
11123 {
11124 record_buf[0] = ARM_FPSCR_REGNUM;
11125 arm_insn_r->reg_rec_count = 1;
11126 }
11127 }
11128 else if (!bit_l && bit_c)
11129 {
11130 /* Handle VMOV instruction. */
11131 if (!(bits_a & 0x04))
11132 {
11133 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11134 + ARM_D0_REGNUM;
11135 arm_insn_r->reg_rec_count = 1;
11136 }
11137 /* Handle VDUP instruction. */
11138 else
11139 {
11140 if (bit (arm_insn_r->arm_insn, 21))
11141 {
11142 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11143 record_buf[0] = reg_v + ARM_D0_REGNUM;
11144 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11145 arm_insn_r->reg_rec_count = 2;
11146 }
11147 else
11148 {
11149 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11150 record_buf[0] = reg_v + ARM_D0_REGNUM;
11151 arm_insn_r->reg_rec_count = 1;
11152 }
11153 }
11154 }
11155
11156 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11157 return 0;
11158 }
11159
11160 /* Record handler for extension register load/store instructions. */
11161
11162 static int
11163 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11164 {
11165 uint32_t opcode, single_reg;
11166 uint8_t op_vldm_vstm;
11167 uint32_t record_buf[8], record_buf_mem[128];
11168 ULONGEST u_regval = 0;
11169
11170 struct regcache *reg_cache = arm_insn_r->regcache;
11171
11172 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11173 single_reg = !bit (arm_insn_r->arm_insn, 8);
11174 op_vldm_vstm = opcode & 0x1b;
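/* 0x1b masks off bit 2 of the opcode field (the D bit, insn bit 22),
   leaving the bits that distinguish the VSTM/VLDM/VPUSH/VPOP forms. */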
11175
11176 /* Handle VMOV instructions. */
11177 if ((opcode & 0x1e) == 0x04)
11178 {
11179 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11180 {
11181 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11182 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11183 arm_insn_r->reg_rec_count = 2;
11184 }
11185 else
11186 {
11187 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11188 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11189
11190 if (single_reg)
11191 {
11192 /* The first S register number m is REG_M:M (M is bit 5),
11193 the corresponding D register number is REG_M:M / 2, which
11194 is REG_M. */
11195 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11196 /* The second S register number is REG_M:M + 1, the
11197 corresponding D register number is (REG_M:M + 1) / 2.
11198 IOW, if bit M is 1, the first and second S registers
11199 are mapped to different D registers, otherwise, they are
11200 in the same D register. */
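/* For example, REG_M = 1 with M = 0 selects S2 and S3, which both
   live in D1; with M = 1 it selects S3 and S4, which span D1 and D2. */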
11201 if (bit_m)
11202 {
11203 record_buf[arm_insn_r->reg_rec_count++]
11204 = ARM_D0_REGNUM + reg_m + 1;
11205 }
11206 }
11207 else
11208 {
11209 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11210 arm_insn_r->reg_rec_count = 1;
11211 }
11212 }
11213 }
11214 /* Handle VSTM and VPUSH instructions. */
11215 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11216 || op_vldm_vstm == 0x12)
11217 {
11218 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11219 uint32_t memory_index = 0;
11220
11221 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11222 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11223 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11224 imm_off32 = imm_off8 << 2;
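/* imm8 gives the number of words transferred; imm_off32 is the
   corresponding offset in bytes. */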
11225 memory_count = imm_off8;
11226
11227 if (bit (arm_insn_r->arm_insn, 23))
11228 start_address = u_regval;
11229 else
11230 start_address = u_regval - imm_off32;
11231
11232 if (bit (arm_insn_r->arm_insn, 21))
11233 {
11234 record_buf[0] = reg_rn;
11235 arm_insn_r->reg_rec_count = 1;
11236 }
11237
11238 while (memory_count > 0)
11239 {
11240 if (single_reg)
11241 {
11242 record_buf_mem[memory_index] = 4;
11243 record_buf_mem[memory_index + 1] = start_address;
11244 start_address = start_address + 4;
11245 memory_index = memory_index + 2;
11246 }
11247 else
11248 {
11249 record_buf_mem[memory_index] = 4;
11250 record_buf_mem[memory_index + 1] = start_address;
11251 record_buf_mem[memory_index + 2] = 4;
11252 record_buf_mem[memory_index + 3] = start_address + 4;
11253 start_address = start_address + 8;
11254 memory_index = memory_index + 4;
11255 }
11256 memory_count--;
11257 }
11258 arm_insn_r->mem_rec_count = (memory_index >> 1);
11259 }
11260 /* Handle VLDM instructions. */
11261 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11262 || op_vldm_vstm == 0x13)
11263 {
11264 uint32_t reg_count, reg_vd;
11265 uint32_t reg_index = 0;
11266 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11267
11268 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11269 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11270
11271 /* REG_VD is the first D register number. If the instruction
11272 loads from memory into S registers (SINGLE_REG is TRUE), the S
11273 register number is (REG_VD << 1 | bit D), so the corresponding D
11274 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11275 if (!single_reg)
11276 reg_vd = reg_vd | (bit_d << 4);
11277
11278 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11279 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11280
11281 /* If the instruction loads into D registers, REG_COUNT should be
11282 divided by 2, according to the ARM Architecture Reference
11283 Manual. If it loads into S registers, divide by 2 as well,
11284 because two S registers map onto each D register. */
11285 reg_count = reg_count / 2;
11286 if (single_reg && bit_d)
11287 {
11288 /* Increase the register count if S register list starts from
11289 an odd number (bit d is one). */
11290 reg_count++;
11291 }
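/* For example, a VLDM of S1..S4 has imm8 = 4 and bit D = 1; the list
   spans D0..D2, so three D registers are recorded. */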
11292
11293 while (reg_count > 0)
11294 {
11295 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11296 reg_count--;
11297 }
11298 arm_insn_r->reg_rec_count = reg_index;
11299 }
11300 /* VSTR Vector store register. */
11301 else if ((opcode & 0x13) == 0x10)
11302 {
11303 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11304 uint32_t memory_index = 0;
11305
11306 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11307 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11308 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11309 imm_off32 = imm_off8 << 2;
11310
11311 if (bit (arm_insn_r->arm_insn, 23))
11312 start_address = u_regval + imm_off32;
11313 else
11314 start_address = u_regval - imm_off32;
11315
11316 if (single_reg)
11317 {
11318 record_buf_mem[memory_index] = 4;
11319 record_buf_mem[memory_index + 1] = start_address;
11320 arm_insn_r->mem_rec_count = 1;
11321 }
11322 else
11323 {
11324 record_buf_mem[memory_index] = 4;
11325 record_buf_mem[memory_index + 1] = start_address;
11326 record_buf_mem[memory_index + 2] = 4;
11327 record_buf_mem[memory_index + 3] = start_address + 4;
11328 arm_insn_r->mem_rec_count = 2;
11329 }
11330 }
11331 /* VLDR Vector load register. */
11332 else if ((opcode & 0x13) == 0x11)
11333 {
11334 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11335
11336 if (!single_reg)
11337 {
11338 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11339 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11340 }
11341 else
11342 {
11343 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11344 /* Record register D rather than pseudo register S. */
11345 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11346 }
11347 arm_insn_r->reg_rec_count = 1;
11348 }
11349
11350 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11351 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11352 return 0;
11353 }
11354
11355 /* Record handler for arm/thumb mode VFP data processing instructions. */
11356
11357 static int
11358 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11359 {
11360 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11361 uint32_t record_buf[4];
11362 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11363 enum insn_types curr_insn_type = INSN_INV;
11364
11365 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11366 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11367 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11368 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11369 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11370 bit_d = bit (arm_insn_r->arm_insn, 22);
11371 /* Mask off the "D" bit. */
11372 opc1 = opc1 & ~0x04;
11373
11374 /* Handle VMLA, VMLS. */
11375 if (opc1 == 0x00)
11376 {
11377 if (bit (arm_insn_r->arm_insn, 10))
11378 {
11379 if (bit (arm_insn_r->arm_insn, 6))
11380 curr_insn_type = INSN_T0;
11381 else
11382 curr_insn_type = INSN_T1;
11383 }
11384 else
11385 {
11386 if (dp_op_sz)
11387 curr_insn_type = INSN_T1;
11388 else
11389 curr_insn_type = INSN_T2;
11390 }
11391 }
11392 /* Handle VNMLA, VNMLS, VNMUL. */
11393 else if (opc1 == 0x01)
11394 {
11395 if (dp_op_sz)
11396 curr_insn_type = INSN_T1;
11397 else
11398 curr_insn_type = INSN_T2;
11399 }
11400 /* Handle VMUL. */
11401 else if (opc1 == 0x02 && !(opc3 & 0x01))
11402 {
11403 if (bit (arm_insn_r->arm_insn, 10))
11404 {
11405 if (bit (arm_insn_r->arm_insn, 6))
11406 curr_insn_type = INSN_T0;
11407 else
11408 curr_insn_type = INSN_T1;
11409 }
11410 else
11411 {
11412 if (dp_op_sz)
11413 curr_insn_type = INSN_T1;
11414 else
11415 curr_insn_type = INSN_T2;
11416 }
11417 }
11418 /* Handle VADD, VSUB. */
11419 else if (opc1 == 0x03)
11420 {
11421 if (!bit (arm_insn_r->arm_insn, 9))
11422 {
11423 if (bit (arm_insn_r->arm_insn, 6))
11424 curr_insn_type = INSN_T0;
11425 else
11426 curr_insn_type = INSN_T1;
11427 }
11428 else
11429 {
11430 if (dp_op_sz)
11431 curr_insn_type = INSN_T1;
11432 else
11433 curr_insn_type = INSN_T2;
11434 }
11435 }
11436 /* Handle VDIV. */
11437 else if (opc1 == 0x08)
11438 {
11439 if (dp_op_sz)
11440 curr_insn_type = INSN_T1;
11441 else
11442 curr_insn_type = INSN_T2;
11443 }
11444 /* Handle all other vfp data processing instructions. */
11445 else if (opc1 == 0x0b)
11446 {
11447 /* Handle VMOV. */
11448 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11449 {
11450 if (bit (arm_insn_r->arm_insn, 4))
11451 {
11452 if (bit (arm_insn_r->arm_insn, 6))
11453 curr_insn_type = INSN_T0;
11454 else
11455 curr_insn_type = INSN_T1;
11456 }
11457 else
11458 {
11459 if (dp_op_sz)
11460 curr_insn_type = INSN_T1;
11461 else
11462 curr_insn_type = INSN_T2;
11463 }
11464 }
11465 /* Handle VNEG and VABS. */
11466 else if ((opc2 == 0x01 && opc3 == 0x01)
11467 || (opc2 == 0x00 && opc3 == 0x03))
11468 {
11469 if (!bit (arm_insn_r->arm_insn, 11))
11470 {
11471 if (bit (arm_insn_r->arm_insn, 6))
11472 curr_insn_type = INSN_T0;
11473 else
11474 curr_insn_type = INSN_T1;
11475 }
11476 else
11477 {
11478 if (dp_op_sz)
11479 curr_insn_type = INSN_T1;
11480 else
11481 curr_insn_type = INSN_T2;
11482 }
11483 }
11484 /* Handle VSQRT. */
11485 else if (opc2 == 0x01 && opc3 == 0x03)
11486 {
11487 if (dp_op_sz)
11488 curr_insn_type = INSN_T1;
11489 else
11490 curr_insn_type = INSN_T2;
11491 }
11492 /* Handle VCVT. */
11493 else if (opc2 == 0x07 && opc3 == 0x03)
11494 {
11495 if (!dp_op_sz)
11496 curr_insn_type = INSN_T1;
11497 else
11498 curr_insn_type = INSN_T2;
11499 }
11500 else if (opc3 & 0x01)
11501 {
11502 /* Handle VCVT. */
11503 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11504 {
11505 if (!bit (arm_insn_r->arm_insn, 18))
11506 curr_insn_type = INSN_T2;
11507 else
11508 {
11509 if (dp_op_sz)
11510 curr_insn_type = INSN_T1;
11511 else
11512 curr_insn_type = INSN_T2;
11513 }
11514 }
11515 /* Handle VCVT. */
11516 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11517 {
11518 if (dp_op_sz)
11519 curr_insn_type = INSN_T1;
11520 else
11521 curr_insn_type = INSN_T2;
11522 }
11523 /* Handle VCVTB, VCVTT. */
11524 else if ((opc2 & 0x0e) == 0x02)
11525 curr_insn_type = INSN_T2;
11526 /* Handle VCMP, VCMPE. */
11527 else if ((opc2 & 0x0e) == 0x04)
11528 curr_insn_type = INSN_T3;
11529 }
11530 }
11531
11532 switch (curr_insn_type)
11533 {
11534 case INSN_T0:
11535 reg_vd = reg_vd | (bit_d << 4);
11536 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11537 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11538 arm_insn_r->reg_rec_count = 2;
11539 break;
11540
11541 case INSN_T1:
11542 reg_vd = reg_vd | (bit_d << 4);
11543 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11544 arm_insn_r->reg_rec_count = 1;
11545 break;
11546
11547 case INSN_T2:
11548 reg_vd = (reg_vd << 1) | bit_d;
11549 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11550 arm_insn_r->reg_rec_count = 1;
11551 break;
11552
11553 case INSN_T3:
11554 record_buf[0] = ARM_FPSCR_REGNUM;
11555 arm_insn_r->reg_rec_count = 1;
11556 break;
11557
11558 default:
11559 gdb_assert_not_reached ("no decoding pattern found");
11560 break;
11561 }
11562
11563 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11564 return 0;
11565 }
11566
11567 /* Handling opcode 110 insns. */
11568
11569 static int
11570 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11571 {
11572 uint32_t op1, op1_ebit, coproc;
11573
11574 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11575 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11576 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11577
11578 if ((coproc & 0x0e) == 0x0a)
11579 {
11580 /* Handle extension register ld/st instructions. */
11581 if (!(op1 & 0x20))
11582 return arm_record_exreg_ld_st_insn (arm_insn_r);
11583
11584 /* 64-bit transfers between arm core and extension registers. */
11585 if ((op1 & 0x3e) == 0x04)
11586 return arm_record_exreg_ld_st_insn (arm_insn_r);
11587 }
11588 else
11589 {
11590 /* Handle coprocessor ld/st instructions. */
11591 if (!(op1 & 0x3a))
11592 {
11593 /* Store. */
11594 if (!op1_ebit)
11595 return arm_record_unsupported_insn (arm_insn_r);
11596 else
11597 /* Load. */
11598 return arm_record_unsupported_insn (arm_insn_r);
11599 }
11600
11601 /* Move to coprocessor from two arm core registers. */
11602 if (op1 == 0x4)
11603 return arm_record_unsupported_insn (arm_insn_r);
11604
11605 /* Move to two arm core registers from coprocessor. */
11606 if (op1 == 0x5)
11607 {
11608 uint32_t reg_t[2];
11609
11610 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11611 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11612 arm_insn_r->reg_rec_count = 2;
11613
11614 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11615 return 0;
11616 }
11617 }
11618 return arm_record_unsupported_insn (arm_insn_r);
11619 }
11620
11621 /* Handling opcode 111 insns. */
11622
11623 static int
11624 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11625 {
11626 uint32_t op, op1_ebit, coproc, bits_24_25;
11627 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11628 struct regcache *reg_cache = arm_insn_r->regcache;
11629
11630 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11631 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11632 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11633 op = bit (arm_insn_r->arm_insn, 4);
11634 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11635
11636 /* Handle arm SWI/SVC system call instructions. */
11637 if (bits_24_25 == 0x3)
11638 {
11639 if (tdep->arm_syscall_record != NULL)
11640 {
11641 ULONGEST svc_operand, svc_number;
11642
11643 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11644
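/* A non-zero SVC immediate means the old OABI calling convention,
   where the syscall number is the immediate minus 0x900000; with
   EABI the immediate is zero and the number is passed in r7. */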
11645 if (svc_operand) /* OABI. */
11646 svc_number = svc_operand - 0x900000;
11647 else /* EABI. */
11648 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11649
11650 return tdep->arm_syscall_record (reg_cache, svc_number);
11651 }
11652 else
11653 {
11654 printf_unfiltered (_("no syscall record support\n"));
11655 return -1;
11656 }
11657 }
11658 else if (bits_24_25 == 0x02)
11659 {
11660 if (op)
11661 {
11662 if ((coproc & 0x0e) == 0x0a)
11663 {
11664 /* 8, 16, and 32-bit transfer */
11665 return arm_record_vdata_transfer_insn (arm_insn_r);
11666 }
11667 else
11668 {
11669 if (op1_ebit)
11670 {
11671 /* MRC, MRC2 */
11672 uint32_t record_buf[1];
11673
11674 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11675 if (record_buf[0] == 15)
11676 record_buf[0] = ARM_PS_REGNUM;
11677
11678 arm_insn_r->reg_rec_count = 1;
11679 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11680 record_buf);
11681 return 0;
11682 }
11683 else
11684 {
11685 /* MCR, MCR2 */
11686 return -1;
11687 }
11688 }
11689 }
11690 else
11691 {
11692 if ((coproc & 0x0e) == 0x0a)
11693 {
11694 /* VFP data-processing instructions. */
11695 return arm_record_vfp_data_proc_insn (arm_insn_r);
11696 }
11697 else
11698 {
11699 /* CDP, CDP2 */
11700 return -1;
11701 }
11702 }
11703 }
11704 else
11705 {
11706 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11707
11708 if (op1 == 5)
11709 {
11710 if ((coproc & 0x0e) != 0x0a)
11711 {
11712 /* MRRC, MRRC2 */
11713 return -1;
11714 }
11715 }
11716 else if (op1 == 4 || op1 == 5)
11717 {
11718 if ((coproc & 0x0e) == 0x0a)
11719 {
11720 /* 64-bit transfers between ARM core and extension */
11721 return -1;
11722 }
11723 else if (op1 == 4)
11724 {
11725 /* MCRR, MCRR2 */
11726 return -1;
11727 }
11728 }
11729 else if (op1 == 0 || op1 == 1)
11730 {
11731 /* UNDEFINED */
11732 return -1;
11733 }
11734 else
11735 {
11736 if ((coproc & 0x0e) == 0x0a)
11737 {
11738 /* Extension register load/store */
11739 }
11740 else
11741 {
11742 /* STC, STC2, LDC, LDC2 */
11743 }
11744 return -1;
11745 }
11746 }
11747
11748 return -1;
11749 }
11750
11751 /* Handling opcode 000 insns. */
11752
11753 static int
11754 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11755 {
11756 uint32_t record_buf[8];
11757 uint32_t reg_src1 = 0;
11758
11759 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11760
11761 record_buf[0] = ARM_PS_REGNUM;
11762 record_buf[1] = reg_src1;
11763 thumb_insn_r->reg_rec_count = 2;
11764
11765 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11766
11767 return 0;
11768 }
11769
11770
11771 /* Handling opcode 001 insns. */
11772
11773 static int
11774 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11775 {
11776 uint32_t record_buf[8];
11777 uint32_t reg_src1 = 0;
11778
11779 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11780
11781 record_buf[0] = ARM_PS_REGNUM;
11782 record_buf[1] = reg_src1;
11783 thumb_insn_r->reg_rec_count = 2;
11784
11785 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11786
11787 return 0;
11788 }
11789
11790 /* Handling opcode 010 insns. */
11791
11792 static int
11793 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11794 {
11795 struct regcache *reg_cache = thumb_insn_r->regcache;
11796 uint32_t record_buf[8], record_buf_mem[8];
11797
11798 uint32_t reg_src1 = 0, reg_src2 = 0;
11799 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11800
11801 ULONGEST u_regval[2] = {0};
11802
11803 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11804
11805 if (bit (thumb_insn_r->arm_insn, 12))
11806 {
11807 /* Handle load/store register offset. */
11808 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11809
11810 if (in_inclusive_range (opB, 4U, 7U))
11811 {
11812 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11813 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11814 record_buf[0] = reg_src1;
11815 thumb_insn_r->reg_rec_count = 1;
11816 }
11817 else if (in_inclusive_range (opB, 0U, 2U))
11818 {
11819 /* STR(2), STRB(2), STRH(2). */
11820 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11821 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11822 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11823 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11824 if (0 == opB)
11825 record_buf_mem[0] = 4; /* STR (2). */
11826 else if (2 == opB)
11827 record_buf_mem[0] = 1; /* STRB (2). */
11828 else if (1 == opB)
11829 record_buf_mem[0] = 2; /* STRH (2). */
11830 record_buf_mem[1] = u_regval[0] + u_regval[1];
11831 thumb_insn_r->mem_rec_count = 1;
11832 }
11833 }
11834 else if (bit (thumb_insn_r->arm_insn, 11))
11835 {
11836 /* Handle load from literal pool. */
11837 /* LDR(3). */
11838 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11839 record_buf[0] = reg_src1;
11840 thumb_insn_r->reg_rec_count = 1;
11841 }
11842 else if (opcode1)
11843 {
11844 /* Special data instructions and branch and exchange */
11845 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11846 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11847 if ((3 == opcode2) && (!opcode3))
11848 {
11849 /* Branch with exchange. */
11850 record_buf[0] = ARM_PS_REGNUM;
11851 thumb_insn_r->reg_rec_count = 1;
11852 }
11853 else
11854 {
11855 /* Format 8; special data processing insns. */
11856 record_buf[0] = ARM_PS_REGNUM;
11857 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11858 | bits (thumb_insn_r->arm_insn, 0, 2));
11859 thumb_insn_r->reg_rec_count = 2;
11860 }
11861 }
11862 else
11863 {
11864 /* Format 5; data processing insns. */
11865 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11866 if (bit (thumb_insn_r->arm_insn, 7))
11867 {
11868 reg_src1 = reg_src1 + 8;
11869 }
11870 record_buf[0] = ARM_PS_REGNUM;
11871 record_buf[1] = reg_src1;
11872 thumb_insn_r->reg_rec_count = 2;
11873 }
11874
11875 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11876 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11877 record_buf_mem);
11878
11879 return 0;
11880 }
11881
11882 /* Handling opcode 011 insns. */
11883
11884 static int
11885 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11886 {
11887 struct regcache *reg_cache = thumb_insn_r->regcache;
11888 uint32_t record_buf[8], record_buf_mem[8];
11889
11890 uint32_t reg_src1 = 0;
11891 uint32_t opcode = 0, immed_5 = 0;
11892
11893 ULONGEST u_regval = 0;
11894
11895 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11896
11897 if (opcode)
11898 {
11899 /* LDR(1). */
11900 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11901 record_buf[0] = reg_src1;
11902 thumb_insn_r->reg_rec_count = 1;
11903 }
11904 else
11905 {
11906 /* STR(1). */
11907 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11908 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11909 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11910 record_buf_mem[0] = 4;
11911 record_buf_mem[1] = u_regval + (immed_5 * 4);
11912 thumb_insn_r->mem_rec_count = 1;
11913 }
11914
11915 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11916 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11917 record_buf_mem);
11918
11919 return 0;
11920 }
11921
11922 /* Handling opcode 100 insns. */
11923
11924 static int
11925 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11926 {
11927 struct regcache *reg_cache = thumb_insn_r->regcache;
11928 uint32_t record_buf[8], record_buf_mem[8];
11929
11930 uint32_t reg_src1 = 0;
11931 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11932
11933 ULONGEST u_regval = 0;
11934
11935 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11936
11937 if (3 == opcode)
11938 {
11939 /* LDR(4). */
11940 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11941 record_buf[0] = reg_src1;
11942 thumb_insn_r->reg_rec_count = 1;
11943 }
11944 else if (1 == opcode)
11945 {
11946 /* LDRH(1). */
11947 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11948 record_buf[0] = reg_src1;
11949 thumb_insn_r->reg_rec_count = 1;
11950 }
11951 else if (2 == opcode)
11952 {
11953 /* STR(3). */
11954 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11955 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11956 record_buf_mem[0] = 4;
11957 record_buf_mem[1] = u_regval + (immed_8 * 4);
11958 thumb_insn_r->mem_rec_count = 1;
11959 }
11960 else if (0 == opcode)
11961 {
11962 /* STRH(1). */
11963 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11964 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11965 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11966 record_buf_mem[0] = 2;
11967 record_buf_mem[1] = u_regval + (immed_5 * 2);
11968 thumb_insn_r->mem_rec_count = 1;
11969 }
11970
11971 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11972 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11973 record_buf_mem);
11974
11975 return 0;
11976 }
11977
11978 /* Handling opcode 101 insns. */
11979
11980 static int
11981 thumb_record_misc (insn_decode_record *thumb_insn_r)
11982 {
11983 struct regcache *reg_cache = thumb_insn_r->regcache;
11984
11985 uint32_t opcode = 0;
11986 uint32_t register_bits = 0, register_count = 0;
11987 uint32_t index = 0, start_address = 0;
11988 uint32_t record_buf[24], record_buf_mem[48];
11989 uint32_t reg_src1;
11990
11991 ULONGEST u_regval = 0;
11992
11993 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11994
11995 if (opcode == 0 || opcode == 1)
11996 {
11997 /* ADR and ADD (SP plus immediate) */
11998
11999 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12000 record_buf[0] = reg_src1;
12001 thumb_insn_r->reg_rec_count = 1;
12002 }
12003 else
12004 {
12005 /* Miscellaneous 16-bit instructions */
12006 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12007
12008 switch (opcode2)
12009 {
12010 case 6:
12011 /* SETEND and CPS */
12012 break;
12013 case 0:
12014 /* ADD/SUB (SP plus immediate) */
12015 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12016 record_buf[0] = ARM_SP_REGNUM;
12017 thumb_insn_r->reg_rec_count = 1;
12018 break;
12019 case 1: /* fall through */
12020 case 3: /* fall through */
12021 case 9: /* fall through */
12022 case 11:
12023 /* CBNZ, CBZ */
12024 break;
12025 case 2:
12026 /* SXTH, SXTB, UXTH, UXTB */
12027 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12028 thumb_insn_r->reg_rec_count = 1;
12029 break;
12030 case 4: /* fall through */
12031 case 5:
12032 /* PUSH. */
12033 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12034 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12035 while (register_bits)
12036 {
12037 if (register_bits & 0x00000001)
12038 register_count++;
12039 register_bits = register_bits >> 1;
12040 }
12041 start_address = u_regval
12042 - (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
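/* Bit 8 ('M') adds LR to the register list, so the lowest address
   pushed is SP - 4 * (register_count + M). */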
12043 thumb_insn_r->mem_rec_count = register_count;
12044 while (register_count)
12045 {
12046 record_buf_mem[(register_count * 2) - 1] = start_address;
12047 record_buf_mem[(register_count * 2) - 2] = 4;
12048 start_address = start_address + 4;
12049 register_count--;
12050 }
12051 record_buf[0] = ARM_SP_REGNUM;
12052 thumb_insn_r->reg_rec_count = 1;
12053 break;
12054 case 10:
12055 /* REV, REV16, REVSH */
12056 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12057 thumb_insn_r->reg_rec_count = 1;
12058 break;
12059 case 12: /* fall through */
12060 case 13:
12061 /* POP. */
12062 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12063 while (register_bits)
12064 {
12065 if (register_bits & 0x00000001)
12066 record_buf[index++] = register_count;
12067 register_bits = register_bits >> 1;
12068 register_count++;
12069 }
12070 record_buf[index++] = ARM_PS_REGNUM;
12071 record_buf[index++] = ARM_SP_REGNUM;
12072 thumb_insn_r->reg_rec_count = index;
12073 break;
12074 case 0xe:
12075 /* BKPT insn. */
12076 /* Handle enhanced software breakpoint insn, BKPT. */
12077 /* The CPSR is changed so that execution continues in ARM state with
12078 normal interrupts disabled, and abort mode is entered. */
12079 /* The PC is set according to the high vector configuration. */
12080 /* If the user hits the breakpoint and then reverses execution, we need
12081 to go back to the previous CPSR and program counter. */
12082 record_buf[0] = ARM_PS_REGNUM;
12083 record_buf[1] = ARM_LR_REGNUM;
12084 thumb_insn_r->reg_rec_count = 2;
12085 /* We need to save SPSR value, which is not yet done. */
12086 printf_unfiltered (_("Process record does not support instruction "
12087 "0x%0x at address %s.\n"),
12088 thumb_insn_r->arm_insn,
12089 paddress (thumb_insn_r->gdbarch,
12090 thumb_insn_r->this_addr));
12091 return -1;
12092
12093 case 0xf:
12094 /* If-Then, and hints */
12095 break;
12096 default:
12097 return -1;
12098 };
12099 }
12100
12101 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12102 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12103 record_buf_mem);
12104
12105 return 0;
12106 }
12107
12108 /* Handling opcode 110 insns. */
12109
12110 static int
12111 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12112 {
12113 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12114 struct regcache *reg_cache = thumb_insn_r->regcache;
12115
12116 uint32_t ret = 0; /* Function return value: -1 on record failure, 0 on success. */
12117 uint32_t reg_src1 = 0;
12118 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12119 uint32_t index = 0, start_address = 0;
12120 uint32_t record_buf[24], record_buf_mem[48];
12121
12122 ULONGEST u_regval = 0;
12123
12124 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12125 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12126
12127 if (1 == opcode2)
12128 {
12129
12130 /* LDMIA. */
12131 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12132 /* Get Rn. */
12133 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12134 while (register_bits)
12135 {
12136 if (register_bits & 0x00000001)
12137 record_buf[index++] = register_count;
12138 register_bits = register_bits >> 1;
12139 register_count++;
12140 }
12141 record_buf[index++] = reg_src1;
12142 thumb_insn_r->reg_rec_count = index;
12143 }
12144 else if (0 == opcode2)
12145 {
12146 /* Handle STMIA. */
12147 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12148 /* Get Rn. */
12149 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12150 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12151 while (register_bits)
12152 {
12153 if (register_bits & 0x00000001)
12154 register_count++;
12155 register_bits = register_bits >> 1;
12156 }
12157 start_address = u_regval;
12158 thumb_insn_r->mem_rec_count = register_count;
12159 while (register_count)
12160 {
12161 record_buf_mem[(register_count * 2) - 1] = start_address;
12162 record_buf_mem[(register_count * 2) - 2] = 4;
12163 start_address = start_address + 4;
12164 register_count--;
12165 }
12166 }
12167 else if (0x1F == opcode1)
12168 {
12169 /* Handle arm syscall insn. */
12170 if (tdep->arm_syscall_record != NULL)
12171 {
12172 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12173 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12174 }
12175 else
12176 {
12177 printf_unfiltered (_("no syscall record support\n"));
12178 return -1;
12179 }
12180 }
12181
12182 /* B(1), the conditional branch, is automatically taken care of in
12183 process_record, as the PC is saved there. */
12184
12185 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12186 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12187 record_buf_mem);
12188
12189 return ret;
12190 }
12191
12192 /* Handling opcode 111 insns. */
12193
12194 static int
12195 thumb_record_branch (insn_decode_record *thumb_insn_r)
12196 {
12197 uint32_t record_buf[8];
12198 uint32_t bits_h = 0;
12199
12200 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12201
12202 if (2 == bits_h || 3 == bits_h)
12203 {
12204 /* BL */
12205 record_buf[0] = ARM_LR_REGNUM;
12206 thumb_insn_r->reg_rec_count = 1;
12207 }
12208 else if (1 == bits_h)
12209 {
12210 /* BLX(1). */
12211 record_buf[0] = ARM_PS_REGNUM;
12212 record_buf[1] = ARM_LR_REGNUM;
12213 thumb_insn_r->reg_rec_count = 2;
12214 }
12215
12216 /* B(2) is automatically taken care of in process_record, as the PC
12217 is saved there. */
12218
12219 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12220
12221 return 0;
12222 }
12223
12224 /* Handler for thumb2 load/store multiple instructions. */
12225
12226 static int
12227 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12228 {
12229 struct regcache *reg_cache = thumb2_insn_r->regcache;
12230
12231 uint32_t reg_rn, op;
12232 uint32_t register_bits = 0, register_count = 0;
12233 uint32_t index = 0, start_address = 0;
12234 uint32_t record_buf[24], record_buf_mem[48];
12235
12236 ULONGEST u_regval = 0;
12237
12238 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12239 op = bits (thumb2_insn_r->arm_insn, 23, 24);
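/* op selects the form: 0 and 3 are SRS/RFE, 1 is increment-after,
   2 is decrement-before. */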
12240
12241 if (0 == op || 3 == op)
12242 {
12243 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12244 {
12245 /* Handle RFE instruction. */
12246 record_buf[0] = ARM_PS_REGNUM;
12247 thumb2_insn_r->reg_rec_count = 1;
12248 }
12249 else
12250 {
12251 /* Handle SRS instruction after reading banked SP. */
12252 return arm_record_unsupported_insn (thumb2_insn_r);
12253 }
12254 }
12255 else if (1 == op || 2 == op)
12256 {
12257 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12258 {
12259 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12260 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12261 while (register_bits)
12262 {
12263 if (register_bits & 0x00000001)
12264 record_buf[index++] = register_count;
12265
12266 register_count++;
12267 register_bits = register_bits >> 1;
12268 }
12269 record_buf[index++] = reg_rn;
12270 record_buf[index++] = ARM_PS_REGNUM;
12271 thumb2_insn_r->reg_rec_count = index;
12272 }
12273 else
12274 {
12275 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12276 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12277 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12278 while (register_bits)
12279 {
12280 if (register_bits & 0x00000001)
12281 register_count++;
12282
12283 register_bits = register_bits >> 1;
12284 }
12285
12286 if (1 == op)
12287 {
12288 /* Start address calculation for STM/STMIA/STMEA. */
12289 start_address = u_regval;
12290 }
12291 else if (2 == op)
12292 {
12293 /* Start address calculation for STMDB/STMFD. */
12294 start_address = u_regval - register_count * 4;
12295 }
12296
12297 thumb2_insn_r->mem_rec_count = register_count;
12298 while (register_count)
12299 {
12300 record_buf_mem[register_count * 2 - 1] = start_address;
12301 record_buf_mem[register_count * 2 - 2] = 4;
12302 start_address = start_address + 4;
12303 register_count--;
12304 }
12305 record_buf[0] = reg_rn;
12306 record_buf[1] = ARM_PS_REGNUM;
12307 thumb2_insn_r->reg_rec_count = 2;
12308 }
12309 }
12310
12311 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12312 record_buf_mem);
12313 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12314 record_buf);
12315 return ARM_RECORD_SUCCESS;
12316 }
12317
12318 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12319 instructions. */
12320
12321 static int
12322 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12323 {
12324 struct regcache *reg_cache = thumb2_insn_r->regcache;
12325
12326 uint32_t reg_rd, reg_rn, offset_imm;
12327 uint32_t reg_dest1, reg_dest2;
12328 uint32_t address, offset_addr;
12329 uint32_t record_buf[8], record_buf_mem[8];
12330 uint32_t op1, op2, op3;
12331
12332 ULONGEST u_regval[2];
12333
12334 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12335 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12336 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12337
12338 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12339 {
12340 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12341 {
12342 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12343 record_buf[0] = reg_dest1;
12344 record_buf[1] = ARM_PS_REGNUM;
12345 thumb2_insn_r->reg_rec_count = 2;
12346 }
12347
12348 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12349 {
12350 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12351 record_buf[2] = reg_dest2;
12352 thumb2_insn_r->reg_rec_count = 3;
12353 }
12354 }
12355 else
12356 {
12357 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12358 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12359
12360 if (0 == op1 && 0 == op2)
12361 {
12362 /* Handle STREX. */
12363 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12364 address = u_regval[0] + (offset_imm * 4);
12365 record_buf_mem[0] = 4;
12366 record_buf_mem[1] = address;
12367 thumb2_insn_r->mem_rec_count = 1;
12368 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12369 record_buf[0] = reg_rd;
12370 thumb2_insn_r->reg_rec_count = 1;
12371 }
12372 else if (1 == op1 && 0 == op2)
12373 {
12374 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12375 record_buf[0] = reg_rd;
12376 thumb2_insn_r->reg_rec_count = 1;
12377 address = u_regval[0];
12378 record_buf_mem[1] = address;
12379
12380 if (4 == op3)
12381 {
12382 /* Handle STREXB. */
12383 record_buf_mem[0] = 1;
12384 thumb2_insn_r->mem_rec_count = 1;
12385 }
12386 else if (5 == op3)
12387 {
12388 /* Handle STREXH. */
12389 record_buf_mem[0] = 2;
12390 thumb2_insn_r->mem_rec_count = 1;
12391 }
12392 else if (7 == op3)
12393 {
12394 /* Handle STREXD. */
12395 address = u_regval[0];
12396 record_buf_mem[0] = 4;
12397 record_buf_mem[2] = 4;
12398 record_buf_mem[3] = address + 4;
12399 thumb2_insn_r->mem_rec_count = 2;
12400 }
12401 }
12402 else
12403 {
12404 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12405
12406 if (bit (thumb2_insn_r->arm_insn, 24))
12407 {
12408 if (bit (thumb2_insn_r->arm_insn, 23))
12409 offset_addr = u_regval[0] + (offset_imm * 4);
12410 else
12411 offset_addr = u_regval[0] - (offset_imm * 4);
12412
12413 address = offset_addr;
12414 }
12415 else
12416 address = u_regval[0];
12417
12418 record_buf_mem[0] = 4;
12419 record_buf_mem[1] = address;
12420 record_buf_mem[2] = 4;
12421 record_buf_mem[3] = address + 4;
12422 thumb2_insn_r->mem_rec_count = 2;
12423 record_buf[0] = reg_rn;
12424 thumb2_insn_r->reg_rec_count = 1;
12425 }
12426 }
12427
12428 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12429 record_buf);
12430 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12431 record_buf_mem);
12432 return ARM_RECORD_SUCCESS;
12433 }
12434
12435 /* Handler for thumb2 data processing (shift register and modified immediate)
12436 instructions. */
12437
12438 static int
12439 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12440 {
12441 uint32_t reg_rd, op;
12442 uint32_t record_buf[8];
12443
12444 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12445 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12446
12447 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12448 {
12449 record_buf[0] = ARM_PS_REGNUM;
12450 thumb2_insn_r->reg_rec_count = 1;
12451 }
12452 else
12453 {
12454 record_buf[0] = reg_rd;
12455 record_buf[1] = ARM_PS_REGNUM;
12456 thumb2_insn_r->reg_rec_count = 2;
12457 }
12458
12459 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12460 record_buf);
12461 return ARM_RECORD_SUCCESS;
12462 }
12463
12464 /* Generic handler for thumb2 instructions which affect the destination
12465 and PS registers. */
12466
12467 static int
12468 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12469 {
12470 uint32_t reg_rd;
12471 uint32_t record_buf[8];
12472
12473 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12474
12475 record_buf[0] = reg_rd;
12476 record_buf[1] = ARM_PS_REGNUM;
12477 thumb2_insn_r->reg_rec_count = 2;
12478
12479 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12480 record_buf);
12481 return ARM_RECORD_SUCCESS;
12482 }
12483
12484 /* Handler for thumb2 branch and miscellaneous control instructions. */
12485
12486 static int
12487 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12488 {
12489 uint32_t op, op1, op2;
12490 uint32_t record_buf[8];
12491
12492 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12493 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12494 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12495
12496 /* Handle MSR insn. */
12497 if (!(op1 & 0x2) && 0x38 == op)
12498 {
12499 if (!(op2 & 0x3))
12500 {
12501 /* CPSR is going to be changed. */
12502 record_buf[0] = ARM_PS_REGNUM;
12503 thumb2_insn_r->reg_rec_count = 1;
12504 }
12505 else
12506 {
12507 arm_record_unsupported_insn (thumb2_insn_r);
12508 return -1;
12509 }
12510 }
12511 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12512 {
12513 /* BLX. */
12514 record_buf[0] = ARM_PS_REGNUM;
12515 record_buf[1] = ARM_LR_REGNUM;
12516 thumb2_insn_r->reg_rec_count = 2;
12517 }
12518
12519 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12520 record_buf);
12521 return ARM_RECORD_SUCCESS;
12522 }
12523
12524 /* Handler for thumb2 store single data item instructions. */
12525
12526 static int
12527 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12528 {
12529 struct regcache *reg_cache = thumb2_insn_r->regcache;
12530
12531 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12532 uint32_t address, offset_addr;
12533 uint32_t record_buf[8], record_buf_mem[8];
12534 uint32_t op1, op2;
12535
12536 ULONGEST u_regval[2];
12537
12538 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12539 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12540 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12541 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12542
12543 if (bit (thumb2_insn_r->arm_insn, 23))
12544 {
12545 /* T2 encoding. */
12546 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12547 offset_addr = u_regval[0] + offset_imm;
12548 address = offset_addr;
12549 }
12550 else
12551 {
12552 /* T3 encoding. */
12553 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12554 {
12555 /* Handle STR/STRB/STRH (register). */
12556 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12557 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12558 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12559 offset_addr = u_regval[1] << shift_imm;
12560 address = u_regval[0] + offset_addr;
12561 }
12562 else
12563 {
12564 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12565 if (bit (thumb2_insn_r->arm_insn, 10))
12566 {
12567 if (bit (thumb2_insn_r->arm_insn, 9))
12568 offset_addr = u_regval[0] + offset_imm;
12569 else
12570 offset_addr = u_regval[0] - offset_imm;
12571
12572 address = offset_addr;
12573 }
12574 else
12575 address = u_regval[0];
12576 }
12577 }
12578
12579 switch (op1)
12580 {
12581 /* Store byte instructions. */
12582 case 4:
12583 case 0:
12584 record_buf_mem[0] = 1;
12585 break;
12586 /* Store half word instructions. */
12587 case 1:
12588 case 5:
12589 record_buf_mem[0] = 2;
12590 break;
12591 /* Store word instructions. */
12592 case 2:
12593 case 6:
12594 record_buf_mem[0] = 4;
12595 break;
12596
12597 default:
12598 gdb_assert_not_reached ("no decoding pattern found");
12599 break;
12600 }
12601
12602 record_buf_mem[1] = address;
12603 thumb2_insn_r->mem_rec_count = 1;
12604 record_buf[0] = reg_rn;
12605 thumb2_insn_r->reg_rec_count = 1;
12606
12607 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12608 record_buf);
12609 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12610 record_buf_mem);
12611 return ARM_RECORD_SUCCESS;
12612 }
12613
12614 /* Handler for thumb2 load memory hints instructions. */
12615
12616 static int
12617 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12618 {
12619 uint32_t record_buf[8];
12620 uint32_t reg_rt, reg_rn;
12621
12622 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12623 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12624
12625 if (ARM_PC_REGNUM != reg_rt)
12626 {
12627 record_buf[0] = reg_rt;
12628 record_buf[1] = reg_rn;
12629 record_buf[2] = ARM_PS_REGNUM;
12630 thumb2_insn_r->reg_rec_count = 3;
12631
12632 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12633 record_buf);
12634 return ARM_RECORD_SUCCESS;
12635 }
12636
12637 return ARM_RECORD_FAILURE;
12638 }
12639
12640 /* Handler for thumb2 load word instructions. */
12641
12642 static int
12643 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12644 {
12645 uint32_t record_buf[8];
12646
12647 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12648 record_buf[1] = ARM_PS_REGNUM;
12649 thumb2_insn_r->reg_rec_count = 2;
12650
12651 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12652 record_buf);
12653 return ARM_RECORD_SUCCESS;
12654 }
12655
12656 /* Handler for thumb2 long multiply, long multiply accumulate, and
12657 divide instructions. */
12658
12659 static int
12660 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12661 {
12662 uint32_t opcode1 = 0, opcode2 = 0;
12663 uint32_t record_buf[8];
12664
12665 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12666 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12667
12668 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12669 {
12670 /* Handle SMULL, UMULL, SMLAL, UMLAL and the related long multiply
12671 accumulate variants: record RdLo, RdHi and the status register. */
12672 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12673 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12674 record_buf[2] = ARM_PS_REGNUM;
12675 thumb2_insn_r->reg_rec_count = 3;
12676 }
12677 else if (1 == opcode1 || 3 == opcode1)
12678 {
12679 /* Handle SDIV and UDIV. */
12680 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12681 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12682 record_buf[2] = ARM_PS_REGNUM;
12683 thumb2_insn_r->reg_rec_count = 3;
12684 }
12685 else
12686 return ARM_RECORD_FAILURE;
12687
12688 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12689 record_buf);
12690 return ARM_RECORD_SUCCESS;
12691 }
12692
12693 /* Record handler for thumb32 coprocessor instructions. */
12694
12695 static int
12696 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12697 {
12698 if (bit (thumb2_insn_r->arm_insn, 25))
12699 return arm_record_coproc_data_proc (thumb2_insn_r);
12700 else
12701 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12702 }
12703
12704 /* Record handler for Advanced SIMD structure load/store instructions. */
12705
12706 static int
12707 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12708 {
12709 struct regcache *reg_cache = thumb2_insn_r->regcache;
12710 uint32_t l_bit, a_bit, b_bits;
12711 uint32_t record_buf[128], record_buf_mem[128];
12712 uint32_t reg_rn, reg_vd, address, f_elem;
12713 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12714 uint8_t f_ebytes;
12715
12716 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12717 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12718 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12719 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12720 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12721 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12722 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12723 f_elem = 8 / f_ebytes;
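/* f_ebytes is the element size in bytes and f_elem the number of
   elements held in one 8-byte D register. */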
12724
12725 if (!l_bit)
12726 {
12727 ULONGEST u_regval = 0;
12728 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12729 address = u_regval;
12730
12731 if (!a_bit)
12732 {
12733 /* Handle VST1. */
12734 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12735 {
12736 if (b_bits == 0x07)
12737 bf_regs = 1;
12738 else if (b_bits == 0x0a)
12739 bf_regs = 2;
12740 else if (b_bits == 0x06)
12741 bf_regs = 3;
12742 else if (b_bits == 0x02)
12743 bf_regs = 4;
12744 else
12745 bf_regs = 0;
12746
12747 for (index_r = 0; index_r < bf_regs; index_r++)
12748 {
12749 for (index_e = 0; index_e < f_elem; index_e++)
12750 {
12751 record_buf_mem[index_m++] = f_ebytes;
12752 record_buf_mem[index_m++] = address;
12753 address = address + f_ebytes;
12754 thumb2_insn_r->mem_rec_count += 1;
12755 }
12756 }
12757 }
12758 /* Handle VST2. */
12759 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12760 {
12761 if (b_bits == 0x09 || b_bits == 0x08)
12762 bf_regs = 1;
12763 else if (b_bits == 0x03)
12764 bf_regs = 2;
12765 else
12766 bf_regs = 0;
12767
12768 for (index_r = 0; index_r < bf_regs; index_r++)
12769 for (index_e = 0; index_e < f_elem; index_e++)
12770 {
12771 for (loop_t = 0; loop_t < 2; loop_t++)
12772 {
12773 record_buf_mem[index_m++] = f_ebytes;
12774 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12775 thumb2_insn_r->mem_rec_count += 1;
12776 }
12777 address = address + (2 * f_ebytes);
12778 }
12779 }
12780 /* Handle VST3. */
12781 else if ((b_bits & 0x0e) == 0x04)
12782 {
12783 for (index_e = 0; index_e < f_elem; index_e++)
12784 {
12785 for (loop_t = 0; loop_t < 3; loop_t++)
12786 {
12787 record_buf_mem[index_m++] = f_ebytes;
12788 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12789 thumb2_insn_r->mem_rec_count += 1;
12790 }
12791 address = address + (3 * f_ebytes);
12792 }
12793 }
12794 /* Handle VST4. */
12795 else if (!(b_bits & 0x0e))
12796 {
12797 for (index_e = 0; index_e < f_elem; index_e++)
12798 {
12799 for (loop_t = 0; loop_t < 4; loop_t++)
12800 {
12801 record_buf_mem[index_m++] = f_ebytes;
12802 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12803 thumb2_insn_r->mem_rec_count += 1;
12804 }
12805 address = address + (4 * f_ebytes);
12806 }
12807 }
12808 }
12809 else
12810 {
12811 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12812
12813 if (bft_size == 0x00)
12814 f_ebytes = 1;
12815 else if (bft_size == 0x01)
12816 f_ebytes = 2;
12817 else if (bft_size == 0x02)
12818 f_ebytes = 4;
12819 else
12820 f_ebytes = 0;
12821
12822 /* Handle VST1. */
12823 if (!(b_bits & 0x0b) || b_bits == 0x08)
12824 thumb2_insn_r->mem_rec_count = 1;
12825 /* Handle VST2. */
12826 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12827 thumb2_insn_r->mem_rec_count = 2;
12828 /* Handle VST3. */
12829 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12830 thumb2_insn_r->mem_rec_count = 3;
12831 /* Handle VST4. */
12832 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12833 thumb2_insn_r->mem_rec_count = 4;
12834
12835 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12836 {
12837 record_buf_mem[2 * index_m] = f_ebytes;
12838 record_buf_mem[2 * index_m + 1] = address + (index_m * f_ebytes);
12839 }
12840 }
12841 }
12842 else
12843 {
12844 if (!a_bit)
12845 {
12846 /* Handle VLD1. */
12847 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12848 thumb2_insn_r->reg_rec_count = 1;
12849 /* Handle VLD2. */
12850 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12851 thumb2_insn_r->reg_rec_count = 2;
12852 /* Handle VLD3. */
12853 else if ((b_bits & 0x0e) == 0x04)
12854 thumb2_insn_r->reg_rec_count = 3;
12855 /* Handle VLD4. */
12856 else if (!(b_bits & 0x0e))
12857 thumb2_insn_r->reg_rec_count = 4;
12858 }
12859 else
12860 {
12861 /* Handle VLD1. */
12862 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12863 thumb2_insn_r->reg_rec_count = 1;
12864 /* Handle VLD2. */
12865 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12866 thumb2_insn_r->reg_rec_count = 2;
12867 /* Handle VLD3. */
12868 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12869 thumb2_insn_r->reg_rec_count = 3;
12870 /* Handle VLD4. */
12871 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12872 thumb2_insn_r->reg_rec_count = 4;
12873
12874 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12875 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12876 }
12877 }
12878
12879 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12880 {
12881 record_buf[thumb2_insn_r->reg_rec_count] = reg_rn;
12882 thumb2_insn_r->reg_rec_count += 1;
12883 }
12884
12885 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12886 record_buf);
12887 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12888 record_buf_mem);
12889 return 0;
12890 }
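
/* Illustrative sketch, not part of the original file: how the element size
   F_EBYTES and per-register element count F_ELEM used by the handler above
   fall out of the two-bit "size" field in bits 6..7 of a multi-element
   VLDn/VSTn encoding.  The ex_* names are invented for this example only.  */

static inline void
ex_asimd_element_geometry (uint32_t insn, uint32_t *ebytes, uint32_t *elem)
{
  uint32_t size = (insn >> 6) & 0x3;	/* Same field as bits (insn, 6, 7).  */

  *ebytes = 1u << size;		/* 1, 2, 4 or 8 bytes per element.  */
  *elem = 8 / *ebytes;		/* Elements in one 64-bit D register.  */
}
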
12891
12892 /* Decodes thumb2 instruction type and invokes its record handler. */
12893
12894 static unsigned int
12895 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12896 {
12897 uint32_t op, op1, op2;
12898
12899 op = bit (thumb2_insn_r->arm_insn, 15);
12900 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12901 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12902
12903 if (op1 == 0x01)
12904 {
12905 if (!(op2 & 0x64))
12906 {
12907 /* Load/store multiple instruction. */
12908 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12909 }
12910 else if ((op2 & 0x64) == 0x4)
12911 {
12912 /* Load/store (dual/exclusive) and table branch instruction. */
12913 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12914 }
12915 else if ((op2 & 0x60) == 0x20)
12916 {
12917 /* Data-processing (shifted register). */
12918 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12919 }
12920 else if (op2 & 0x40)
12921 {
12922 /* Co-processor instructions. */
12923 return thumb2_record_coproc_insn (thumb2_insn_r);
12924 }
12925 }
12926 else if (op1 == 0x02)
12927 {
12928 if (op)
12929 {
12930 /* Branches and miscellaneous control instructions. */
12931 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12932 }
12933 else if (op2 & 0x20)
12934 {
12935 /* Data-processing (plain binary immediate) instruction. */
12936 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12937 }
12938 else
12939 {
12940 /* Data-processing (modified immediate). */
12941 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12942 }
12943 }
12944 else if (op1 == 0x03)
12945 {
12946 if (!(op2 & 0x71))
12947 {
12948 /* Store single data item. */
12949 return thumb2_record_str_single_data (thumb2_insn_r);
12950 }
12951 else if (!((op2 & 0x71) ^ 0x10))
12952 {
12953 /* Advanced SIMD or structure load/store instructions. */
12954 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12955 }
12956 else if (!((op2 & 0x67) ^ 0x01))
12957 {
12958 /* Load byte, memory hints instruction. */
12959 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12960 }
12961 else if (!((op2 & 0x67) ^ 0x03))
12962 {
12963 /* Load halfword, memory hints instruction. */
12964 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12965 }
12966 else if (!((op2 & 0x67) ^ 0x05))
12967 {
12968 /* Load word instruction. */
12969 return thumb2_record_ld_word (thumb2_insn_r);
12970 }
12971 else if (!((op2 & 0x70) ^ 0x20))
12972 {
12973 /* Data-processing (register) instruction. */
12974 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12975 }
12976 else if (!((op2 & 0x78) ^ 0x30))
12977 {
12978 /* Multiply, multiply accumulate, abs diff instruction. */
12979 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12980 }
12981 else if (!((op2 & 0x78) ^ 0x38))
12982 {
12983 /* Long multiply, long multiply accumulate, and divide. */
12984 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12985 }
12986 else if (op2 & 0x40)
12987 {
12988 /* Co-processor instructions. */
12989 return thumb2_record_coproc_insn (thumb2_insn_r);
12990 }
12991 }
12992
12993 return -1;
12994 }
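
/* Illustrative sketch, not part of the original decoder: where the OP, OP1
   and OP2 fields tested above sit once the two halfwords of a 32-bit Thumb-2
   instruction have been combined with the first halfword in bits 16..31.
   The input is the "mrc 15, 0, r7, cr13, cr0, {3}" encoding used by the
   selftest further down; the ex_ name is invented for this example only.  */

static void
ex_thumb2_major_fields (void)
{
  uint32_t insn = 0xee1d7f70;		/* 0xee1d << 16 | 0x7f70.  */
  uint32_t op  = (insn >> 15) & 0x1;	/* bit  (insn, 15)     -> 0.  */
  uint32_t op1 = (insn >> 27) & 0x3;	/* bits (insn, 27, 28) -> 0x01.  */
  uint32_t op2 = (insn >> 20) & 0x7f;	/* bits (insn, 20, 26) -> 0x61.  */

  /* With op1 == 0x01 and (op2 & 0x40) != 0 the decoder above takes the
     co-processor branch and calls thumb2_record_coproc_insn.  */
  gdb_assert (op == 0 && op1 == 0x01 && (op2 & 0x40) != 0);
}
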
12995
12996 namespace {
12997 /* Abstract memory reader. */
12998
12999 class abstract_memory_reader
13000 {
13001 public:
13002 /* Read LEN bytes of target memory at address MEMADDR, placing the
13003 results in GDB's memory at BUF. Return true on success. */
13004
13005 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13006 };
13007
13008 /* Instruction reader from real target. */
13009
13010 class instruction_reader : public abstract_memory_reader
13011 {
13012 public:
13013 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13014 {
13015 if (target_read_memory (memaddr, buf, len))
13016 return false;
13017 else
13018 return true;
13019 }
13020 };
13021
13022 } // namespace
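
/* Illustrative sketch, not part of the original file: a reader backed by a
   flat byte buffer, in the spirit of the selftest reader defined under
   GDB_SELF_TEST below.  It treats MEMADDR as an offset into the buffer,
   which is enough for a sketch.  The ex_ name is invented for this
   example only.  */

class ex_buffer_reader : public abstract_memory_reader
{
public:
  ex_buffer_reader (const gdb_byte *bytes, size_t size)
    : m_bytes (bytes), m_size (size)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    if (memaddr + len > m_size)
      return false;
    memcpy (buf, m_bytes + memaddr, len);
    return true;
  }

private:
  const gdb_byte *m_bytes;
  size_t m_size;
};
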
13023
13024 /* Extract an ARM, Thumb or Thumb-2 instruction of INSN_SIZE bytes.  Return
13025 0 on success and a positive value on failure. */
13026
13027 static int
13028 extract_arm_insn (abstract_memory_reader& reader,
13029 insn_decode_record *insn_record, uint32_t insn_size)
13030 {
13031 gdb_byte buf[insn_size];
13032
13033 memset (&buf[0], 0, insn_size);
13034
13035 if (!reader.read (insn_record->this_addr, buf, insn_size))
13036 return 1;
13037 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13038 insn_size,
13039 gdbarch_byte_order_for_code (insn_record->gdbarch));
13040 return 0;
13041 }
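
/* Illustrative sketch, not part of the original file: what the
   extract_unsigned_integer call above amounts to for a 2-byte little-endian
   code read.  The Thumb "uxtb r3, r3" bytes db b2 from the selftest below
   come out as the halfword 0xb2db.  The ex_ name is invented for this
   example only.  */

static inline uint32_t
ex_extract_le16 (const gdb_byte buf[2])
{
  /* The byte at the lower address is the least significant one.  */
  return (uint32_t) buf[0] | ((uint32_t) buf[1] << 8);
}
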
13042
13043 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13044
13045 /* Decode an ARM/Thumb instruction based on its condition codes and opcodes,
13046 and dispatch it to the matching record handler. */
13047
13048 static int
13049 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13050 record_type_t record_type, uint32_t insn_size)
13051 {
13052
13053 /* Bits 25, 26 and 27 (counting from bit 0) select the type of ARM
13054 instruction. */
13055 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13056 {
13057 arm_record_data_proc_misc_ld_str, /* 000. */
13058 arm_record_data_proc_imm, /* 001. */
13059 arm_record_ld_st_imm_offset, /* 010. */
13060 arm_record_ld_st_reg_offset, /* 011. */
13061 arm_record_ld_st_multiple, /* 100. */
13062 arm_record_b_bl, /* 101. */
13063 arm_record_asimd_vfp_coproc, /* 110. */
13064 arm_record_coproc_data_proc /* 111. */
13065 };
13066
13067 /* Bits 13, 14 and 15 (counting from bit 0) select the type of Thumb
13068 instruction. */
13069 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13070 {
13071 thumb_record_shift_add_sub, /* 000. */
13072 thumb_record_add_sub_cmp_mov, /* 001. */
13073 thumb_record_ld_st_reg_offset, /* 010. */
13074 thumb_record_ld_st_imm_offset, /* 011. */
13075 thumb_record_ld_st_stack, /* 100. */
13076 thumb_record_misc, /* 101. */
13077 thumb_record_ldm_stm_swi, /* 110. */
13078 thumb_record_branch /* 111. */
13079 };
13080
13081 int ret = 0; /* Return value: -1 on failure, 0 on success. */
13082 uint32_t insn_id = 0;
13083
13084 if (extract_arm_insn (reader, arm_record, insn_size))
13085 {
13086 if (record_debug)
13087 {
13088 printf_unfiltered (_("Process record: error reading memory at "
13089 "addr %s len = %d.\n"),
13090 paddress (arm_record->gdbarch,
13091 arm_record->this_addr), insn_size);
13092 }
13093 return -1;
13094 }
13095 else if (ARM_RECORD == record_type)
13096 {
13097 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13098 insn_id = bits (arm_record->arm_insn, 25, 27);
13099
13100 if (arm_record->cond == 0xf)
13101 ret = arm_record_extension_space (arm_record);
13102 else
13103 {
13104 /* The condition field is not 0xf, so the insn is not in the
13105 extension space; decode it by its major opcode bits. */
13106 ret = arm_handle_insn[insn_id] (arm_record);
13107 }
13108 if (ret != ARM_RECORD_SUCCESS)
13109 {
13110 arm_record_unsupported_insn (arm_record);
13111 ret = -1;
13112 }
13113 }
13114 else if (THUMB_RECORD == record_type)
13115 {
13116 /* Thumb instructions have no condition codes, so mark the field invalid. */
13117 arm_record->cond = -1;
13118 insn_id = bits (arm_record->arm_insn, 13, 15);
13119 ret = thumb_handle_insn[insn_id] (arm_record);
13120 if (ret != ARM_RECORD_SUCCESS)
13121 {
13122 arm_record_unsupported_insn (arm_record);
13123 ret = -1;
13124 }
13125 }
13126 else if (THUMB2_RECORD == record_type)
13127 {
13128 /* Thumb instructions have no condition codes, so mark the field invalid. */
13129 arm_record->cond = -1;
13130
13131 /* Swap the first halfword of the 32-bit Thumb instruction with the second. */
13132 arm_record->arm_insn
13133 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13134
13135 ret = thumb2_record_decode_insn_handler (arm_record);
13136
13137 if (ret != ARM_RECORD_SUCCESS)
13138 {
13139 arm_record_unsupported_insn (arm_record);
13140 ret = -1;
13141 }
13142 }
13143 else
13144 {
13145 /* Unknown record type; this should be unreachable. */
13146 gdb_assert_not_reached ("not a valid instruction, could not decode");
13147 }
13148
13149 return ret;
13150 }
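
/* Illustrative sketch, not part of the original file: the halfword swap
   applied in the THUMB2_RECORD case above.  A little-endian 4-byte read of a
   32-bit Thumb-2 instruction leaves the *second* halfword in the upper 16
   bits; rotating by 16 puts the first halfword back on top, which is the
   layout the thumb2_record_* handlers test.  The ex_ name is invented for
   this example only.  */

static inline uint32_t
ex_thumb2_swap_halfwords (uint32_t insn)
{
  /* E.g. the halfwords 0xee1d, 0x7f70 read as 0x7f70ee1d; the swap yields
     the 0xee1d7f70 the decoder expects.  */
  return (insn >> 16) | (insn << 16);
}
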
13151
13152 #if GDB_SELF_TEST
13153 namespace selftests {
13154
13155 /* Provide both 16-bit and 32-bit thumb instructions. */
13156
13157 class instruction_reader_thumb : public abstract_memory_reader
13158 {
13159 public:
13160 template<size_t SIZE>
13161 instruction_reader_thumb (enum bfd_endian endian,
13162 const uint16_t (&insns)[SIZE])
13163 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13164 {}
13165
13166 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13167 {
13168 SELF_CHECK (len == 4 || len == 2);
13169 SELF_CHECK (memaddr % 2 == 0);
13170 SELF_CHECK ((memaddr / 2) < m_insns_size);
13171
13172 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13173 if (len == 4)
13174 {
13175 store_unsigned_integer (&buf[2], 2, m_endian,
13176 m_insns[memaddr / 2 + 1]);
13177 }
13178 return true;
13179 }
13180
13181 private:
13182 enum bfd_endian m_endian;
13183 const uint16_t *m_insns;
13184 size_t m_insns_size;
13185 };
13186
13187 static void
13188 arm_record_test (void)
13189 {
13190 struct gdbarch_info info;
13191 gdbarch_info_init (&info);
13192 info.bfd_arch_info = bfd_scan_arch ("arm");
13193
13194 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13195
13196 SELF_CHECK (gdbarch != NULL);
13197
13198 /* 16-bit Thumb instructions. */
13199 {
13200 insn_decode_record arm_record;
13201
13202 memset (&arm_record, 0, sizeof (insn_decode_record));
13203 arm_record.gdbarch = gdbarch;
13204
13205 static const uint16_t insns[] = {
13206 /* db b2 uxtb r3, r3 */
13207 0xb2db,
13208 /* cd 58 ldr r5, [r1, r3] */
13209 0x58cd,
13210 };
13211
13212 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13213 instruction_reader_thumb reader (endian, insns);
13214 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13215 THUMB_INSN_SIZE_BYTES);
13216
13217 SELF_CHECK (ret == 0);
13218 SELF_CHECK (arm_record.mem_rec_count == 0);
13219 SELF_CHECK (arm_record.reg_rec_count == 1);
13220 SELF_CHECK (arm_record.arm_regs[0] == 3);
13221
13222 arm_record.this_addr += 2;
13223 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13224 THUMB_INSN_SIZE_BYTES);
13225
13226 SELF_CHECK (ret == 0);
13227 SELF_CHECK (arm_record.mem_rec_count == 0);
13228 SELF_CHECK (arm_record.reg_rec_count == 1);
13229 SELF_CHECK (arm_record.arm_regs[0] == 5);
13230 }
13231
13232 /* 32-bit Thumb-2 instructions. */
13233 {
13234 insn_decode_record arm_record;
13235
13236 memset (&arm_record, 0, sizeof (insn_decode_record));
13237 arm_record.gdbarch = gdbarch;
13238
13239 static const uint16_t insns[] = {
13240 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13241 0xee1d, 0x7f70,
13242 };
13243
13244 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13245 instruction_reader_thumb reader (endian, insns);
13246 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13247 THUMB2_INSN_SIZE_BYTES);
13248
13249 SELF_CHECK (ret == 0);
13250 SELF_CHECK (arm_record.mem_rec_count == 0);
13251 SELF_CHECK (arm_record.reg_rec_count == 1);
13252 SELF_CHECK (arm_record.arm_regs[0] == 7);
13253 }
13254 }
13255 } // namespace selftests
13256 #endif /* GDB_SELF_TEST */
13257
13258 /* Cleans up local record registers and memory allocations. */
13259
13260 static void
13261 deallocate_reg_mem (insn_decode_record *record)
13262 {
13263 xfree (record->arm_regs);
13264 xfree (record->arm_mems);
13265 }
13266
13267
13268 /* Parse the current instruction, and record the values of the registers and
13269 the memory that the instruction will change, into "record_arch_list".
13270 Return -1 if something goes wrong, 0 otherwise. */
13271
13272 int
13273 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13274 CORE_ADDR insn_addr)
13275 {
13276
13277 uint32_t no_of_rec = 0;
13278 int ret = 0; /* Return value: -1 on record failure, 0 on success. */
13279 ULONGEST t_bit = 0, insn_id = 0;
13280
13281 ULONGEST u_regval = 0;
13282
13283 insn_decode_record arm_record;
13284
13285 memset (&arm_record, 0, sizeof (insn_decode_record));
13286 arm_record.regcache = regcache;
13287 arm_record.this_addr = insn_addr;
13288 arm_record.gdbarch = gdbarch;
13289
13290
13291 if (record_debug > 1)
13292 {
13293 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13294 "addr = %s\n",
13295 paddress (gdbarch, arm_record.this_addr));
13296 }
13297
13298 instruction_reader reader;
13299 if (extract_arm_insn (reader, &arm_record, 2))
13300 {
13301 if (record_debug)
13302 {
13303 printf_unfiltered (_("Process record: error reading memory at "
13304 "addr %s len = %d.\n"),
13305 paddress (arm_record.gdbarch,
13306 arm_record.this_addr), 2);
13307 }
13308 return -1;
13309 }
13310
13311 /* Check whether the instruction is a Thumb or an ARM one. */
13312
13313 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13314 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13315
13316
13317 if (!(u_regval & t_bit))
13318 {
13319 /* We are decoding an ARM instruction. */
13320 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13321 }
13322 else
13323 {
13324 insn_id = bits (arm_record.arm_insn, 11, 15);
13325 /* Is it a 32-bit Thumb-2 instruction? */
13326 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13327 {
13328 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13329 THUMB2_INSN_SIZE_BYTES);
13330 }
13331 else
13332 {
13333 /* We are decoding a 16-bit Thumb instruction. */
13334 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13335 THUMB_INSN_SIZE_BYTES);
13336 }
13337 }
13338
13339 if (0 == ret)
13340 {
13341 /* Record registers. */
13342 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13343 if (arm_record.arm_regs)
13344 {
13345 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13346 {
13347 if (record_full_arch_list_add_reg
13348 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13349 ret = -1;
13350 }
13351 }
13352 /* Record memories. */
13353 if (arm_record.arm_mems)
13354 {
13355 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13356 {
13357 if (record_full_arch_list_add_mem
13358 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13359 arm_record.arm_mems[no_of_rec].len))
13360 ret = -1;
13361 }
13362 }
13363
13364 if (record_full_arch_list_add_end ())
13365 ret = -1;
13366 }
13367
13368
13369 deallocate_reg_mem (&arm_record);
13370
13371 return ret;
13372 }
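
/* Illustrative sketch, not part of the original file: the Thumb versus
   Thumb-2 test used above.  A 16-bit halfword starts a 32-bit Thumb-2
   instruction exactly when its top five bits are 0b11101, 0b11110 or 0b11111
   (0x1d, 0x1e, 0x1f); anything else is a 16-bit Thumb instruction.  The ex_
   name is invented for this example only.  */

static inline bool
ex_starts_thumb2_insn (uint32_t halfword)
{
  uint32_t top5 = (halfword >> 11) & 0x1f;	/* bits (halfword, 11, 15).  */

  return top5 == 0x1d || top5 == 0x1e || top5 == 0x1f;
}
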
13373
13374 /* See arm-tdep.h. */
13375
13376 const target_desc *
13377 arm_read_description (arm_fp_type fp_type)
13378 {
13379 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13380
13381 if (tdesc == nullptr)
13382 {
13383 tdesc = arm_create_target_description (fp_type);
13384 tdesc_arm_list[fp_type] = tdesc;
13385 }
13386
13387 return tdesc;
13388 }
13389
13390 /* See arm-tdep.h. */
13391
13392 const target_desc *
13393 arm_read_mprofile_description (arm_m_profile_type m_type)
13394 {
13395 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13396
13397 if (tdesc == nullptr)
13398 {
13399 tdesc = arm_create_mprofile_target_description (m_type);
13400 tdesc_arm_mprofile_list[m_type] = tdesc;
13401 }
13402
13403 return tdesc;
13404 }
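
/* Illustrative usage sketch, not part of the original file: both readers
   above create a target description on first use and cache it, so repeated
   calls with the same argument return the same pointer.  The
   ARM_FP_TYPE_VFPV2 enumerator is assumed to be one of the arm_fp_type
   values declared in arch/arm.h.  */

static void
ex_tdesc_is_cached (void)
{
  const target_desc *first = arm_read_description (ARM_FP_TYPE_VFPV2);
  const target_desc *again = arm_read_description (ARM_FP_TYPE_VFPV2);

  /* The second lookup hits the tdesc_arm_list cache.  */
  gdb_assert (first == again);
}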