Make demangled_name_entry::language not a bitfield
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
53
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
57
58 #include "record.h"
59 #include "record-full.h"
60 #include <algorithm>
61
62 #if GDB_SELF_TEST
63 #include "gdbsupport/selftest.h"
64 #endif
65
/* When true, emit extra ARM-specific debug output (e.g. from the
   prologue scanner below).  Presumably toggled by a "set debug arm"
   command registered elsewhere in this file -- confirm there.  */
static bool arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)
80
/* One ELF mapping symbol ($a, $d or $t).  VALUE is the symbol's
   section-relative address; TYPE is its class character ('t' marks
   Thumb code -- see the check in arm_pc_is_thumb).  */
struct arm_mapping_symbol
{
  /* Section-relative address of the mapping symbol.  */
  bfd_vma value;
  /* Class character of the symbol ('a', 'd' or 't').  */
  char type;

  /* Order by address, so a vector of these can be sorted and
     binary-searched (see arm_find_mapping_symbol).  */
  bool operator< (const arm_mapping_symbol &other) const
  { return this->value < other.value; }
};

typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
91
/* Per-objfile cache of mapping symbols, lazily sorted; attached to an
   objfile via arm_objfile_data_key below.  */
struct arm_per_objfile
{
  /* Allocate one mapping-symbol vector and one "sorted" flag per BFD
     section.  The bool array is value-initialized, i.e. all false.  */
  explicit arm_per_objfile (size_t num_sections)
  : section_maps (new arm_mapping_symbol_vec[num_sections]),
    section_maps_sorted (new bool[num_sections] ())
  {}

  DISABLE_COPY_AND_ASSIGN (arm_per_objfile);

  /* Information about mapping symbols ($a, $d, $t) in the objfile.

     The format is an array of vectors of arm_mapping_symbols, there is one
     vector for each section of the objfile (the array is index by BFD section
     index).

     For each section, the vector of arm_mapping_symbol is sorted by
     symbol value (address).  */
  std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;

  /* For each corresponding element of section_maps above, is this vector
     sorted.  Sorting is deferred until first lookup; see
     arm_find_mapping_symbol.  */
  std::unique_ptr<bool[]> section_maps_sorted;
};
115
/* Per-objfile data used for mapping symbols.  */
static objfile_key<arm_per_objfile> arm_objfile_data_key;

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
{
  "auto",
  "arm",
  "thumb",
  NULL
};

/* User-configurable mode settings: the fallback is consulted only when
   no other evidence is available, while the force setting overrides
   all other evidence -- see arm_pc_is_thumb.  */
static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";
163
/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  Several aliases may map to the
   same register number.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};
214
/* Canonical register names, indexed by GDB-internal register number
   (see the trailing comments on each row).  */
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Holds the current set of options to be passed to the disassembler.  */
static char *arm_disassembler_options;

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* All possible arm target descriptors, lazily populated caches indexed
   by FP / M-profile type.  */
static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
236
/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (const char *, int,
					 struct cmd_list_element *);
static void show_disassembly_style_sfunc (struct ui_file *, int,
					  struct cmd_list_element *,
					  const char *);

/* Forward declarations for NEON quad (Q) pseudo-register accessors,
   defined later in this file.  */
static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						readable_regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};
264
/* Per-frame cache filled in by the prologue analyzers below.  */
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This effects the behaviour of
   certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION 5

/* See arm-tdep.h.  */

bool arm_apcs_32 = true;
298
299 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
300
301 int
302 arm_psr_thumb_bit (struct gdbarch *gdbarch)
303 {
304 if (gdbarch_tdep (gdbarch)->is_m)
305 return XPSR_T;
306 else
307 return CPSR_T;
308 }
309
310 /* Determine if the processor is currently executing in Thumb mode. */
311
312 int
313 arm_is_thumb (struct regcache *regcache)
314 {
315 ULONGEST cpsr;
316 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
317
318 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
319
320 return (cpsr & t_bit) != 0;
321 }
322
323 /* Determine if FRAME is executing in Thumb mode. */
324
325 int
326 arm_frame_is_thumb (struct frame_info *frame)
327 {
328 CORE_ADDR cpsr;
329 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
330
331 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
332 directly (from a signal frame or dummy frame) or by interpreting
333 the saved LR (from a prologue or DWARF frame). So consult it and
334 trust the unwinders. */
335 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
336
337 return (cpsr & t_bit) != 0;
338 }
339
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location (absolute address) of the mapping
   symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      arm_per_objfile *data = arm_objfile_data_key.get (sec->objfile);
      if (data != NULL)
	{
	  unsigned int section_idx = sec->the_bfd_section->index;
	  arm_mapping_symbol_vec &map
	    = data->section_maps[section_idx];

	  /* Sort the vector on first use.  */
	  if (!data->section_maps_sorted[section_idx])
	    {
	      std::sort (map.begin (), map.end ());
	      data->section_maps_sorted[section_idx] = true;
	    }

	  /* Search key: mapping symbols store section-relative
	     addresses, so subtract the section base.  The type field
	     (0) does not participate in the ordering.  */
	  struct arm_mapping_symbol map_key
	    = { memaddr - obj_section_addr (sec), 0 };
	  arm_mapping_symbol_vec::const_iterator it
	    = std::lower_bound (map.begin (), map.end (), map_key);

	  /* std::lower_bound finds the earliest ordered insertion
	     point.  If the symbol at this position starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (it < map.end ())
	    {
	      if (it->value == map_key.value)
		{
		  if (start)
		    *start = it->value + obj_section_addr (sec);
		  return it->type;
		}
	    }

	  /* Fall back to the symbol immediately before the insertion
	     point, whose range extends up to MEMADDR.  */
	  if (it > map.begin ())
	    {
	      arm_mapping_symbol_vec::const_iterator prev_it
		= it - 1;

	      if (start)
		*start = prev_it->value + obj_section_addr (sec);
	      return prev_it->type;
	    }
	}
    }

  /* No section, no data, or no mapping symbol at or before MEMADDR.  */
  return 0;
}
400
401 /* Determine if the program counter specified in MEMADDR is in a Thumb
402 function. This function should be called for addresses unrelated to
403 any executing frame; otherwise, prefer arm_frame_is_thumb. */
404
405 int
406 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
407 {
408 struct bound_minimal_symbol sym;
409 char type;
410 arm_displaced_step_closure *dsc
411 = ((arm_displaced_step_closure * )
412 get_displaced_step_closure_by_addr (memaddr));
413
414 /* If checking the mode of displaced instruction in copy area, the mode
415 should be determined by instruction on the original address. */
416 if (dsc)
417 {
418 if (debug_displaced)
419 fprintf_unfiltered (gdb_stdlog,
420 "displaced: check mode of %.8lx instead of %.8lx\n",
421 (unsigned long) dsc->insn_addr,
422 (unsigned long) memaddr);
423 memaddr = dsc->insn_addr;
424 }
425
426 /* If bit 0 of the address is set, assume this is a Thumb address. */
427 if (IS_THUMB_ADDR (memaddr))
428 return 1;
429
430 /* If the user wants to override the symbol table, let him. */
431 if (strcmp (arm_force_mode_string, "arm") == 0)
432 return 0;
433 if (strcmp (arm_force_mode_string, "thumb") == 0)
434 return 1;
435
436 /* ARM v6-M and v7-M are always in Thumb mode. */
437 if (gdbarch_tdep (gdbarch)->is_m)
438 return 1;
439
440 /* If there are mapping symbols, consult them. */
441 type = arm_find_mapping_symbol (memaddr, NULL);
442 if (type)
443 return type == 't';
444
445 /* Thumb functions have a "special" bit set in minimal symbols. */
446 sym = lookup_minimal_symbol_by_pc (memaddr);
447 if (sym.minsym)
448 return (MSYMBOL_IS_SPECIAL (sym.minsym));
449
450 /* If the user wants to override the fallback mode, let them. */
451 if (strcmp (arm_fallback_mode_string, "arm") == 0)
452 return 0;
453 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
454 return 1;
455
456 /* If we couldn't find any symbol, but we're talking to a running
457 target, then trust the current value of $cpsr. This lets
458 "display/i $pc" always show the correct mode (though if there is
459 a symbol table we will not reach here, so it still may not be
460 displayed in the mode it will be executed). */
461 if (target_has_registers)
462 return arm_frame_is_thumb (get_current_frame ());
463
464 /* Otherwise we're out of luck; we assume ARM. */
465 return 0;
466 }
467
468 /* Determine if the address specified equals any of these magic return
469 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
470 architectures.
471
472 From ARMv6-M Reference Manual B1.5.8
473 Table B1-5 Exception return behavior
474
475 EXC_RETURN Return To Return Stack
476 0xFFFFFFF1 Handler mode Main
477 0xFFFFFFF9 Thread mode Main
478 0xFFFFFFFD Thread mode Process
479
480 From ARMv7-M Reference Manual B1.5.8
481 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
482
483 EXC_RETURN Return To Return Stack
484 0xFFFFFFF1 Handler mode Main
485 0xFFFFFFF9 Thread mode Main
486 0xFFFFFFFD Thread mode Process
487
488 Table B1-9 EXC_RETURN definition of exception return behavior, with
489 FP
490
491 EXC_RETURN Return To Return Stack Frame Type
492 0xFFFFFFE1 Handler mode Main Extended
493 0xFFFFFFE9 Thread mode Main Extended
494 0xFFFFFFED Thread mode Process Extended
495 0xFFFFFFF1 Handler mode Main Basic
496 0xFFFFFFF9 Thread mode Main Basic
497 0xFFFFFFFD Thread mode Process Basic
498
499 For more details see "B1.5.8 Exception return behavior"
500 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
501
502 static int
503 arm_m_addr_is_magic (CORE_ADDR addr)
504 {
505 switch (addr)
506 {
507 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
508 the exception return behavior. */
509 case 0xffffffe1:
510 case 0xffffffe9:
511 case 0xffffffed:
512 case 0xfffffff1:
513 case 0xfffffff9:
514 case 0xfffffffd:
515 /* Address is magic. */
516 return 1;
517
518 default:
519 /* Address is not magic. */
520 return 0;
521 }
522 }
523
524 /* Remove useless bits from addresses in a running program. */
525 static CORE_ADDR
526 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
527 {
528 /* On M-profile devices, do not strip the low bit from EXC_RETURN
529 (the magic exception return address). */
530 if (gdbarch_tdep (gdbarch)->is_m
531 && arm_m_addr_is_magic (val))
532 return val;
533
534 if (arm_apcs_32)
535 return UNMAKE_THUMB_ADDR (val);
536 else
537 return (val & 0x03fffffc);
538 }
539
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the two characters of the "__" stub
	 prefix so that a stub for a symbol which itself begins with
	 "__" (e.g. "____truncdfsf2_from_thumb" wrapping __truncdfsf2)
	 still matches the startswith checks below.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
593
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  The result is the reassembled 16-bit immediate
   (imm4:i:imm3:imm8).  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  The result is the reassembled 16-bit
   immediate (imm4:imm12).  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12)      \
   | bits ((insn), 0, 11))
608
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit (i:imm3:imm8) modified immediate field; the result
   is the expanded 32-bit value.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rotation = imm >> 7;

  /* Rotations of 8 or more encode an 8-bit pattern with an implicit
     leading one, rotated right by ROTATION.  */
  if (rotation >= 8)
    return (0x80 | (imm & 0x7f)) << (32 - rotation);

  /* Otherwise the low byte is replicated into a fixed byte pattern
     selected by the top two bits of the field.  */
  unsigned int byte = imm & 0xff;

  if (rotation < 2)
    return byte;				/* 00000000_00000000_00000000_abcdefgh */
  if (rotation < 4)
    return byte | (byte << 16);			/* 00000000_abcdefgh_00000000_abcdefgh */
  if (rotation < 6)
    return (byte << 8) | (byte << 24);		/* abcdefgh_00000000_abcdefgh_00000000 */
  return (byte | (byte << 8)
	  | (byte << 16) | (byte << 24));	/* abcdefgh_abcdefgh_abcdefgh_abcdefgh */
}
632
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  return (insn & 0xfe00) == 0xbc00;	/* pop <registers> */
}
643
644 /* Analyze a Thumb prologue, looking for a recognizable stack frame
645 and frame pointer. Scan until we encounter a store that could
646 clobber the stack frame unexpectedly, or an unknown instruction.
647 Return the last address which is definitely safe to skip for an
648 initial breakpoint. */
649
650 static CORE_ADDR
651 thumb_analyze_prologue (struct gdbarch *gdbarch,
652 CORE_ADDR start, CORE_ADDR limit,
653 struct arm_prologue_cache *cache)
654 {
655 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
656 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
657 int i;
658 pv_t regs[16];
659 CORE_ADDR offset;
660 CORE_ADDR unrecognized_pc = 0;
661
662 for (i = 0; i < 16; i++)
663 regs[i] = pv_register (i, 0);
664 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
665
666 while (start < limit)
667 {
668 unsigned short insn;
669
670 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
671
672 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
673 {
674 int regno;
675 int mask;
676
677 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
678 break;
679
680 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
681 whether to save LR (R14). */
682 mask = (insn & 0xff) | ((insn & 0x100) << 6);
683
684 /* Calculate offsets of saved R0-R7 and LR. */
685 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
686 if (mask & (1 << regno))
687 {
688 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
689 -4);
690 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
691 }
692 }
693 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
694 {
695 offset = (insn & 0x7f) << 2; /* get scaled offset */
696 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
697 -offset);
698 }
699 else if (thumb_instruction_restores_sp (insn))
700 {
701 /* Don't scan past the epilogue. */
702 break;
703 }
704 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
705 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
706 (insn & 0xff) << 2);
707 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
708 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
709 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
710 bits (insn, 6, 8));
711 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
712 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
713 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
714 bits (insn, 0, 7));
715 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
716 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
717 && pv_is_constant (regs[bits (insn, 3, 5)]))
718 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
719 regs[bits (insn, 6, 8)]);
720 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
721 && pv_is_constant (regs[bits (insn, 3, 6)]))
722 {
723 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
724 int rm = bits (insn, 3, 6);
725 regs[rd] = pv_add (regs[rd], regs[rm]);
726 }
727 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
728 {
729 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
730 int src_reg = (insn & 0x78) >> 3;
731 regs[dst_reg] = regs[src_reg];
732 }
733 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
734 {
735 /* Handle stores to the stack. Normally pushes are used,
736 but with GCC -mtpcs-frame, there may be other stores
737 in the prologue to create the frame. */
738 int regno = (insn >> 8) & 0x7;
739 pv_t addr;
740
741 offset = (insn & 0xff) << 2;
742 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
743
744 if (stack.store_would_trash (addr))
745 break;
746
747 stack.store (addr, 4, regs[regno]);
748 }
749 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
750 {
751 int rd = bits (insn, 0, 2);
752 int rn = bits (insn, 3, 5);
753 pv_t addr;
754
755 offset = bits (insn, 6, 10) << 2;
756 addr = pv_add_constant (regs[rn], offset);
757
758 if (stack.store_would_trash (addr))
759 break;
760
761 stack.store (addr, 4, regs[rd]);
762 }
763 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
764 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
765 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
766 /* Ignore stores of argument registers to the stack. */
767 ;
768 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
769 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
770 /* Ignore block loads from the stack, potentially copying
771 parameters from memory. */
772 ;
773 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
774 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
775 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
776 /* Similarly ignore single loads from the stack. */
777 ;
778 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
779 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
780 /* Skip register copies, i.e. saves to another register
781 instead of the stack. */
782 ;
783 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
784 /* Recognize constant loads; even with small stacks these are necessary
785 on Thumb. */
786 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
787 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
788 {
789 /* Constant pool loads, for the same reason. */
790 unsigned int constant;
791 CORE_ADDR loc;
792
793 loc = start + 4 + bits (insn, 0, 7) * 4;
794 constant = read_memory_unsigned_integer (loc, 4, byte_order);
795 regs[bits (insn, 8, 10)] = pv_constant (constant);
796 }
797 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
798 {
799 unsigned short inst2;
800
801 inst2 = read_code_unsigned_integer (start + 2, 2,
802 byte_order_for_code);
803
804 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
805 {
806 /* BL, BLX. Allow some special function calls when
807 skipping the prologue; GCC generates these before
808 storing arguments to the stack. */
809 CORE_ADDR nextpc;
810 int j1, j2, imm1, imm2;
811
812 imm1 = sbits (insn, 0, 10);
813 imm2 = bits (inst2, 0, 10);
814 j1 = bit (inst2, 13);
815 j2 = bit (inst2, 11);
816
817 offset = ((imm1 << 12) + (imm2 << 1));
818 offset ^= ((!j2) << 22) | ((!j1) << 23);
819
820 nextpc = start + 4 + offset;
821 /* For BLX make sure to clear the low bits. */
822 if (bit (inst2, 12) == 0)
823 nextpc = nextpc & 0xfffffffc;
824
825 if (!skip_prologue_function (gdbarch, nextpc,
826 bit (inst2, 12) != 0))
827 break;
828 }
829
830 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
831 { registers } */
832 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
833 {
834 pv_t addr = regs[bits (insn, 0, 3)];
835 int regno;
836
837 if (stack.store_would_trash (addr))
838 break;
839
840 /* Calculate offsets of saved registers. */
841 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
842 if (inst2 & (1 << regno))
843 {
844 addr = pv_add_constant (addr, -4);
845 stack.store (addr, 4, regs[regno]);
846 }
847
848 if (insn & 0x0020)
849 regs[bits (insn, 0, 3)] = addr;
850 }
851
852 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
853 [Rn, #+/-imm]{!} */
854 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
855 {
856 int regno1 = bits (inst2, 12, 15);
857 int regno2 = bits (inst2, 8, 11);
858 pv_t addr = regs[bits (insn, 0, 3)];
859
860 offset = inst2 & 0xff;
861 if (insn & 0x0080)
862 addr = pv_add_constant (addr, offset);
863 else
864 addr = pv_add_constant (addr, -offset);
865
866 if (stack.store_would_trash (addr))
867 break;
868
869 stack.store (addr, 4, regs[regno1]);
870 stack.store (pv_add_constant (addr, 4),
871 4, regs[regno2]);
872
873 if (insn & 0x0020)
874 regs[bits (insn, 0, 3)] = addr;
875 }
876
877 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
878 && (inst2 & 0x0c00) == 0x0c00
879 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
880 {
881 int regno = bits (inst2, 12, 15);
882 pv_t addr = regs[bits (insn, 0, 3)];
883
884 offset = inst2 & 0xff;
885 if (inst2 & 0x0200)
886 addr = pv_add_constant (addr, offset);
887 else
888 addr = pv_add_constant (addr, -offset);
889
890 if (stack.store_would_trash (addr))
891 break;
892
893 stack.store (addr, 4, regs[regno]);
894
895 if (inst2 & 0x0100)
896 regs[bits (insn, 0, 3)] = addr;
897 }
898
899 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
900 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
901 {
902 int regno = bits (inst2, 12, 15);
903 pv_t addr;
904
905 offset = inst2 & 0xfff;
906 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
907
908 if (stack.store_would_trash (addr))
909 break;
910
911 stack.store (addr, 4, regs[regno]);
912 }
913
914 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
916 /* Ignore stores of argument registers to the stack. */
917 ;
918
919 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
920 && (inst2 & 0x0d00) == 0x0c00
921 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
922 /* Ignore stores of argument registers to the stack. */
923 ;
924
925 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
926 { registers } */
927 && (inst2 & 0x8000) == 0x0000
928 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
929 /* Ignore block loads from the stack, potentially copying
930 parameters from memory. */
931 ;
932
933 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
934 [Rn, #+/-imm] */
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
936 /* Similarly ignore dual loads from the stack. */
937 ;
938
939 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
940 && (inst2 & 0x0d00) == 0x0c00
941 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
942 /* Similarly ignore single loads from the stack. */
943 ;
944
945 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
946 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
947 /* Similarly ignore single loads from the stack. */
948 ;
949
950 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
951 && (inst2 & 0x8000) == 0x0000)
952 {
953 unsigned int imm = ((bits (insn, 10, 10) << 11)
954 | (bits (inst2, 12, 14) << 8)
955 | bits (inst2, 0, 7));
956
957 regs[bits (inst2, 8, 11)]
958 = pv_add_constant (regs[bits (insn, 0, 3)],
959 thumb_expand_immediate (imm));
960 }
961
962 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
963 && (inst2 & 0x8000) == 0x0000)
964 {
965 unsigned int imm = ((bits (insn, 10, 10) << 11)
966 | (bits (inst2, 12, 14) << 8)
967 | bits (inst2, 0, 7));
968
969 regs[bits (inst2, 8, 11)]
970 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
971 }
972
973 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
974 && (inst2 & 0x8000) == 0x0000)
975 {
976 unsigned int imm = ((bits (insn, 10, 10) << 11)
977 | (bits (inst2, 12, 14) << 8)
978 | bits (inst2, 0, 7));
979
980 regs[bits (inst2, 8, 11)]
981 = pv_add_constant (regs[bits (insn, 0, 3)],
982 - (CORE_ADDR) thumb_expand_immediate (imm));
983 }
984
985 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
986 && (inst2 & 0x8000) == 0x0000)
987 {
988 unsigned int imm = ((bits (insn, 10, 10) << 11)
989 | (bits (inst2, 12, 14) << 8)
990 | bits (inst2, 0, 7));
991
992 regs[bits (inst2, 8, 11)]
993 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
994 }
995
996 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
997 {
998 unsigned int imm = ((bits (insn, 10, 10) << 11)
999 | (bits (inst2, 12, 14) << 8)
1000 | bits (inst2, 0, 7));
1001
1002 regs[bits (inst2, 8, 11)]
1003 = pv_constant (thumb_expand_immediate (imm));
1004 }
1005
1006 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1007 {
1008 unsigned int imm
1009 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1010
1011 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1012 }
1013
1014 else if (insn == 0xea5f /* mov.w Rd,Rm */
1015 && (inst2 & 0xf0f0) == 0)
1016 {
1017 int dst_reg = (inst2 & 0x0f00) >> 8;
1018 int src_reg = inst2 & 0xf;
1019 regs[dst_reg] = regs[src_reg];
1020 }
1021
1022 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1023 {
1024 /* Constant pool loads. */
1025 unsigned int constant;
1026 CORE_ADDR loc;
1027
1028 offset = bits (inst2, 0, 11);
1029 if (insn & 0x0080)
1030 loc = start + 4 + offset;
1031 else
1032 loc = start + 4 - offset;
1033
1034 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1035 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1036 }
1037
1038 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1039 {
1040 /* Constant pool loads. */
1041 unsigned int constant;
1042 CORE_ADDR loc;
1043
1044 offset = bits (inst2, 0, 7) << 2;
1045 if (insn & 0x0080)
1046 loc = start + 4 + offset;
1047 else
1048 loc = start + 4 - offset;
1049
1050 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1051 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1052
1053 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1054 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1055 }
1056
1057 else if (thumb2_instruction_changes_pc (insn, inst2))
1058 {
1059 /* Don't scan past anything that might change control flow. */
1060 break;
1061 }
1062 else
1063 {
1064 /* The optimizer might shove anything into the prologue,
1065 so we just skip what we don't recognize. */
1066 unrecognized_pc = start;
1067 }
1068
1069 start += 2;
1070 }
1071 else if (thumb_instruction_changes_pc (insn))
1072 {
1073 /* Don't scan past anything that might change control flow. */
1074 break;
1075 }
1076 else
1077 {
1078 /* The optimizer might shove anything into the prologue,
1079 so we just skip what we don't recognize. */
1080 unrecognized_pc = start;
1081 }
1082
1083 start += 2;
1084 }
1085
1086 if (arm_debug)
1087 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1088 paddress (gdbarch, start));
1089
1090 if (unrecognized_pc == 0)
1091 unrecognized_pc = start;
1092
1093 if (cache == NULL)
1094 return unrecognized_pc;
1095
1096 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1097 {
1098 /* Frame pointer is fp. Frame size is constant. */
1099 cache->framereg = ARM_FP_REGNUM;
1100 cache->framesize = -regs[ARM_FP_REGNUM].k;
1101 }
1102 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1103 {
1104 /* Frame pointer is r7. Frame size is constant. */
1105 cache->framereg = THUMB_FP_REGNUM;
1106 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1107 }
1108 else
1109 {
1110 /* Try the stack pointer... this is a bit desperate. */
1111 cache->framereg = ARM_SP_REGNUM;
1112 cache->framesize = -regs[ARM_SP_REGNUM].k;
1113 }
1114
1115 for (i = 0; i < 16; i++)
1116 if (stack.find_reg (gdbarch, i, &offset))
1117 cache->saved_regs[i].addr = offset;
1118
1119 return unrecognized_pc;
1120 }
1121
1122
/* Try to analyze the instructions starting from PC, which are expected
   to load the address of symbol __stack_chk_guard.  If the sequence is
   recognized, return the loaded address (the address of the guard
   variable), set the destination register number in *DESTREG, and set
   the size in bytes of the loading instruction sequence in *OFFSET.
   Return 0 if the instructions are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  /* PC-relative literal-pool load: base is the word-aligned PC
	     plus 4 (Thumb pipeline offset); the pool slot holds the
	     guard's address.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* Look for the matching movt in the following 32-bit
	     instruction; together they build a full 32-bit address.  */
	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  /* ARM-mode PC reads as the instruction address plus 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1206
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary from different ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      /* The load must be from the register Step 1 wrote.  */
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      /* The store must write the value Step 2 loaded.  */
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1308
1309 /* Advance the PC across any function entry prologue instructions to
1310 reach some "real" code.
1311
1312 The APCS (ARM Procedure Call Standard) defines the following
1313 prologue:
1314
1315 mov ip, sp
1316 [stmfd sp!, {a1,a2,a3,a4}]
1317 stmfd sp!, {...,fp,ip,lr,pc}
1318 [stfe f7, [sp, #-12]!]
1319 [stfe f6, [sp, #-12]!]
1320 [stfe f5, [sp, #-12]!]
1321 [stfe f4, [sp, #-12]!]
1322 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1323
1324 static CORE_ADDR
1325 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1326 {
1327 CORE_ADDR func_addr, limit_pc;
1328
1329 /* See if we can determine the end of the prologue via the symbol table.
1330 If so, then return either PC, or the PC after the prologue, whichever
1331 is greater. */
1332 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1333 {
1334 CORE_ADDR post_prologue_pc
1335 = skip_prologue_using_sal (gdbarch, func_addr);
1336 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1337
1338 if (post_prologue_pc)
1339 post_prologue_pc
1340 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1341
1342
1343 /* GCC always emits a line note before the prologue and another
1344 one after, even if the two are at the same address or on the
1345 same line. Take advantage of this so that we do not need to
1346 know every instruction that might appear in the prologue. We
1347 will have producer information for most binaries; if it is
1348 missing (e.g. for -gstabs), assuming the GNU tools. */
1349 if (post_prologue_pc
1350 && (cust == NULL
1351 || COMPUNIT_PRODUCER (cust) == NULL
1352 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1353 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1354 return post_prologue_pc;
1355
1356 if (post_prologue_pc != 0)
1357 {
1358 CORE_ADDR analyzed_limit;
1359
1360 /* For non-GCC compilers, make sure the entire line is an
1361 acceptable prologue; GDB will round this function's
1362 return value up to the end of the following line so we
1363 can not skip just part of a line (and we do not want to).
1364
1365 RealView does not treat the prologue specially, but does
1366 associate prologue code with the opening brace; so this
1367 lets us skip the first line if we think it is the opening
1368 brace. */
1369 if (arm_pc_is_thumb (gdbarch, func_addr))
1370 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1371 post_prologue_pc, NULL);
1372 else
1373 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1374 post_prologue_pc, NULL);
1375
1376 if (analyzed_limit != post_prologue_pc)
1377 return func_addr;
1378
1379 return post_prologue_pc;
1380 }
1381 }
1382
1383 /* Can't determine prologue from the symbol table, need to examine
1384 instructions. */
1385
1386 /* Find an upper limit on the function prologue using the debug
1387 information. If the debug information could not be used to provide
1388 that bound, then use an arbitrary large number as the upper bound. */
1389 /* Like arm_scan_prologue, stop no later than pc + 64. */
1390 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1391 if (limit_pc == 0)
1392 limit_pc = pc + 64; /* Magic. */
1393
1394
1395 /* Check if this is Thumb code. */
1396 if (arm_pc_is_thumb (gdbarch, pc))
1397 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1398 else
1399 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1400 }
1401
1402 /* *INDENT-OFF* */
1403 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1404 This function decodes a Thumb function prologue to determine:
1405 1) the size of the stack frame
1406 2) which registers are saved on it
1407 3) the offsets of saved regs
1408 4) the offset from the stack pointer to the frame pointer
1409
1410 A typical Thumb function prologue would create this stack frame
1411 (offsets relative to FP)
1412 old SP -> 24 stack parameters
1413 20 LR
1414 16 R7
1415 R7 -> 0 local variables (16 bytes)
1416 SP -> -12 additional stack space (12 bytes)
1417 The frame size would thus be 36 bytes, and the frame offset would be
1418 12 bytes. The frame register is R7.
1419
1420 The comments for thumb_skip_prolog() describe the algorithm we use
1421 to detect the end of the prolog. */
1422 /* *INDENT-ON* */
1423
1424 static void
1425 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1426 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1427 {
1428 CORE_ADDR prologue_start;
1429 CORE_ADDR prologue_end;
1430
1431 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1432 &prologue_end))
1433 {
1434 /* See comment in arm_scan_prologue for an explanation of
1435 this heuristics. */
1436 if (prologue_end > prologue_start + 64)
1437 {
1438 prologue_end = prologue_start + 64;
1439 }
1440 }
1441 else
1442 /* We're in the boondocks: we have no idea where the start of the
1443 function is. */
1444 return;
1445
1446 prologue_end = std::min (prologue_end, prev_pc);
1447
1448 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1449 }
1450
1451 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1452 otherwise. */
1453
1454 static int
1455 arm_instruction_restores_sp (unsigned int insn)
1456 {
1457 if (bits (insn, 28, 31) != INST_NV)
1458 {
1459 if ((insn & 0x0df0f000) == 0x0080d000
1460 /* ADD SP (register or immediate). */
1461 || (insn & 0x0df0f000) == 0x0040d000
1462 /* SUB SP (register or immediate). */
1463 || (insn & 0x0ffffff0) == 0x01a0d000
1464 /* MOV SP. */
1465 || (insn & 0x0fff0000) == 0x08bd0000
1466 /* POP (LDMIA). */
1467 || (insn & 0x0fff0000) == 0x049d0000)
1468 /* POP of a single register. */
1469 return 1;
1470 }
1471
1472 return 0;
1473 }
1474
1475 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1476 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1477 fill it in. Return the first address not recognized as a prologue
1478 instruction.
1479
1480 We recognize all the instructions typically found in ARM prologues,
1481 plus harmless instructions which can be skipped (either for analysis
1482 purposes, or a more restrictive set that can be skipped when finding
1483 the end of the prologue). */
1484
1485 static CORE_ADDR
1486 arm_analyze_prologue (struct gdbarch *gdbarch,
1487 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1488 struct arm_prologue_cache *cache)
1489 {
1490 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1491 int regno;
1492 CORE_ADDR offset, current_pc;
1493 pv_t regs[ARM_FPS_REGNUM];
1494 CORE_ADDR unrecognized_pc = 0;
1495
1496 /* Search the prologue looking for instructions that set up the
1497 frame pointer, adjust the stack pointer, and save registers.
1498
1499 Be careful, however, and if it doesn't look like a prologue,
1500 don't try to scan it. If, for instance, a frameless function
1501 begins with stmfd sp!, then we will tell ourselves there is
1502 a frame, which will confuse stack traceback, as well as "finish"
1503 and other operations that rely on a knowledge of the stack
1504 traceback. */
1505
1506 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1507 regs[regno] = pv_register (regno, 0);
1508 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1509
1510 for (current_pc = prologue_start;
1511 current_pc < prologue_end;
1512 current_pc += 4)
1513 {
1514 unsigned int insn
1515 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1516
1517 if (insn == 0xe1a0c00d) /* mov ip, sp */
1518 {
1519 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1520 continue;
1521 }
1522 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1523 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1524 {
1525 unsigned imm = insn & 0xff; /* immediate value */
1526 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1527 int rd = bits (insn, 12, 15);
1528 imm = (imm >> rot) | (imm << (32 - rot));
1529 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1530 continue;
1531 }
1532 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1533 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1534 {
1535 unsigned imm = insn & 0xff; /* immediate value */
1536 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1537 int rd = bits (insn, 12, 15);
1538 imm = (imm >> rot) | (imm << (32 - rot));
1539 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1540 continue;
1541 }
1542 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1543 [sp, #-4]! */
1544 {
1545 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1546 break;
1547 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1548 stack.store (regs[ARM_SP_REGNUM], 4,
1549 regs[bits (insn, 12, 15)]);
1550 continue;
1551 }
1552 else if ((insn & 0xffff0000) == 0xe92d0000)
1553 /* stmfd sp!, {..., fp, ip, lr, pc}
1554 or
1555 stmfd sp!, {a1, a2, a3, a4} */
1556 {
1557 int mask = insn & 0xffff;
1558
1559 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1560 break;
1561
1562 /* Calculate offsets of saved registers. */
1563 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1564 if (mask & (1 << regno))
1565 {
1566 regs[ARM_SP_REGNUM]
1567 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1568 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1569 }
1570 }
1571 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1572 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1573 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1574 {
1575 /* No need to add this to saved_regs -- it's just an arg reg. */
1576 continue;
1577 }
1578 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1579 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1580 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1581 {
1582 /* No need to add this to saved_regs -- it's just an arg reg. */
1583 continue;
1584 }
1585 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1586 { registers } */
1587 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1588 {
1589 /* No need to add this to saved_regs -- it's just arg regs. */
1590 continue;
1591 }
1592 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1593 {
1594 unsigned imm = insn & 0xff; /* immediate value */
1595 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1596 imm = (imm >> rot) | (imm << (32 - rot));
1597 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1598 }
1599 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1600 {
1601 unsigned imm = insn & 0xff; /* immediate value */
1602 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1603 imm = (imm >> rot) | (imm << (32 - rot));
1604 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1605 }
1606 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1607 [sp, -#c]! */
1608 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1609 {
1610 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1611 break;
1612
1613 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1614 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1615 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1616 }
1617 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1618 [sp!] */
1619 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1620 {
1621 int n_saved_fp_regs;
1622 unsigned int fp_start_reg, fp_bound_reg;
1623
1624 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1625 break;
1626
1627 if ((insn & 0x800) == 0x800) /* N0 is set */
1628 {
1629 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1630 n_saved_fp_regs = 3;
1631 else
1632 n_saved_fp_regs = 1;
1633 }
1634 else
1635 {
1636 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1637 n_saved_fp_regs = 2;
1638 else
1639 n_saved_fp_regs = 4;
1640 }
1641
1642 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1643 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1644 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1645 {
1646 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1647 stack.store (regs[ARM_SP_REGNUM], 12,
1648 regs[fp_start_reg++]);
1649 }
1650 }
1651 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1652 {
1653 /* Allow some special function calls when skipping the
1654 prologue; GCC generates these before storing arguments to
1655 the stack. */
1656 CORE_ADDR dest = BranchDest (current_pc, insn);
1657
1658 if (skip_prologue_function (gdbarch, dest, 0))
1659 continue;
1660 else
1661 break;
1662 }
1663 else if ((insn & 0xf0000000) != 0xe0000000)
1664 break; /* Condition not true, exit early. */
1665 else if (arm_instruction_changes_pc (insn))
1666 /* Don't scan past anything that might change control flow. */
1667 break;
1668 else if (arm_instruction_restores_sp (insn))
1669 {
1670 /* Don't scan past the epilogue. */
1671 break;
1672 }
1673 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1674 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1675 /* Ignore block loads from the stack, potentially copying
1676 parameters from memory. */
1677 continue;
1678 else if ((insn & 0xfc500000) == 0xe4100000
1679 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1680 /* Similarly ignore single loads from the stack. */
1681 continue;
1682 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1683 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1684 register instead of the stack. */
1685 continue;
1686 else
1687 {
1688 /* The optimizer might shove anything into the prologue, if
1689 we build up cache (cache != NULL) from scanning prologue,
1690 we just skip what we don't recognize and scan further to
1691 make cache as complete as possible. However, if we skip
1692 prologue, we'll stop immediately on unrecognized
1693 instruction. */
1694 unrecognized_pc = current_pc;
1695 if (cache != NULL)
1696 continue;
1697 else
1698 break;
1699 }
1700 }
1701
1702 if (unrecognized_pc == 0)
1703 unrecognized_pc = current_pc;
1704
1705 if (cache)
1706 {
1707 int framereg, framesize;
1708
1709 /* The frame size is just the distance from the frame register
1710 to the original stack pointer. */
1711 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1712 {
1713 /* Frame pointer is fp. */
1714 framereg = ARM_FP_REGNUM;
1715 framesize = -regs[ARM_FP_REGNUM].k;
1716 }
1717 else
1718 {
1719 /* Try the stack pointer... this is a bit desperate. */
1720 framereg = ARM_SP_REGNUM;
1721 framesize = -regs[ARM_SP_REGNUM].k;
1722 }
1723
1724 cache->framereg = framereg;
1725 cache->framesize = framesize;
1726
1727 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1728 if (stack.find_reg (gdbarch, regno, &offset))
1729 cache->saved_regs[regno].addr = offset;
1730 }
1731
1732 if (arm_debug)
1733 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1734 paddress (gdbarch, unrecognized_pc));
1735
1736 return unrecognized_pc;
1737 }
1738
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill CACHE with the frame register, frame size and saved-register
   offsets it establishes.  Dispatches to thumb_scan_prologue for
   Thumb frames; otherwise bounds the scan region and delegates to
   arm_analyze_prologue.  */

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      /* AAPCS does not use a frame register, so we can abort here.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
	return;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
					      &return_value))
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the address we are unwinding from.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
1833
1834 static struct arm_prologue_cache *
1835 arm_make_prologue_cache (struct frame_info *this_frame)
1836 {
1837 int reg;
1838 struct arm_prologue_cache *cache;
1839 CORE_ADDR unwound_fp;
1840
1841 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1842 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1843
1844 arm_scan_prologue (this_frame, cache);
1845
1846 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1847 if (unwound_fp == 0)
1848 return cache;
1849
1850 cache->prev_sp = unwound_fp + cache->framesize;
1851
1852 /* Calculate actual addresses of saved registers using offsets
1853 determined by arm_scan_prologue. */
1854 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1855 if (trad_frame_addr_p (cache->saved_regs, reg))
1856 cache->saved_regs[reg].addr += cache->prev_sp;
1857
1858 return cache;
1859 }
1860
1861 /* Implementation of the stop_reason hook for arm_prologue frames. */
1862
1863 static enum unwind_stop_reason
1864 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1865 void **this_cache)
1866 {
1867 struct arm_prologue_cache *cache;
1868 CORE_ADDR pc;
1869
1870 if (*this_cache == NULL)
1871 *this_cache = arm_make_prologue_cache (this_frame);
1872 cache = (struct arm_prologue_cache *) *this_cache;
1873
1874 /* This is meant to halt the backtrace at "_start". */
1875 pc = get_frame_pc (this_frame);
1876 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1877 return UNWIND_OUTERMOST;
1878
1879 /* If we've hit a wall, stop. */
1880 if (cache->prev_sp == 0)
1881 return UNWIND_OUTERMOST;
1882
1883 return UNWIND_NO_REASON;
1884 }
1885
1886 /* Our frame ID for a normal frame is the current function's starting PC
1887 and the caller's SP when we were called. */
1888
1889 static void
1890 arm_prologue_this_id (struct frame_info *this_frame,
1891 void **this_cache,
1892 struct frame_id *this_id)
1893 {
1894 struct arm_prologue_cache *cache;
1895 struct frame_id id;
1896 CORE_ADDR pc, func;
1897
1898 if (*this_cache == NULL)
1899 *this_cache = arm_make_prologue_cache (this_frame);
1900 cache = (struct arm_prologue_cache *) *this_cache;
1901
1902 /* Use function start address as part of the frame ID. If we cannot
1903 identify the start address (due to missing symbol information),
1904 fall back to just using the current PC. */
1905 pc = get_frame_pc (this_frame);
1906 func = get_frame_func (this_frame);
1907 if (!func)
1908 func = pc;
1909
1910 id = frame_id_build (cache->prev_sp, func);
1911 *this_id = id;
1912 }
1913
1914 static struct value *
1915 arm_prologue_prev_register (struct frame_info *this_frame,
1916 void **this_cache,
1917 int prev_regnum)
1918 {
1919 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1920 struct arm_prologue_cache *cache;
1921
1922 if (*this_cache == NULL)
1923 *this_cache = arm_make_prologue_cache (this_frame);
1924 cache = (struct arm_prologue_cache *) *this_cache;
1925
1926 /* If we are asked to unwind the PC, then we need to return the LR
1927 instead. The prologue may save PC, but it will point into this
1928 frame's prologue, not the next frame's resume location. Also
1929 strip the saved T bit. A valid LR may have the low bit set, but
1930 a valid PC never does. */
1931 if (prev_regnum == ARM_PC_REGNUM)
1932 {
1933 CORE_ADDR lr;
1934
1935 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1936 return frame_unwind_got_constant (this_frame, prev_regnum,
1937 arm_addr_bits_remove (gdbarch, lr));
1938 }
1939
1940 /* SP is generally not saved to the stack, but this frame is
1941 identified by the next frame's stack pointer at the time of the call.
1942 The value was already reconstructed into PREV_SP. */
1943 if (prev_regnum == ARM_SP_REGNUM)
1944 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1945
1946 /* The CPSR may have been changed by the call instruction and by the
1947 called function. The only bit we can reconstruct is the T bit,
1948 by checking the low bit of LR as of the call. This is a reliable
1949 indicator of Thumb-ness except for some ARM v4T pre-interworking
1950 Thumb code, which could get away with a clear low bit as long as
1951 the called function did not use bx. Guess that all other
1952 bits are unchanged; the condition flags are presumably lost,
1953 but the processor status is likely valid. */
1954 if (prev_regnum == ARM_PS_REGNUM)
1955 {
1956 CORE_ADDR lr, cpsr;
1957 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1958
1959 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1960 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1961 if (IS_THUMB_ADDR (lr))
1962 cpsr |= t_bit;
1963 else
1964 cpsr &= ~t_bit;
1965 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1966 }
1967
1968 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1969 prev_regnum);
1970 }
1971
/* Unwind method table for frames handled by prologue analysis.  */
struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,				/* unwind data */
  default_frame_sniffer
};
1980
1981 /* Maintain a list of ARM exception table entries per objfile, similar to the
1982 list of mapping symbols. We only cache entries for standard ARM-defined
1983 personality routines; the cache will contain only the frame unwinding
1984 instructions associated with the entry (not the descriptors). */
1985
struct arm_exidx_entry
{
  /* Start of the region covered by this entry, expressed as an offset
     from the start of its containing section.  */
  bfd_vma addr;

  /* Normalized unwind instructions for the region, allocated on the
     objfile obstack and terminated by the 0xb0 "Finish" opcode; NULL
     when the entry carries no unwind data (e.g. EXIDX_CANTUNWIND).  */
  gdb_byte *entry;

  /* Order entries by address, so that std::lower_bound can be used to
     search them.  */
  bool operator< (const arm_exidx_entry &other) const
  {
    return addr < other.addr;
  }
};
1996
/* Per-objfile cache of exception table entries: one vector of entries
   per BFD section, indexed by the section's index.  */
struct arm_exidx_data
{
  std::vector<std::vector<arm_exidx_entry>> section_maps;
};
2001
/* Registry key used to attach an arm_exidx_data cache to each objfile.  */
static const struct objfile_key<arm_exidx_data> arm_exidx_data_key;
2003
2004 static struct obj_section *
2005 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2006 {
2007 struct obj_section *osect;
2008
2009 ALL_OBJFILE_OSECTIONS (objfile, osect)
2010 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2011 {
2012 bfd_vma start, size;
2013 start = bfd_section_vma (osect->the_bfd_section);
2014 size = bfd_section_size (osect->the_bfd_section);
2015
2016 if (start <= vma && vma < start + size)
2017 return osect;
2018 }
2019
2020 return NULL;
2021 }
2022
2023 /* Parse contents of exception table and exception index sections
2024 of OBJFILE, and fill in the exception table entry cache.
2025
2026 For each entry that refers to a standard ARM-defined personality
2027 routine, extract the frame unwinding instructions (from either
2028 the index or the table section). The unwinding instructions
2029 are normalized by:
2030 - extracting them from the rest of the table data
2031 - converting to host endianness
2032 - appending the implicit 0xb0 ("Finish") code
2033
2034 The extracted and normalized instructions are stored for later
2035 retrieval by the arm_find_exidx_entry routine. */
2036
static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || arm_exidx_data_key.get (objfile) != NULL)
    return;

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  gdb::byte_vector exidx_data;
  if (exidx)
    {
      exidx_vma = bfd_section_vma (exidx);
      exidx_data.resize (bfd_section_size (exidx));

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data.data (), 0,
				     exidx_data.size ()))
	return;
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  gdb::byte_vector extab_data;
  if (extab)
    {
      extab_vma = bfd_section_vma (extab);
      extab_data.resize (bfd_section_size (extab));

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data.data (), 0,
				     extab_data.size ()))
	return;
    }

  /* Allocate exception table data structure.  */
  data = arm_exidx_data_key.emplace (objfile);
  data->section_maps.resize (objfile->obfd->section_count);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a self-relative offset to the function start, and either
     inline unwind data or a self-relative offset into .ARM.extab.  */
  for (i = 0; i < exidx_data.size () / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd,
				  exidx_data.data () + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  (Sign-extend the
	 31-bit offset, then relocate it against the address of this
	 index entry.)  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_section_vma (sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  Sign-extend and
	     relocate the offset the same way as IDX above.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_data.size ())
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   (extab_data.data ()
					    + addr - extab_vma));
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  */
      if (n_words)
	if (addr < extab_vma
	    || addr + 4 * n_words > extab_vma + extab_data.size ())
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the inline bytes, most significant first.  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  /* Copy the extab words, converting each to big-endian byte
	     order (the per-word host-endian value is split manually).  */
	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      data->section_maps[sec->the_bfd_section->index].push_back
	(new_exidx_entry);
    }
}
2233
2234 /* Search for the exception table entry covering MEMADDR. If one is found,
2235 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2236 set *START to the start of the region covered by this entry. */
2237
static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      /* Entries are keyed on section-relative offsets, so convert
	 MEMADDR before searching.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };

      data = arm_exidx_data_key.get (sec->objfile);
      if (data != NULL)
	{
	  std::vector<arm_exidx_entry> &map
	    = data->section_maps[sec->the_bfd_section->index];
	  if (!map.empty ())
	    {
	      auto idx = std::lower_bound (map.begin (), map.end (), map_key);

	      /* std::lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < map.end ())
		{
		  if (idx->addr == map_key.addr)
		    {
		      if (start)
			*start = idx->addr + obj_section_addr (sec);
		      return idx->entry;
		    }
		}

	      /* Fall back to the entry just before the insertion
		 point, whose region extends up to MEMADDR.  */
	      if (idx > map.begin ())
		{
		  idx = idx - 1;
		  if (start)
		    *start = idx->addr + obj_section_addr (sec);
		  return idx->entry;
		}
	    }
	}
    }

  return NULL;
}
2285
2286 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2287 instruction list from the ARM exception table entry ENTRY, allocate and
2288 return a prologue cache structure describing how to unwind this frame.
2289
2290 Return NULL if the unwinding instruction list contains a "spare",
2291 "reserved" or "refuse to unwind" instruction as defined in section
2292 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2293 for the ARM Architecture" document. */
2294
static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  /* VSP is the running stack-pointer value manipulated by the unwind
     instructions; it is lazily (re)loaded from the frame whenever an
     instruction has redirected where SP comes from.  */
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000iiii iiiiiiii: pop r4-r15 under a 12-bit mask.  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  /* 1001nnnn: set vsp = r[nnnn].  */
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  /* 1010xnnn: pop r4-r[4+nnn], plus LR if x is set.  */
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* 10110000: "Finish" -- end of the instruction list.

	     We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  /* 10110001: pop r0-r3 under a 4-bit mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* 10110010: large stack adjustment; the operand is a ULEB128
	     value in the following bytes.  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  /* 10110011: pop VFP registers, FSTMFDX-style.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2584
2585 /* Unwinding via ARM exception table entries. Note that the sniffer
2586 already computes a filled-in prologue cache, which is then used
2587 with the same arm_prologue_this_id and arm_prologue_prev_register
2588 routines also used for prologue-parsing based unwinding. */
2589
static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  ULONGEST insn;

	  /* Check whether the 16-bit instruction just before PC is a
	     Thumb SVC.  */
	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
						 2, byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  ULONGEST insn;

	  /* Check whether the 32-bit instruction just before PC is an
	     ARM SVC (the mask ignores the condition field).  */
	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
						 4, byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2679
/* Unwinder driven by .ARM.exidx exception table entries.  The sniffer
   fills in the prologue cache itself; this_id and prev_register are
   shared with the prologue-analysis unwinder.  */
struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2688
2689 static struct arm_prologue_cache *
2690 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2691 {
2692 struct arm_prologue_cache *cache;
2693 int reg;
2694
2695 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2696 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2697
2698 /* Still rely on the offset calculated from prologue. */
2699 arm_scan_prologue (this_frame, cache);
2700
2701 /* Since we are in epilogue, the SP has been restored. */
2702 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2703
2704 /* Calculate actual addresses of saved registers using offsets
2705 determined by arm_scan_prologue. */
2706 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2707 if (trad_frame_addr_p (cache->saved_regs, reg))
2708 cache->saved_regs[reg].addr += cache->prev_sp;
2709
2710 return cache;
2711 }
2712
/* Implementation of function hook 'this_id' in
   'struct frame_unwind' for the epilogue unwinder.  */
2715
2716 static void
2717 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2718 void **this_cache,
2719 struct frame_id *this_id)
2720 {
2721 struct arm_prologue_cache *cache;
2722 CORE_ADDR pc, func;
2723
2724 if (*this_cache == NULL)
2725 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2726 cache = (struct arm_prologue_cache *) *this_cache;
2727
2728 /* Use function start address as part of the frame ID. If we cannot
2729 identify the start address (due to missing symbol information),
2730 fall back to just using the current PC. */
2731 pc = get_frame_pc (this_frame);
2732 func = get_frame_func (this_frame);
2733 if (func == 0)
2734 func = pc;
2735
2736 (*this_id) = frame_id_build (cache->prev_sp, pc);
2737 }
2738
/* Implementation of function hook 'prev_register' in
   'struct frame_unwind' for the epilogue unwinder.  */
2741
2742 static struct value *
2743 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2744 void **this_cache, int regnum)
2745 {
2746 if (*this_cache == NULL)
2747 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2748
2749 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2750 }
2751
2752 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2753 CORE_ADDR pc);
2754 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2755 CORE_ADDR pc);
2756
/* Implementation of function hook 'sniffer' in
   'struct frame_unwind' for the epilogue unwinder.  */
2759
2760 static int
2761 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2762 struct frame_info *this_frame,
2763 void **this_prologue_cache)
2764 {
2765 if (frame_relative_level (this_frame) == 0)
2766 {
2767 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2768 CORE_ADDR pc = get_frame_pc (this_frame);
2769
2770 if (arm_frame_is_thumb (this_frame))
2771 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2772 else
2773 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2774 }
2775 else
2776 return 0;
2777 }
2778
2779 /* Frame unwinder from epilogue. */
2780
/* Unwinder selected (for the innermost frame only, see the sniffer)
   when PC is inside a function epilogue.  */
static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
2790
2791 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2792 trampoline, return the target PC. Otherwise return 0.
2793
2794 void call0a (char c, short s, int i, long l) {}
2795
2796 int main (void)
2797 {
2798 (*pointer_to_call0a) (c, s, i, l);
2799 }
2800
2801 Instead of calling a stub library function _call_via_xx (xx is
2802 the register name), GCC may inline the trampoline in the object
2803 file as below (register r2 has the address of call0a).
2804
2805 .global main
2806 .type main, %function
2807 ...
2808 bl .L1
2809 ...
2810 .size main, .-main
2811
2812 .L1:
2813 bx r2
2814
2815 The trampoline 'bx r2' doesn't belong to main. */
2816
2817 static CORE_ADDR
2818 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2819 {
2820 /* The heuristics of recognizing such trampoline is that FRAME is
2821 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2822 if (arm_frame_is_thumb (frame))
2823 {
2824 gdb_byte buf[2];
2825
2826 if (target_read_memory (pc, buf, 2) == 0)
2827 {
2828 struct gdbarch *gdbarch = get_frame_arch (frame);
2829 enum bfd_endian byte_order_for_code
2830 = gdbarch_byte_order_for_code (gdbarch);
2831 uint16_t insn
2832 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2833
2834 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2835 {
2836 CORE_ADDR dest
2837 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2838
2839 /* Clear the LSB so that gdb core sets step-resume
2840 breakpoint at the right address. */
2841 return UNMAKE_THUMB_ADDR (dest);
2842 }
2843 }
2844 }
2845
2846 return 0;
2847 }
2848
2849 static struct arm_prologue_cache *
2850 arm_make_stub_cache (struct frame_info *this_frame)
2851 {
2852 struct arm_prologue_cache *cache;
2853
2854 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2855 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2856
2857 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2858
2859 return cache;
2860 }
2861
2862 /* Our frame ID for a stub frame is the current SP and LR. */
2863
2864 static void
2865 arm_stub_this_id (struct frame_info *this_frame,
2866 void **this_cache,
2867 struct frame_id *this_id)
2868 {
2869 struct arm_prologue_cache *cache;
2870
2871 if (*this_cache == NULL)
2872 *this_cache = arm_make_stub_cache (this_frame);
2873 cache = (struct arm_prologue_cache *) *this_cache;
2874
2875 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2876 }
2877
/* Sniffer for the stub unwinder: accept the frame when PC is in a PLT
   stub, when the code at PC is unreadable, or when PC has no symbol
   but looks like a Thumb 'bx <Rm>' trampoline.  */

static int
arm_stub_unwind_sniffer (const struct frame_unwind *self,
			 struct frame_info *this_frame,
			 void **this_prologue_cache)
{
  CORE_ADDR addr_in_block;
  gdb_byte dummy[4];
  CORE_ADDR pc, start_addr;
  const char *name;

  addr_in_block = get_frame_address_in_block (this_frame);
  pc = get_frame_pc (this_frame);
  if (in_plt_section (addr_in_block)
      /* We also use the stub unwinder if the target memory is unreadable
	 to avoid having the prologue unwinder trying to read it.  */
      || target_read_memory (pc, dummy, 4) != 0)
    return 1;

  /* No symbol covers PC, but it is a 'bx <Rm>' trampoline: use the
     stub unwinder for it as well.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
      && arm_skip_bx_reg (this_frame, pc) != 0)
    return 1;

  return 0;
}
2902
/* Unwinder for stub frames (PLT entries, unreadable code, 'bx reg'
   trampolines); shares prev_register with the prologue unwinder.  */
struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2911
2912 /* Put here the code to store, into CACHE->saved_regs, the addresses
2913 of the saved registers of frame described by THIS_FRAME. CACHE is
2914 returned. */
2915
static struct arm_prologue_cache *
arm_m_exception_cache (struct frame_info *this_frame)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_sp;
  LONGEST xpsr;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  unwound_sp = get_frame_register_unsigned (this_frame,
					    ARM_SP_REGNUM);

  /* The hardware saves eight 32-bit words, comprising xPSR,
     ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
     "B1.5.6 Exception entry behavior" in
     "ARMv7-M Architecture Reference Manual".  */
  cache->saved_regs[0].addr = unwound_sp;	/* R0  */
  cache->saved_regs[1].addr = unwound_sp + 4;	/* R1  */
  cache->saved_regs[2].addr = unwound_sp + 8;	/* R2  */
  cache->saved_regs[3].addr = unwound_sp + 12;	/* R3  */
  cache->saved_regs[12].addr = unwound_sp + 16;	/* R12  */
  cache->saved_regs[14].addr = unwound_sp + 20;	/* LR  */
  cache->saved_regs[15].addr = unwound_sp + 24;	/* ReturnAddress  */
  cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;

  /* If bit 9 of the saved xPSR is set, then there is a four-byte
     aligner between the top of the 32-byte stack frame and the
     previous context's stack pointer.  */
  cache->prev_sp = unwound_sp + 32;
  if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
      && (xpsr & (1 << 9)) != 0)
    cache->prev_sp += 4;

  return cache;
}
2954
/* Implementation of function hook 'this_id' in
   'struct frame_unwind'.  */
2957
static void
arm_m_exception_this_id (struct frame_info *this_frame,
			 void **this_cache,
			 struct frame_id *this_id)
{
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Build the frame ID from the unwound SP and the current PC.  */
  *this_id = frame_id_build (cache->prev_sp,
			     get_frame_pc (this_frame));
}
2973
/* Implementation of function hook 'prev_register' in
   'struct frame_unwind'.  */
2976
2977 static struct value *
2978 arm_m_exception_prev_register (struct frame_info *this_frame,
2979 void **this_cache,
2980 int prev_regnum)
2981 {
2982 struct arm_prologue_cache *cache;
2983
2984 if (*this_cache == NULL)
2985 *this_cache = arm_m_exception_cache (this_frame);
2986 cache = (struct arm_prologue_cache *) *this_cache;
2987
2988 /* The value was already reconstructed into PREV_SP. */
2989 if (prev_regnum == ARM_SP_REGNUM)
2990 return frame_unwind_got_constant (this_frame, prev_regnum,
2991 cache->prev_sp);
2992
2993 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2994 prev_regnum);
2995 }
2996
/* Implementation of function hook 'sniffer' in
   'struct frame_unwind'.  */
2999
3000 static int
3001 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3002 struct frame_info *this_frame,
3003 void **this_prologue_cache)
3004 {
3005 CORE_ADDR this_pc = get_frame_pc (this_frame);
3006
3007 /* No need to check is_m; this sniffer is only registered for
3008 M-profile architectures. */
3009
3010 /* Check if exception frame returns to a magic PC value. */
3011 return arm_m_addr_is_magic (this_pc);
3012 }
3013
3014 /* Frame unwinder for M-profile exceptions. */
3015
/* Frame unwinder for M-profile exceptions (classified as a signal
   trampoline, since the exception interrupted normal control flow).  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3025
3026 static CORE_ADDR
3027 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3028 {
3029 struct arm_prologue_cache *cache;
3030
3031 if (*this_cache == NULL)
3032 *this_cache = arm_make_prologue_cache (this_frame);
3033 cache = (struct arm_prologue_cache *) *this_cache;
3034
3035 return cache->prev_sp - cache->framesize;
3036 }
3037
/* Frame base handler for the prologue unwinder; the same address is
   used for the frame base, locals base and args base.  */
struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3044
/* Callback installed by arm_dwarf2_frame_init_reg to reconstruct the
   PC and CPSR values that DWARF CFI cannot describe directly.  Any
   other REGNUM is an internal error.  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
3079
3080 static void
3081 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3082 struct dwarf2_frame_state_reg *reg,
3083 struct frame_info *this_frame)
3084 {
3085 switch (regnum)
3086 {
3087 case ARM_PC_REGNUM:
3088 case ARM_PS_REGNUM:
3089 reg->how = DWARF2_FRAME_REG_FN;
3090 reg->loc.fn = arm_dwarf2_prev_register;
3091 break;
3092 case ARM_SP_REGNUM:
3093 reg->how = DWARF2_FRAME_REG_CFA;
3094 break;
3095 }
3096 }
3097
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb
   code: return non-zero if PC appears to be inside the epilogue of
   its containing function, i.e. the frame is already (partially)
   destroyed.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot reason about the epilogue.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit encoding.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  /* Stay within the function when reading the preceding 4 bytes.  */
  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3207
/* Helper for arm_stack_frame_destroyed_p: heuristically decide
   whether PC (in ARM mode) is inside a function epilogue.  */

static int
arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  /* Without function bounds we cannot reason about the epilogue.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Only consider instructions whose condition field is not NV.  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  /* Make sure there is a preceding instruction inside the function.  */
  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3256
3257 /* Implement the stack_frame_destroyed_p gdbarch method. */
3258
3259 static int
3260 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3261 {
3262 if (arm_pc_is_thumb (gdbarch, pc))
3263 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3264 else
3265 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3266 }
3267
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Next-older item, or NULL.  */
  gdb_byte *data;		/* Heap-allocated copy of the bytes.  */
};
3277
3278 static struct stack_item *
3279 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3280 {
3281 struct stack_item *si;
3282 si = XNEW (struct stack_item);
3283 si->data = (gdb_byte *) xmalloc (len);
3284 si->len = len;
3285 si->prev = prev;
3286 memcpy (si->data, contents, len);
3287 return si;
3288 }
3289
3290 static struct stack_item *
3291 pop_stack_item (struct stack_item *si)
3292 {
3293 struct stack_item *dead = si;
3294 si = si->prev;
3295 xfree (dead->data);
3296 xfree (dead);
3297 return si;
3298 }
3299
3300 /* Implement the gdbarch type alignment method, overrides the generic
3301 alignment algorithm for anything that is arm specific. */
3302
3303 static ULONGEST
3304 arm_type_align (gdbarch *gdbarch, struct type *t)
3305 {
3306 t = check_typedef (t);
3307 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3308 {
3309 /* Use the natural alignment for vector types (the same for
3310 scalar type), but the maximum alignment is 64-bit. */
3311 if (TYPE_LENGTH (t) > 8)
3312 return 8;
3313 else
3314 return TYPE_LENGTH (t);
3315 }
3316
3317 /* Allow the common code to calculate the alignment. */
3318 return 0;
3319 }
3320
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* No base type determined yet.  */
  VFP_CPRC_SINGLE,	/* 4-byte single-precision float.  */
  VFP_CPRC_DOUBLE,	/* 8-byte double-precision float.  */
  VFP_CPRC_VEC64,	/* 8-byte containerized vector.  */
  VFP_CPRC_VEC128	/* 16-byte containerized vector.  */
};
3332
3333 /* The length of one element of base type B. */
3334
3335 static unsigned
3336 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3337 {
3338 switch (b)
3339 {
3340 case VFP_CPRC_SINGLE:
3341 return 4;
3342 case VFP_CPRC_DOUBLE:
3343 return 8;
3344 case VFP_CPRC_VEC64:
3345 return 8;
3346 case VFP_CPRC_VEC128:
3347 return 16;
3348 default:
3349 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3350 (int) b);
3351 }
3352 }
3353
3354 /* The character ('s', 'd' or 'q') for the type of VFP register used
3355 for passing base type B. */
3356
3357 static int
3358 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3359 {
3360 switch (b)
3361 {
3362 case VFP_CPRC_SINGLE:
3363 return 's';
3364 case VFP_CPRC_DOUBLE:
3365 return 'd';
3366 case VFP_CPRC_VEC64:
3367 return 'd';
3368 case VFP_CPRC_VEC128:
3369 return 'q';
3370 default:
3371 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3372 (int) b);
3373 }
3374 }
3375
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A scalar float is one element of SINGLE or DOUBLE class,
	 depending on its size.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

	 */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    /* An ordinary array: classify the element type, then the
	       element count is the array length over the unit length.  */
	    int count;
	    unsigned unitlen;

	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* A struct's element count is the sum over its non-static
	   fields; any padding disqualifies it (length check below).  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = 0;

	    if (!field_is_static (&TYPE_FIELD (t, i)))
	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
						      base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union's element count is the maximum over its members.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3554
3555 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3556 if passed to or returned from a non-variadic function with the VFP
3557 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3558 *BASE_TYPE to the base type for T and *COUNT to the number of
3559 elements of that base type before returning. */
3560
3561 static int
3562 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3563 int *count)
3564 {
3565 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3566 int c = arm_vfp_cprc_sub_candidate (t, &b);
3567 if (c <= 0 || c > 4)
3568 return 0;
3569 *base_type = b;
3570 *count = c;
3571 return 1;
3572 }
3573
3574 /* Return 1 if the VFP ABI should be used for passing arguments to and
3575 returning values from a function of type FUNC_TYPE, 0
3576 otherwise. */
3577
3578 static int
3579 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3580 {
3581 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3582 /* Variadic functions always use the base ABI. Assume that functions
3583 without debug info are not variadic. */
3584 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3585 return 0;
3586 /* The VFP ABI is only supported as a variant of AAPCS. */
3587 if (tdep->arm_abi != ARM_ABI_AAPCS)
3588 return 0;
3589 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3590 }
3591
3592 /* We currently only support passing parameters in integer registers, which
3593 conforms with GCC's default model, and VFP argument passing following
3594 the VFP variant of AAPCS. Several other variants exist and
3595 we should probably support some of them based on the selected ABI. */
3596
3597 static CORE_ADDR
3598 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3599 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3600 struct value **args, CORE_ADDR sp,
3601 function_call_return_method return_method,
3602 CORE_ADDR struct_addr)
3603 {
3604 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3605 int argnum;
3606 int argreg;
3607 int nstack;
3608 struct stack_item *si = NULL;
3609 int use_vfp_abi;
3610 struct type *ftype;
3611 unsigned vfp_regs_free = (1 << 16) - 1;
3612
3613 /* Determine the type of this function and whether the VFP ABI
3614 applies. */
3615 ftype = check_typedef (value_type (function));
3616 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3617 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3618 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3619
3620 /* Set the return address. For the ARM, the return breakpoint is
3621 always at BP_ADDR. */
3622 if (arm_pc_is_thumb (gdbarch, bp_addr))
3623 bp_addr |= 1;
3624 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3625
3626 /* Walk through the list of args and determine how large a temporary
3627 stack is required. Need to take care here as structs may be
3628 passed on the stack, and we have to push them. */
3629 nstack = 0;
3630
3631 argreg = ARM_A1_REGNUM;
3632 nstack = 0;
3633
3634 /* The struct_return pointer occupies the first parameter
3635 passing register. */
3636 if (return_method == return_method_struct)
3637 {
3638 if (arm_debug)
3639 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3640 gdbarch_register_name (gdbarch, argreg),
3641 paddress (gdbarch, struct_addr));
3642 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3643 argreg++;
3644 }
3645
3646 for (argnum = 0; argnum < nargs; argnum++)
3647 {
3648 int len;
3649 struct type *arg_type;
3650 struct type *target_type;
3651 enum type_code typecode;
3652 const bfd_byte *val;
3653 int align;
3654 enum arm_vfp_cprc_base_type vfp_base_type;
3655 int vfp_base_count;
3656 int may_use_core_reg = 1;
3657
3658 arg_type = check_typedef (value_type (args[argnum]));
3659 len = TYPE_LENGTH (arg_type);
3660 target_type = TYPE_TARGET_TYPE (arg_type);
3661 typecode = TYPE_CODE (arg_type);
3662 val = value_contents (args[argnum]);
3663
3664 align = type_align (arg_type);
3665 /* Round alignment up to a whole number of words. */
3666 align = (align + ARM_INT_REGISTER_SIZE - 1)
3667 & ~(ARM_INT_REGISTER_SIZE - 1);
3668 /* Different ABIs have different maximum alignments. */
3669 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3670 {
3671 /* The APCS ABI only requires word alignment. */
3672 align = ARM_INT_REGISTER_SIZE;
3673 }
3674 else
3675 {
3676 /* The AAPCS requires at most doubleword alignment. */
3677 if (align > ARM_INT_REGISTER_SIZE * 2)
3678 align = ARM_INT_REGISTER_SIZE * 2;
3679 }
3680
3681 if (use_vfp_abi
3682 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3683 &vfp_base_count))
3684 {
3685 int regno;
3686 int unit_length;
3687 int shift;
3688 unsigned mask;
3689
3690 /* Because this is a CPRC it cannot go in a core register or
3691 cause a core register to be skipped for alignment.
3692 Either it goes in VFP registers and the rest of this loop
3693 iteration is skipped for this argument, or it goes on the
3694 stack (and the stack alignment code is correct for this
3695 case). */
3696 may_use_core_reg = 0;
3697
3698 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3699 shift = unit_length / 4;
3700 mask = (1 << (shift * vfp_base_count)) - 1;
3701 for (regno = 0; regno < 16; regno += shift)
3702 if (((vfp_regs_free >> regno) & mask) == mask)
3703 break;
3704
3705 if (regno < 16)
3706 {
3707 int reg_char;
3708 int reg_scaled;
3709 int i;
3710
3711 vfp_regs_free &= ~(mask << regno);
3712 reg_scaled = regno / shift;
3713 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3714 for (i = 0; i < vfp_base_count; i++)
3715 {
3716 char name_buf[4];
3717 int regnum;
3718 if (reg_char == 'q')
3719 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3720 val + i * unit_length);
3721 else
3722 {
3723 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3724 reg_char, reg_scaled + i);
3725 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3726 strlen (name_buf));
3727 regcache->cooked_write (regnum, val + i * unit_length);
3728 }
3729 }
3730 continue;
3731 }
3732 else
3733 {
3734 /* This CPRC could not go in VFP registers, so all VFP
3735 registers are now marked as used. */
3736 vfp_regs_free = 0;
3737 }
3738 }
3739
3740 /* Push stack padding for doubleword alignment. */
3741 if (nstack & (align - 1))
3742 {
3743 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3744 nstack += ARM_INT_REGISTER_SIZE;
3745 }
3746
3747 /* Doubleword aligned quantities must go in even register pairs. */
3748 if (may_use_core_reg
3749 && argreg <= ARM_LAST_ARG_REGNUM
3750 && align > ARM_INT_REGISTER_SIZE
3751 && argreg & 1)
3752 argreg++;
3753
3754 /* If the argument is a pointer to a function, and it is a
3755 Thumb function, create a LOCAL copy of the value and set
3756 the THUMB bit in it. */
3757 if (TYPE_CODE_PTR == typecode
3758 && target_type != NULL
3759 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3760 {
3761 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3762 if (arm_pc_is_thumb (gdbarch, regval))
3763 {
3764 bfd_byte *copy = (bfd_byte *) alloca (len);
3765 store_unsigned_integer (copy, len, byte_order,
3766 MAKE_THUMB_ADDR (regval));
3767 val = copy;
3768 }
3769 }
3770
3771 /* Copy the argument to general registers or the stack in
3772 register-sized pieces. Large arguments are split between
3773 registers and stack. */
3774 while (len > 0)
3775 {
3776 int partial_len = len < ARM_INT_REGISTER_SIZE
3777 ? len : ARM_INT_REGISTER_SIZE;
3778 CORE_ADDR regval
3779 = extract_unsigned_integer (val, partial_len, byte_order);
3780
3781 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3782 {
3783 /* The argument is being passed in a general purpose
3784 register. */
3785 if (byte_order == BFD_ENDIAN_BIG)
3786 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3787 if (arm_debug)
3788 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3789 argnum,
3790 gdbarch_register_name
3791 (gdbarch, argreg),
3792 phex (regval, ARM_INT_REGISTER_SIZE));
3793 regcache_cooked_write_unsigned (regcache, argreg, regval);
3794 argreg++;
3795 }
3796 else
3797 {
3798 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3799
3800 memset (buf, 0, sizeof (buf));
3801 store_unsigned_integer (buf, partial_len, byte_order, regval);
3802
3803 /* Push the arguments onto the stack. */
3804 if (arm_debug)
3805 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3806 argnum, nstack);
3807 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3808 nstack += ARM_INT_REGISTER_SIZE;
3809 }
3810
3811 len -= partial_len;
3812 val += partial_len;
3813 }
3814 }
3815 /* If we have an odd number of words to push, then decrement the stack
3816 by one word now, so first stack argument will be dword aligned. */
3817 if (nstack & 4)
3818 sp -= 4;
3819
3820 while (si)
3821 {
3822 sp -= si->len;
3823 write_memory (sp, si->data, si->len);
3824 si = pop_stack_item (si);
3825 }
3826
3827 /* Finally, update teh SP register. */
3828 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3829
3830 return sp;
3831 }
3832
3833
3834 /* Always align the frame to an 8-byte boundary. This is required on
3835 some platforms and harmless on the rest. */
3836
3837 static CORE_ADDR
3838 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3839 {
3840 /* Align the stack to eight bytes. */
3841 return sp & ~ (CORE_ADDR) 7;
3842 }
3843
3844 static void
3845 print_fpu_flags (struct ui_file *file, int flags)
3846 {
3847 if (flags & (1 << 0))
3848 fputs_filtered ("IVO ", file);
3849 if (flags & (1 << 1))
3850 fputs_filtered ("DVZ ", file);
3851 if (flags & (1 << 2))
3852 fputs_filtered ("OFL ", file);
3853 if (flags & (1 << 3))
3854 fputs_filtered ("UFL ", file);
3855 if (flags & (1 << 4))
3856 fputs_filtered ("INX ", file);
3857 fputc_filtered ('\n', file);
3858 }
3859
3860 /* Print interesting information about the floating point processor
3861 (if present) or emulator. */
3862 static void
3863 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3864 struct frame_info *frame, const char *args)
3865 {
3866 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3867 int type;
3868
3869 type = (status >> 24) & 127;
3870 if (status & (1 << 31))
3871 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3872 else
3873 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3874 /* i18n: [floating point unit] mask */
3875 fputs_filtered (_("mask: "), file);
3876 print_fpu_flags (file, status >> 16);
3877 /* i18n: [floating point unit] flags */
3878 fputs_filtered (_("flags: "), file);
3879 print_fpu_flags (file, status);
3880 }
3881
3882 /* Construct the ARM extended floating point type. */
3883 static struct type *
3884 arm_ext_type (struct gdbarch *gdbarch)
3885 {
3886 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3887
3888 if (!tdep->arm_ext_type)
3889 tdep->arm_ext_type
3890 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3891 floatformats_arm_ext);
3892
3893 return tdep->arm_ext_type;
3894 }
3895
/* Build (once, caching in TDEP) the union type used to view a 64-bit
   NEON "D" register: overlapping u8/u16/u32/u64/f32/f64 views of the
   same eight bytes.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      /* Mark the union itself as a vector type named "neon_d".  */
      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
3927
3928 /* FIXME: The vector types are not correctly ordered on big-endian
3929 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3930 bits of d0 - regardless of what unit size is being held in d0. So
3931 the offset of the first uint8 in d0 is 7, but the offset of the
3932 first float is 4. This code works as-is for little-endian
3933 targets. */
3934
/* Build (once, caching in TDEP) the union type used to view a 128-bit
   NEON "Q" register: overlapping u8/u16/u32/u64/f32/f64 vector views
   of the same sixteen bytes.  See the FIXME above regarding element
   ordering on big-endian targets.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      /* Mark the union itself as a vector type named "neon_q".  */
      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
3966
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* Pseudo registers beyond the raw set: 32 single-precision VFP
     views, followed by 16 NEON quad views.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* FPA registers get a real type only when the target has FPA.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4016
/* Map DWARF register number REG onto the appropriate GDB register
   number.  Return -1 for DWARF numbers with no GDB mapping.  */

static int
arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
{
  /* Core integer regs.  */
  if (reg >= 0 && reg <= 15)
    return reg;

  /* Legacy FPA encoding.  These were once used in a way which
     overlapped with VFP register numbering, so their use is
     discouraged, but GDB doesn't support the ARM toolchain
     which used them for VFP.  */
  if (reg >= 16 && reg <= 23)
    return ARM_F0_REGNUM + reg - 16;

  /* New assignments for the FPA registers.  */
  if (reg >= 96 && reg <= 103)
    return ARM_F0_REGNUM + reg - 96;

  /* WMMX register assignments.  */
  if (reg >= 104 && reg <= 111)
    return ARM_WCGR0_REGNUM + reg - 104;

  if (reg >= 112 && reg <= 127)
    return ARM_WR0_REGNUM + reg - 112;

  if (reg >= 192 && reg <= 199)
    return ARM_WC0_REGNUM + reg - 192;

  /* VFP v2 registers.  A double precision value is actually
     in d1 rather than s2, but the ABI only defines numbering
     for the single precision registers.  This will "just work"
     in GDB for little endian targets (we'll read eight bytes,
     starting in s0 and then progressing to s1), but will be
     reversed on big endian targets with VFP.  This won't
     be a problem for the new Neon quad registers; you're supposed
     to use DW_OP_piece for those.  */
  if (reg >= 64 && reg <= 95)
    {
      char name_buf[4];

      /* Resolve "sN" through the user-register name table.  */
      xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
					  strlen (name_buf));
    }

  /* VFP v3 / Neon registers.  This range is also used for VFP v2
     registers, except that it now describes d0 instead of s0.  */
  if (reg >= 256 && reg <= 287)
    {
      char name_buf[4];

      /* Resolve "dN" through the user-register name table.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
					  strlen (name_buf));
    }

  return -1;
}
4078
4079 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4080 static int
4081 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4082 {
4083 int reg = regnum;
4084 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4085
4086 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4087 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4088
4089 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4090 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4091
4092 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4093 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4094
4095 if (reg < NUM_GREGS)
4096 return SIM_ARM_R0_REGNUM + reg;
4097 reg -= NUM_GREGS;
4098
4099 if (reg < NUM_FREGS)
4100 return SIM_ARM_FP0_REGNUM + reg;
4101 reg -= NUM_FREGS;
4102
4103 if (reg < NUM_SREGS)
4104 return SIM_ARM_FPS_REGNUM + reg;
4105 reg -= NUM_SREGS;
4106
4107 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4108 }
4109
4110 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4111 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4112 NULL if an error occurs. BUF is freed. */
4113
4114 static gdb_byte *
4115 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4116 int old_len, int new_len)
4117 {
4118 gdb_byte *new_buf;
4119 int bytes_to_read = new_len - old_len;
4120
4121 new_buf = (gdb_byte *) xmalloc (new_len);
4122 memcpy (new_buf + bytes_to_read, buf, old_len);
4123 xfree (buf);
4124 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4125 {
4126 xfree (new_buf);
4127 return NULL;
4128 }
4129 return new_buf;
4130 }
4131
4132 /* An IT block is at most the 2-byte IT instruction followed by
4133 four 4-byte instructions. The furthest back we must search to
4134 find an IT block that affects the current instruction is thus
4135 2 + 3 * 4 == 14 bytes. */
4136 #define MAX_IT_BLOCK_PREFIX 14
4137
4138 /* Use a quick scan if there are more than this many bytes of
4139 code. */
4140 #define IT_SCAN_THRESHOLD 32
4141
4142 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4143 A breakpoint in an IT block may not be hit, depending on the
4144 condition flags. */
4145 static CORE_ADDR
4146 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4147 {
4148 gdb_byte *buf;
4149 char map_type;
4150 CORE_ADDR boundary, func_start;
4151 int buf_len;
4152 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4153 int i, any, last_it, last_it_count;
4154
4155 /* If we are using BKPT breakpoints, none of this is necessary. */
4156 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4157 return bpaddr;
4158
4159 /* ARM mode does not have this problem. */
4160 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4161 return bpaddr;
4162
4163 /* We are setting a breakpoint in Thumb code that could potentially
4164 contain an IT block. The first step is to find how much Thumb
4165 code there is; we do not need to read outside of known Thumb
4166 sequences. */
4167 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4168 if (map_type == 0)
4169 /* Thumb-2 code must have mapping symbols to have a chance. */
4170 return bpaddr;
4171
4172 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4173
4174 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4175 && func_start > boundary)
4176 boundary = func_start;
4177
4178 /* Search for a candidate IT instruction. We have to do some fancy
4179 footwork to distinguish a real IT instruction from the second
4180 half of a 32-bit instruction, but there is no need for that if
4181 there's no candidate. */
4182 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4183 if (buf_len == 0)
4184 /* No room for an IT instruction. */
4185 return bpaddr;
4186
4187 buf = (gdb_byte *) xmalloc (buf_len);
4188 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4189 return bpaddr;
4190 any = 0;
4191 for (i = 0; i < buf_len; i += 2)
4192 {
4193 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4194 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4195 {
4196 any = 1;
4197 break;
4198 }
4199 }
4200
4201 if (any == 0)
4202 {
4203 xfree (buf);
4204 return bpaddr;
4205 }
4206
4207 /* OK, the code bytes before this instruction contain at least one
4208 halfword which resembles an IT instruction. We know that it's
4209 Thumb code, but there are still two possibilities. Either the
4210 halfword really is an IT instruction, or it is the second half of
4211 a 32-bit Thumb instruction. The only way we can tell is to
4212 scan forwards from a known instruction boundary. */
4213 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4214 {
4215 int definite;
4216
4217 /* There's a lot of code before this instruction. Start with an
4218 optimistic search; it's easy to recognize halfwords that can
4219 not be the start of a 32-bit instruction, and use that to
4220 lock on to the instruction boundaries. */
4221 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4222 if (buf == NULL)
4223 return bpaddr;
4224 buf_len = IT_SCAN_THRESHOLD;
4225
4226 definite = 0;
4227 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4228 {
4229 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4230 if (thumb_insn_size (inst1) == 2)
4231 {
4232 definite = 1;
4233 break;
4234 }
4235 }
4236
4237 /* At this point, if DEFINITE, BUF[I] is the first place we
4238 are sure that we know the instruction boundaries, and it is far
4239 enough from BPADDR that we could not miss an IT instruction
4240 affecting BPADDR. If ! DEFINITE, give up - start from a
4241 known boundary. */
4242 if (! definite)
4243 {
4244 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4245 bpaddr - boundary);
4246 if (buf == NULL)
4247 return bpaddr;
4248 buf_len = bpaddr - boundary;
4249 i = 0;
4250 }
4251 }
4252 else
4253 {
4254 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4255 if (buf == NULL)
4256 return bpaddr;
4257 buf_len = bpaddr - boundary;
4258 i = 0;
4259 }
4260
4261 /* Scan forwards. Find the last IT instruction before BPADDR. */
4262 last_it = -1;
4263 last_it_count = 0;
4264 while (i < buf_len)
4265 {
4266 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4267 last_it_count--;
4268 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4269 {
4270 last_it = i;
4271 if (inst1 & 0x0001)
4272 last_it_count = 4;
4273 else if (inst1 & 0x0002)
4274 last_it_count = 3;
4275 else if (inst1 & 0x0004)
4276 last_it_count = 2;
4277 else
4278 last_it_count = 1;
4279 }
4280 i += thumb_insn_size (inst1);
4281 }
4282
4283 xfree (buf);
4284
4285 if (last_it == -1)
4286 /* There wasn't really an IT instruction after all. */
4287 return bpaddr;
4288
4289 if (last_it_count < 1)
4290 /* It was too far away. */
4291 return bpaddr;
4292
4293 /* This really is a trouble spot. Move the breakpoint to the IT
4294 instruction. */
4295 return bpaddr - buf_len + last_it;
4296 }
4297
4298 /* ARM displaced stepping support.
4299
4300 Generally ARM displaced stepping works as follows:
4301
4302 1. When an instruction is to be single-stepped, it is first decoded by
4303 arm_process_displaced_insn. Depending on the type of instruction, it is
4304 then copied to a scratch location, possibly in a modified form. The
4305 copy_* set of functions performs such modification, as necessary. A
4306 breakpoint is placed after the modified instruction in the scratch space
4307 to return control to GDB. Note in particular that instructions which
4308 modify the PC will no longer do so after modification.
4309
4310 2. The instruction is single-stepped, by setting the PC to the scratch
4311 location address, and resuming. Control returns to GDB when the
4312 breakpoint is hit.
4313
4314 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4315 function used for the current instruction. This function's job is to
4316 put the CPU/memory state back to what it would have been if the
4317 instruction had been executed unmodified in its original location. */
4318
4319 /* NOP instruction (mov r0, r0). */
4320 #define ARM_NOP 0xe1a00000
4321 #define THUMB_NOP 0x4600
4322
4323 /* Helper for register reads for displaced stepping. In particular, this
4324 returns the PC as it would be seen by the instruction at its original
4325 location. */
4326
4327 ULONGEST
4328 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4329 int regno)
4330 {
4331 ULONGEST ret;
4332 CORE_ADDR from = dsc->insn_addr;
4333
4334 if (regno == ARM_PC_REGNUM)
4335 {
4336 /* Compute pipeline offset:
4337 - When executing an ARM instruction, PC reads as the address of the
4338 current instruction plus 8.
4339 - When executing a Thumb instruction, PC reads as the address of the
4340 current instruction plus 4. */
4341
4342 if (!dsc->is_thumb)
4343 from += 8;
4344 else
4345 from += 4;
4346
4347 if (debug_displaced)
4348 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4349 (unsigned long) from);
4350 return (ULONGEST) from;
4351 }
4352 else
4353 {
4354 regcache_cooked_read_unsigned (regs, regno, &ret);
4355 if (debug_displaced)
4356 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4357 regno, (unsigned long) ret);
4358 return ret;
4359 }
4360 }
4361
4362 static int
4363 displaced_in_arm_mode (struct regcache *regs)
4364 {
4365 ULONGEST ps;
4366 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4367
4368 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4369
4370 return (ps & t_bit) == 0;
4371 }
4372
4373 /* Write to the PC as from a branch instruction. */
4374
4375 static void
4376 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4377 ULONGEST val)
4378 {
4379 if (!dsc->is_thumb)
4380 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4381 architecture versions < 6. */
4382 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4383 val & ~(ULONGEST) 0x3);
4384 else
4385 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4386 val & ~(ULONGEST) 0x1);
4387 }
4388
4389 /* Write to the PC as from a branch-exchange instruction. */
4390
4391 static void
4392 bx_write_pc (struct regcache *regs, ULONGEST val)
4393 {
4394 ULONGEST ps;
4395 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4396
4397 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4398
4399 if ((val & 1) == 1)
4400 {
4401 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4402 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4403 }
4404 else if ((val & 2) == 0)
4405 {
4406 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4407 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4408 }
4409 else
4410 {
4411 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4412 mode, align dest to 4 bytes). */
4413 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4414 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4415 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4416 }
4417 }
4418
4419 /* Write to the PC as if from a load instruction. */
4420
4421 static void
4422 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4423 ULONGEST val)
4424 {
4425 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4426 bx_write_pc (regs, val);
4427 else
4428 branch_write_pc (regs, dsc, val);
4429 }
4430
4431 /* Write to the PC as if from an ALU instruction. */
4432
4433 static void
4434 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4435 ULONGEST val)
4436 {
4437 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4438 bx_write_pc (regs, val);
4439 else
4440 branch_write_pc (regs, dsc, val);
4441 }
4442
4443 /* Helper for writing to registers for displaced stepping. Writing to the PC
4444 has a varying effects depending on the instruction which does the write:
4445 this is controlled by the WRITE_PC argument. */
4446
4447 void
4448 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4449 int regno, ULONGEST val, enum pc_write_style write_pc)
4450 {
4451 if (regno == ARM_PC_REGNUM)
4452 {
4453 if (debug_displaced)
4454 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4455 (unsigned long) val);
4456 switch (write_pc)
4457 {
4458 case BRANCH_WRITE_PC:
4459 branch_write_pc (regs, dsc, val);
4460 break;
4461
4462 case BX_WRITE_PC:
4463 bx_write_pc (regs, val);
4464 break;
4465
4466 case LOAD_WRITE_PC:
4467 load_write_pc (regs, dsc, val);
4468 break;
4469
4470 case ALU_WRITE_PC:
4471 alu_write_pc (regs, dsc, val);
4472 break;
4473
4474 case CANNOT_WRITE_PC:
4475 warning (_("Instruction wrote to PC in an unexpected way when "
4476 "single-stepping"));
4477 break;
4478
4479 default:
4480 internal_error (__FILE__, __LINE__,
4481 _("Invalid argument to displaced_write_reg"));
4482 }
4483
4484 dsc->wrote_to_pc = 1;
4485 }
4486 else
4487 {
4488 if (debug_displaced)
4489 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4490 regno, (unsigned long) val);
4491 regcache_cooked_write_unsigned (regs, regno, val);
4492 }
4493 }
4494
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns return 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  /* Process BITMASK one nibble-sized register field at a time.  */
  while (bitmask != 0)
    {
      /* Advance LOWBIT to the least significant set bit of BITMASK.  */
      while (lowbit != 0 && (bitmask & lowbit) == 0)
	lowbit <<= 1;

      if (lowbit == 0)
	break;

      /* FIELD covers the four bits of the register field starting at
	 LOWBIT.  */
      uint32_t field = lowbit * 0xf;

      /* A field holding 0b1111 names r15, the PC.  */
      if ((insn & field) == field)
	return 1;

      bitmask &= ~field;
    }

  return 0;
}
4526
4527 /* The simplest copy function. Many instructions have the same effect no
4528 matter what address they are executed at: in those cases, use this. */
4529
4530 static int
4531 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4532 const char *iname, arm_displaced_step_closure *dsc)
4533 {
4534 if (debug_displaced)
4535 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4536 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4537 iname);
4538
4539 dsc->modinsn[0] = insn;
4540
4541 return 0;
4542 }
4543
4544 static int
4545 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4546 uint16_t insn2, const char *iname,
4547 arm_displaced_step_closure *dsc)
4548 {
4549 if (debug_displaced)
4550 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4551 "opcode/class '%s' unmodified\n", insn1, insn2,
4552 iname);
4553
4554 dsc->modinsn[0] = insn1;
4555 dsc->modinsn[1] = insn2;
4556 dsc->numinsns = 2;
4557
4558 return 0;
4559 }
4560
4561 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4562 modification. */
4563 static int
4564 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4565 const char *iname,
4566 arm_displaced_step_closure *dsc)
4567 {
4568 if (debug_displaced)
4569 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4570 "opcode/class '%s' unmodified\n", insn,
4571 iname);
4572
4573 dsc->modinsn[0] = insn;
4574
4575 return 0;
4576 }
4577
4578 /* Preload instructions with immediate offset. */
4579
4580 static void
4581 cleanup_preload (struct gdbarch *gdbarch,
4582 struct regcache *regs, arm_displaced_step_closure *dsc)
4583 {
4584 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4585 if (!dsc->u.preload.immed)
4586 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4587 }
4588
4589 static void
4590 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4591 arm_displaced_step_closure *dsc, unsigned int rn)
4592 {
4593 ULONGEST rn_val;
4594 /* Preload instructions:
4595
4596 {pli/pld} [rn, #+/-imm]
4597 ->
4598 {pli/pld} [r0, #+/-imm]. */
4599
4600 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4601 rn_val = displaced_read_reg (regs, dsc, rn);
4602 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4603 dsc->u.preload.immed = 1;
4604
4605 dsc->cleanup = &cleanup_preload;
4606 }
4607
4608 static int
4609 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4610 arm_displaced_step_closure *dsc)
4611 {
4612 unsigned int rn = bits (insn, 16, 19);
4613
4614 if (!insn_references_pc (insn, 0x000f0000ul))
4615 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4616
4617 if (debug_displaced)
4618 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4619 (unsigned long) insn);
4620
4621 dsc->modinsn[0] = insn & 0xfff0ffff;
4622
4623 install_preload (gdbarch, regs, dsc, rn);
4624
4625 return 0;
4626 }
4627
/* Copy a Thumb-2 PC-relative preload (PLI/PLD) for displaced stepping.
   Returns 0 on success.  */
static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);	/* Add (1) or subtract (0) imm12.  */
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  /* Only PC-relative forms need rewriting.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Fold the U bit into the sign of the offset.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  /* displaced_read_reg returns the pipeline-adjusted PC value.  */
  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
4675
4676 /* Preload instructions with register offset. */
4677
4678 static void
4679 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4680 arm_displaced_step_closure *dsc, unsigned int rn,
4681 unsigned int rm)
4682 {
4683 ULONGEST rn_val, rm_val;
4684
4685 /* Preload register-offset instructions:
4686
4687 {pli/pld} [rn, rm {, shift}]
4688 ->
4689 {pli/pld} [r0, r1 {, shift}]. */
4690
4691 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4692 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4693 rn_val = displaced_read_reg (regs, dsc, rn);
4694 rm_val = displaced_read_reg (regs, dsc, rm);
4695 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4696 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4697 dsc->u.preload.immed = 0;
4698
4699 dsc->cleanup = &cleanup_preload;
4700 }
4701
4702 static int
4703 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4704 struct regcache *regs,
4705 arm_displaced_step_closure *dsc)
4706 {
4707 unsigned int rn = bits (insn, 16, 19);
4708 unsigned int rm = bits (insn, 0, 3);
4709
4710
4711 if (!insn_references_pc (insn, 0x000f000ful))
4712 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4713
4714 if (debug_displaced)
4715 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4716 (unsigned long) insn);
4717
4718 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4719
4720 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4721 return 0;
4722 }
4723
4724 /* Copy/cleanup coprocessor load and store instructions. */
4725
4726 static void
4727 cleanup_copro_load_store (struct gdbarch *gdbarch,
4728 struct regcache *regs,
4729 arm_displaced_step_closure *dsc)
4730 {
4731 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4732
4733 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4734
4735 if (dsc->u.ldst.writeback)
4736 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4737 }
4738
4739 static void
4740 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4741 arm_displaced_step_closure *dsc,
4742 int writeback, unsigned int rn)
4743 {
4744 ULONGEST rn_val;
4745
4746 /* Coprocessor load/store instructions:
4747
4748 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4749 ->
4750 {stc/stc2} [r0, #+/-imm].
4751
4752 ldc/ldc2 are handled identically. */
4753
4754 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4755 rn_val = displaced_read_reg (regs, dsc, rn);
4756 /* PC should be 4-byte aligned. */
4757 rn_val = rn_val & 0xfffffffc;
4758 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4759
4760 dsc->u.ldst.writeback = writeback;
4761 dsc->u.ldst.rn = rn;
4762
4763 dsc->cleanup = &cleanup_copro_load_store;
4764 }
4765
4766 static int
4767 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4768 struct regcache *regs,
4769 arm_displaced_step_closure *dsc)
4770 {
4771 unsigned int rn = bits (insn, 16, 19);
4772
4773 if (!insn_references_pc (insn, 0x000f0000ul))
4774 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4775
4776 if (debug_displaced)
4777 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4778 "load/store insn %.8lx\n", (unsigned long) insn);
4779
4780 dsc->modinsn[0] = insn & 0xfff0ffff;
4781
4782 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4783
4784 return 0;
4785 }
4786
4787 static int
4788 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4789 uint16_t insn2, struct regcache *regs,
4790 arm_displaced_step_closure *dsc)
4791 {
4792 unsigned int rn = bits (insn1, 0, 3);
4793
4794 if (rn != ARM_PC_REGNUM)
4795 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4796 "copro load/store", dsc);
4797
4798 if (debug_displaced)
4799 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4800 "load/store insn %.4x%.4x\n", insn1, insn2);
4801
4802 dsc->modinsn[0] = insn1 & 0xfff0;
4803 dsc->modinsn[1] = insn2;
4804 dsc->numinsns = 2;
4805
4806 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4807 doesn't support writeback, so pass 0. */
4808 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4809
4810 return 0;
4811 }
4812
4813 /* Clean up branch instructions (actually perform the branch, by setting
4814 PC). */
4815
4816 static void
4817 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4818 arm_displaced_step_closure *dsc)
4819 {
4820 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4821 int branch_taken = condition_true (dsc->u.branch.cond, status);
4822 enum pc_write_style write_pc = dsc->u.branch.exchange
4823 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4824
4825 if (!branch_taken)
4826 return;
4827
4828 if (dsc->u.branch.link)
4829 {
4830 /* The value of LR should be the next insn of current one. In order
4831 not to confuse logic handling later insn `bx lr', if current insn mode
4832 is Thumb, the bit 0 of LR value should be set to 1. */
4833 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4834
4835 if (dsc->is_thumb)
4836 next_insn_addr |= 0x1;
4837
4838 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4839 CANNOT_WRITE_PC);
4840 }
4841
4842 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4843 }
4844
/* Copy B/BL/BLX instructions with immediate destinations.  */

/* Record the parameters of a displaced B/BL/BLX so cleanup_branch can
   perform the branch after the scratch NOP executes.  OFFSET is the
   raw (sign-extended) encoded offset; the pipeline offset is added
   here.  */
static void
install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
		  arm_displaced_step_closure *dsc,
		  unsigned int cond, int exchange, int link, long offset)
{
  /* Implement "BL<cond> <label>" as:

     Preparation: cond <- instruction condition
     Insn: mov r0, r0 (nop)
     Cleanup: if (condition true) { r14 <- pc; pc <- label }.

     B<cond> similar, but don't set r14 in cleanup.  */

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = link;
  dsc->u.branch.exchange = exchange;

  dsc->u.branch.dest = dsc->insn_addr;
  if (link && exchange)
    /* For BLX, offset is computed from the Align (PC, 4).  */
    dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;

  /* Add the pipeline offset: PC reads as insn address +4 in Thumb
     state, +8 in ARM state.  */
  if (dsc->is_thumb)
    dsc->u.branch.dest += 4 + offset;
  else
    dsc->u.branch.dest += 8 + offset;

  dsc->cleanup = &cleanup_branch;
}
/* Copy an ARM-mode B/BL/BLX with immediate destination for displaced
   stepping: decode the 26-bit signed offset and defer the branch to
   cleanup_branch via install_b_bl_blx.  */
static int
arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
		   struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* Condition field 0xf marks the unconditional BLX encoding.  */
  int exchange = (cond == 0xf);
  int link = exchange || bit (insn, 24);
  long offset;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
			(unsigned long) insn);
  if (exchange)
    /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
       then arrange the switch into Thumb mode.  */
    offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
  else
    offset = bits (insn, 0, 23) << 2;

  /* Sign-extend the 26-bit offset (bit 25 is its sign bit).  */
  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;

  /* Execute a NOP in the scratch area; the branch happens in cleanup.  */
  dsc->modinsn[0] = ARM_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4904
/* Copy a 32-bit Thumb B/BL/BLX for displaced stepping, decoding the
   offset per the T3/T4 (B) and BL/BLX encodings, then deferring the
   branch to cleanup_branch via install_b_bl_blx.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      arm_displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  /* S is the sign bit; sbits yields 0 or -1.  */
  int s = sbits (insn1, 10, 10);
  /* I1 = NOT (J1 XOR S), I2 = NOT (J2 XOR S).  */
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL / BLX: 25-bit offset; BLX is word-aligned so its low
	 offset bit is dropped.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  /* Execute a NOP in the scratch area; the branch happens in cleanup.  */
  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4959
/* Copy B Thumb instructions.  */
/* Handles the 16-bit conditional (T1) and unconditional (T2) branch
   encodings; the actual branch is performed by cleanup_branch.  */
static int
thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
	      arm_displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd)		/* Encoding T1 (conditional).  */
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  /* Destination = insn address + 4 (Thumb pipeline offset) + offset.  */
  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = from + 4 + offset;

  /* Execute a NOP in the scratch area; the branch happens in cleanup.  */
  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
4998
4999 /* Copy BX/BLX with register-specified destinations. */
5000
5001 static void
5002 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5003 arm_displaced_step_closure *dsc, int link,
5004 unsigned int cond, unsigned int rm)
5005 {
5006 /* Implement {BX,BLX}<cond> <reg>" as:
5007
5008 Preparation: cond <- instruction condition
5009 Insn: mov r0, r0 (nop)
5010 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5011
5012 Don't set r14 in cleanup for BX. */
5013
5014 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5015
5016 dsc->u.branch.cond = cond;
5017 dsc->u.branch.link = link;
5018
5019 dsc->u.branch.exchange = 1;
5020
5021 dsc->cleanup = &cleanup_branch;
5022 }
5023
5024 static int
5025 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5026 struct regcache *regs, arm_displaced_step_closure *dsc)
5027 {
5028 unsigned int cond = bits (insn, 28, 31);
5029 /* BX: x12xxx1x
5030 BLX: x12xxx3x. */
5031 int link = bit (insn, 5);
5032 unsigned int rm = bits (insn, 0, 3);
5033
5034 if (debug_displaced)
5035 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5036 (unsigned long) insn);
5037
5038 dsc->modinsn[0] = ARM_NOP;
5039
5040 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5041 return 0;
5042 }
5043
5044 static int
5045 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5046 struct regcache *regs,
5047 arm_displaced_step_closure *dsc)
5048 {
5049 int link = bit (insn, 7);
5050 unsigned int rm = bits (insn, 3, 6);
5051
5052 if (debug_displaced)
5053 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5054 (unsigned short) insn);
5055
5056 dsc->modinsn[0] = THUMB_NOP;
5057
5058 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5059
5060 return 0;
5061 }
5062
5063
5064 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5065
5066 static void
5067 cleanup_alu_imm (struct gdbarch *gdbarch,
5068 struct regcache *regs, arm_displaced_step_closure *dsc)
5069 {
5070 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5071 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5072 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5073 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5074 }
5075
/* Copy an ARM-mode ALU instruction with immediate RHS for displaced
   stepping, redirecting Rd/Rn to scratch registers r0/r1 when the
   instruction references the PC.  */
static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  /* MOV (opcode 0xd) has no Rn operand, so its Rn field is not
     rewritten below.  */
  int is_mov = (op == 0xd);
  ULONGEST rd_val, rn_val;

  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear the Rd field (-> r0); for non-MOV also set the Rn field to
     r1.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5123
/* Copy a Thumb-2 ALU-immediate instruction (only MOV reaches here) for
   displaced stepping, redirecting Rd/Rn to scratch registers r0/r1
   when the instruction references the PC.  */
static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3); /* Rn */
  rm = bits (insn2, 0, 3); /* Rm */
  rd = bits (insn2, 8, 11); /* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Rewrite the second halfword's Rd field to r0 (keep other bits).  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5175
5176 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5177
5178 static void
5179 cleanup_alu_reg (struct gdbarch *gdbarch,
5180 struct regcache *regs, arm_displaced_step_closure *dsc)
5181 {
5182 ULONGEST rd_val;
5183 int i;
5184
5185 rd_val = displaced_read_reg (regs, dsc, 0);
5186
5187 for (i = 0; i < 3; i++)
5188 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5189
5190 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5191 }
5192
/* Prepare DSC so an ALU instruction with a register RHS can run out of
   line using r0-r2 as scratch: save r0-r2, load them with the values
   of RD, RN and RM (a PC operand reads the adjusted original PC), and
   register cleanup_alu_reg to copy the result back and restore the
   scratch registers.  */

static void
install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
		 arm_displaced_step_closure *dsc,
		 unsigned int rd, unsigned int rn, unsigned int rm)
{
  ULONGEST rd_val, rn_val, rm_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm [, <shift>]

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
		  r0, r1, r2 <- rd, rn, rm
     Insn: <op><cond> r0, [r1,] r2 [, <shift>]
     Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  dsc->cleanup = &cleanup_alu_reg;
}
5225
/* Copy an ARM data-processing instruction with a register RHS for
   displaced stepping.  Only needed when one of Rd/Rn/Rm is the PC;
   otherwise the instruction is copied unmodified.  */

static int
arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  /* Opcode 0xd is MOV, which takes no Rn operand.  */
  int is_mov = (op == 0xd);

  /* Mask covers the Rd, Rn and Rm fields.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
			is_mov ? "move" : "ALU", (unsigned long) insn);

  /* Retarget the copy at the scratch registers: Rd -> r0, Rm -> r2,
     and (for non-MOV) Rn -> r1, matching install_alu_reg below.  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
  else
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;

  install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
		   bits (insn, 0, 3));
  return 0;
}
5249
/* Copy a 16-bit Thumb ALU-register instruction for displaced stepping.
   In this encoding Rd (split over bit 7 and bits 0-2) is both source
   and destination, hence install_alu_reg is called with RD for both
   the RD and RN slots.  */

static int
thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  unsigned rm, rd;

  rm = bits (insn, 3, 6);
  rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);

  if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
    return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
			(unsigned short) insn);

  /* Clear the low byte and set the Rm field (bits 3-6) to 2: the copy
     operates on r0 (dest) and r2 (source), as set up below.  */
  dsc->modinsn[0] = ((insn & 0xff00) | 0x10);

  install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);

  return 0;
}
5273
5274 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5275
5276 static void
5277 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5278 struct regcache *regs,
5279 arm_displaced_step_closure *dsc)
5280 {
5281 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5282 int i;
5283
5284 for (i = 0; i < 4; i++)
5285 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5286
5287 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5288 }
5289
/* Prepare DSC so an ALU instruction with a register-shifted register
   RHS can run out of line using r0-r3 as scratch: save r0-r3, load
   them with the values of RD, RN, RM and RS, and register
   cleanup_alu_shifted_reg to copy the result back and restore the
   scratch registers.  */

static void
install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
			 arm_displaced_step_closure *dsc,
			 unsigned int rd, unsigned int rn, unsigned int rm,
			 unsigned rs)
{
  int i;
  ULONGEST rd_val, rn_val, rm_val, rs_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm, <shift> rs

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
		  r0, r1, r2, r3 <- rd, rn, rm, rs
     Insn: <op><cond> r0, r1, r2, <shift> r3
     Cleanup: tmp5 <- r0
	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
	      rd <- tmp5
  */

  for (i = 0; i < 4; i++)
    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  rs_val = displaced_read_reg (regs, dsc, rs);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
  dsc->rd = rd;
  dsc->cleanup = &cleanup_alu_shifted_reg;
}
5327
/* Copy an ARM data-processing instruction whose RHS is a register
   shifted by a register, for displaced stepping.  Only needed when one
   of Rd/Rn/Rm/Rs is the PC; otherwise copied unmodified.  */

static int
arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  /* Opcode 0xd is MOV, which takes no Rn operand.  */
  int is_mov = (op == 0xd);
  unsigned int rd, rn, rm, rs;

  /* Mask covers the Rd, Rn, Rs and Rm fields.  */
  if (!insn_references_pc (insn, 0x000fff0ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  rn = bits (insn, 16, 19);
  rm = bits (insn, 0, 3);
  rs = bits (insn, 8, 11);
  rd = bits (insn, 12, 15);

  /* Retarget the copy: Rd -> r0, Rm -> r2, Rs -> r3, and (for non-MOV)
     Rn -> r1, matching install_alu_shifted_reg below.  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
  else
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;

  install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);

  return 0;
}
5359
5360 /* Clean up load instructions. */
5361
static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      arm_displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* The out-of-line copy loaded into scratch registers: r0 holds the
     transferred value (r0/r1 for a doubleword transfer), and r2 holds
     the base register, possibly updated by writeback.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers.  r3 was only used for a register
     offset, i.e. the non-immediate addressing form.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  Writing the PC here performs the
     branch the original load would have taken.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
5388
5389 /* Clean up store instructions. */
5390
5391 static void
5392 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5393 arm_displaced_step_closure *dsc)
5394 {
5395 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5396
5397 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5398 if (dsc->u.ldst.xfersize > 4)
5399 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5400 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5401 if (!dsc->u.ldst.immed)
5402 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5403 if (!dsc->u.ldst.restore_r4)
5404 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5405
5406 /* Writeback. */
5407 if (dsc->u.ldst.writeback)
5408 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5409 }
5410
5411 /* Copy "extra" load/store instructions. These are halfword/doubleword
5412 transfers, which have a different encoding to byte/word transfers. */
5413
static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Per-opcode tables (indexed by the OPCODE computed below): whether
     the operation is a load, and its transfer size in bytes (8 for
     doubleword LDRD/STRD).  */
  char load[12]     = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  /* Bit 2 of op1 distinguishes immediate from register offset.  */
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  /* Mask covers the Rt, Rn and Rm fields; instructions not touching
     the PC run unmodified out of line.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
			"insn %.8lx\n", unprivileged ? "unprivileged " : "",
			(unsigned long) insn);

  /* Fold op2 and the relevant op1 bits into a table index.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
		    _("copy_extra_ld_st: instruction decode error"));

  /* Save scratch registers r0-r2 (and r3 for a register offset).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  /* Load the scratch registers with the operand values; doubleword
     transfers also use Rt+1.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  /* Writeback occurs for post-indexed (P == 0) or W == 1 forms.  */
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
	->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
	->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
5485
5486 /* Copy byte/half word/word loads and stores. */
5487
static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    arm_displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  /* Note: USERMODE is accepted for interface symmetry with callers but
     is not consulted in this function.  */
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save scratch registers: r0 and r2 always; r3 only for a register
     offset; r4 only for stores (it may be clobbered by the PC-store
     sequence described below).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Route the operands through scratch registers: r0 <- Rt, r2 <- Rn,
     and r3 <- Rm when a register offset is used.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from displaced_read_reg.

     Insn1: push {pc}		 Write address of STR instruction + offset on stack
     Insn2: pop  {r4}		 Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc	 r4 = addr(Insn1) + offset - pc
				 = addr(Insn1) + offset - addr(Insn3) - 8
				 = offset - 16
     Insn4: add r4, r4, #8	 r4 = offset - 8
     Insn5: add r0, r0, r4	 r0 = from + 8 + offset - 8
				 = from + offset
     Insn6: str r0, [r2, #imm]	 (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
5541
5542
/* Copy a Thumb-2 PC-relative (literal) load for displaced stepping.
   The PC-relative address is materialized in scratch registers and the
   load is re-expressed as a register-offset LDR.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  /* The U bit selects whether the offset is added or subtracted.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Word-align the PC, as required for literal loads.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
5598
/* Copy a Thumb-2 load with register or immediate offset for displaced
   stepping.  Only needed when Rt or Rn is the PC; otherwise the
   instruction is copied unmodified.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  /* Route Rt/Rn/Rm through the scratch registers; size 4, no usermode.  */
  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
5646
5647
/* Copy an ARM byte/word load or store for displaced stepping.  Only
   needed when Rt, Rn or Rm references the PC; storing the PC needs the
   special multi-instruction sequence described in install_load_store.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    arm_displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  /* Writeback occurs for post-indexed (P == 0) or W == 1 forms.  */
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* Mask covers the Rt, Rn and Rm fields.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* This sequence computes the PC value the original store would
	 have written (see the comment in install_load_store).  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
5712
5713 /* Cleanup LDM instructions with fully-populated register list. This is an
5714 unfortunate corner case: it's impossible to implement correctly by modifying
5715 the instruction. The issue is as follows: we have an instruction,
5716
5717 ldm rN, {r0-r15}
5718
5719 which we must rewrite to avoid loading PC. A possible solution would be to
5720 do the load in two halves, something like (with suitable cleanup
5721 afterwards):
5722
5723 mov r8, rN
5724 ldm[id][ab] r8!, {r0-r7}
5725 str r7, <temp>
5726 ldm[id][ab] r8, {r7-r14}
5727 <bkpt>
5728
5729 but at present there's no suitable place for <temp>, since the scratch space
5730 is overwritten before the cleanup routine is called. For now, we simply
5731 emulate the instruction. */
5732
static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Pre-increment/decrement bumps the address before each transfer;
     post- bumps it after.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Emulate in ascending register order for increment, descending for
     decrement, matching the LDM memory layout.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* The condition failed: the original LDM would have been a no-op.  */
  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Emulate each transfer: find the next register in the mask and load
     it from the computed address.  */
  while (regmask)
    {
      uint32_t memword;

      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
5791
5792 /* Clean up an STM which included the PC in the register list. */
5793
static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* Compute where the PC slot ended up: it is stored last (highest
     address) since PC is the highest-numbered register.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at -= 4;
    }

  /* The out-of-line STM stored the scratch-area PC; the difference from
     the scratch base is the architecture-dependent store offset
     (PC+8 or PC+12), which we re-apply to the original address.  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
5838
5839 /* Clean up an LDM which includes the PC in the register list. We clumped all
5840 the registers in the transferred list into a contiguous range r0...rX (to
5841 avoid loading PC directly and losing control of the debugged program), so we
5842 must undo that here. */
5843
static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  /* Condition failed: the original LDM did nothing.  */
  if (!load_executed)
    return;

  /* The copied LDM loaded into the contiguous range r0..r(N-1); those
     are the registers we may have scribbled over.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the original register list from r15 downwards, moving each
     loaded value from its contiguous slot to its real destination.
     Descending order guarantees a slot is read before it is
     overwritten as a destination.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
5920
5921 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5922 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5923
static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  /* Record the decoded fields for use by the cleanup routines.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1), which the rewritten LDM will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	     ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	     ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
6037
/* Thumb-2 counterpart of arm_copy_block_xfer: copy a 32-bit LDM/STM
   which references the PC, using the same rewrite-to-contiguous-list
   and fix-up-in-cleanup strategies.  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  /* Thumb-2 LDM/STM is unconditional (inside an IT block the condition
     is handled elsewhere), hence cond = INST_AL and user = 0.  */
  dsc->u.block.load = load;
  dsc->u.block.user = 0;
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  /* Rewrite the register list into a contiguous chunk r0...rX;
	     cleanup_block_load_pc shuffles values into place afterwards.  */
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1), which the rewritten LDM will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Disable writeback; it is emulated in the cleanup routine
	     (see arm_copy_block_xfer for the rationale).  */
	  if (writeback)
	    insn1 &= ~(1 << 5);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including the PC: run as-is out of line and let
	 cleanup_block_store_pc patch the stored PC value.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
6119
6120 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6121 This is used to avoid a dependency on BFD's bfd_endian enum. */
6122
6123 ULONGEST
6124 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6125 int byte_order)
6126 {
6127 return read_memory_unsigned_integer (memaddr, len,
6128 (enum bfd_endian) byte_order);
6129 }
6130
6131 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6132
6133 CORE_ADDR
6134 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6135 CORE_ADDR val)
6136 {
6137 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6138 }
6139
6140 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6141
6142 static CORE_ADDR
6143 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6144 {
6145 return 0;
6146 }
6147
6148 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6149
6150 int
6151 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6152 {
6153 return arm_is_thumb (self->regcache);
6154 }
6155
6156 /* single_step() is called just before we want to resume the inferior,
6157 if we want to single-step it but there is no hardware or kernel
6158 single-step support. We find the target of the coming instructions
6159 and breakpoint them. */
6160
6161 std::vector<CORE_ADDR>
6162 arm_software_single_step (struct regcache *regcache)
6163 {
6164 struct gdbarch *gdbarch = regcache->arch ();
6165 struct arm_get_next_pcs next_pcs_ctx;
6166
6167 arm_get_next_pcs_ctor (&next_pcs_ctx,
6168 &arm_get_next_pcs_ops,
6169 gdbarch_byte_order (gdbarch),
6170 gdbarch_byte_order_for_code (gdbarch),
6171 0,
6172 regcache);
6173
6174 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6175
6176 for (CORE_ADDR &pc_ref : next_pcs)
6177 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6178
6179 return next_pcs;
6180 }
6181
6182 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6183 for Linux, where some SVC instructions must be treated specially. */
6184
6185 static void
6186 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6187 arm_displaced_step_closure *dsc)
6188 {
6189 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6190
6191 if (debug_displaced)
6192 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6193 "%.8lx\n", (unsigned long) resume_addr);
6194
6195 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6196 }
6197
6198
6199 /* Common copy routine for svc instruction. */
6200
6201 static int
6202 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6203 arm_displaced_step_closure *dsc)
6204 {
6205 /* Preparation: none.
6206 Insn: unmodified svc.
6207 Cleanup: pc <- insn_addr + insn_size. */
6208
6209 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6210 instruction. */
6211 dsc->wrote_to_pc = 1;
6212
6213 /* Allow OS-specific code to override SVC handling. */
6214 if (dsc->u.svc.copy_svc_os)
6215 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6216 else
6217 {
6218 dsc->cleanup = &cleanup_svc;
6219 return 0;
6220 }
6221 }
6222
6223 static int
6224 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6225 struct regcache *regs, arm_displaced_step_closure *dsc)
6226 {
6227
6228 if (debug_displaced)
6229 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6230 (unsigned long) insn);
6231
6232 dsc->modinsn[0] = insn;
6233
6234 return install_svc (gdbarch, regs, dsc);
6235 }
6236
6237 static int
6238 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6239 struct regcache *regs, arm_displaced_step_closure *dsc)
6240 {
6241
6242 if (debug_displaced)
6243 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6244 insn);
6245
6246 dsc->modinsn[0] = insn;
6247
6248 return install_svc (gdbarch, regs, dsc);
6249 }
6250
6251 /* Copy undefined instructions. */
6252
6253 static int
6254 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6255 arm_displaced_step_closure *dsc)
6256 {
6257 if (debug_displaced)
6258 fprintf_unfiltered (gdb_stdlog,
6259 "displaced: copying undefined insn %.8lx\n",
6260 (unsigned long) insn);
6261
6262 dsc->modinsn[0] = insn;
6263
6264 return 0;
6265 }
6266
6267 static int
6268 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6269 arm_displaced_step_closure *dsc)
6270 {
6271
6272 if (debug_displaced)
6273 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6274 "%.4x %.4x\n", (unsigned short) insn1,
6275 (unsigned short) insn2);
6276
6277 dsc->modinsn[0] = insn1;
6278 dsc->modinsn[1] = insn2;
6279 dsc->numinsns = 2;
6280
6281 return 0;
6282 }
6283
6284 /* Copy unpredictable instructions. */
6285
6286 static int
6287 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6288 arm_displaced_step_closure *dsc)
6289 {
6290 if (debug_displaced)
6291 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6292 "%.8lx\n", (unsigned long) insn);
6293
6294 dsc->modinsn[0] = insn;
6295
6296 return 0;
6297 }
6298
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode the miscellaneous / memory-hint / Advanced-SIMD group of the
   ARM unconditional instruction space and dispatch INSN to the
   appropriate copy routine.  Returns that routine's result (0 on
   success, nonzero on failure to prepare the displaced copy).  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  /* op1 = bits<26:20>, op2 = bits<7:4>, rn = bits<19:16> of INSN.  */
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* pld/pldw with rn == pc is UNPREDICTABLE.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    /* Barrier and clrex encodings, distinguished by op2.  */
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    /* Register-offset hint forms; bit 7 of op1 is a don't-care here.  */
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
6361
/* Decode the ARM unconditional (condition field == 0b1111) instruction
   space and dispatch INSN to the matching copy routine.  Bit 27 clear
   selects the misc/memhint/Neon subspace; otherwise a composite of
   bits 26:24 and bit 20 selects among SRS/RFE/branch/coprocessor
   encodings.  Returns the chosen copy routine's result.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      /* Coprocessor stores and mcrr; distinguished by bits<23:21>.  */
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Coprocessor loads; the immediate and literal forms swap
	   validity depending on whether rn is the PC.  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6444
/* Decode miscellaneous instructions in dp/misc encoding space.  */

/* Dispatch on op2 = bits<6:4> and op = bits<22:21> of INSN.  Only BX
   and BLX (register) need real rewriting; everything else executes
   unmodified or is undefined.  Returns the copy routine's result.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* op == 0x0 or 0x2 fall through to the undef default.  */
      /* Fall through.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6497
/* Decode the ARM data-processing / miscellaneous encoding space.
   Bit 25 set selects the immediate forms (movw/movt/msr-imm/ALU-imm);
   otherwise op1 = bits<24:20> and op2 = bits<7:4> select among the
   register ALU, miscellaneous, multiply, synchronization and extra
   load/store groups.  Returns the chosen copy routine's result, or 1
   if no encoding matched (should not happen).  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6543
/* Decode ARM single load/store of word or unsigned byte.  A (bit 25)
   selects the register-offset form, B is bit 4; op1 = bits<24:20>.
   Each arm selects a (load, byte-size, unprivileged) combination for
   arm_copy_ldr_str_ldrb_strb — the trailing three arguments appear to
   be load-flag, transfer size in bytes and user-mode flag; NOTE(review):
   confirm against arm_copy_ldr_str_ldrb_strb's definition.  Returns the
   copy routine's result, or 1 if nothing matched (should not happen).  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
6580
/* Decode the ARM media instruction space (parallel add/sub, packing,
   bit-field and sum-of-absolute-differences encodings).  All of these
   can execute unmodified out of line, so this only distinguishes
   defined from undefined encodings.  Returns the copy routine's
   result, or 1 if no case matched (should not happen).  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* Rd == 0xf distinguishes usad8 from usada8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* Rn == 0xf distinguishes bfc from bfi.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6636
6637 static int
6638 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6639 struct regcache *regs,
6640 arm_displaced_step_closure *dsc)
6641 {
6642 if (bit (insn, 25))
6643 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6644 else
6645 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6646 }
6647
/* Decode ARM extension-register (VFP/Neon) load/store encodings,
   dispatching on opcode = bits<24:20>.  Only vstr/vldr need the
   coprocessor load/store copier; the multi-register forms execute
   unmodified.  Returns the copy routine's result, or 1 if nothing
   matched (should not happen).  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6678
6679 /* Decode shifted register instructions. */
6680
6681 static int
6682 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6683 uint16_t insn2, struct regcache *regs,
6684 arm_displaced_step_closure *dsc)
6685 {
6686 /* PC is only allowed to be used in instruction MOV. */
6687
6688 unsigned int op = bits (insn1, 5, 8);
6689 unsigned int rn = bits (insn1, 0, 3);
6690
6691 if (op == 0x2 && rn == 0xf) /* MOV */
6692 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6693 else
6694 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6695 "dp (shift reg)", dsc);
6696 }
6697
6698
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

/* Dispatch on opcode = bits<8:4> of the first halfword.  Only vldr
   needs the Thumb-2 coprocessor load/store copier; all other forms
   execute unmodified.  Returns the copy routine's result, or 1 if
   nothing matched (should not happen).  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6737
/* Decode the ARM supervisor-call / coprocessor instruction space,
   dispatching on op1 = bits<25:20>, op = bit 4 and the coprocessor
   number (bits<11:8>; 0b101x means VFP/Neon).  SVC itself gets real
   rewriting via arm_copy_svc; coprocessor transfers go through the
   load/store copier or run unmodified.  Returns the chosen routine's
   result.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
6782
/* Decode the Thumb-2 supervisor-call / coprocessor instruction space.
   Dispatches on bit 9 and bits<8:5> of the first halfword, and on the
   coprocessor number (bits<11:8> of the second halfword; 0b101x means
   VFP/Neon).  Only LDC/LDC2 needs real rewriting; the rest executes
   unmodified or is undefined.  Returns the chosen routine's result.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	   /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
6823
6824 static void
6825 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6826 arm_displaced_step_closure *dsc, int rd)
6827 {
6828 /* ADR Rd, #imm
6829
6830 Rewrite as:
6831
6832 Preparation: Rd <- PC
6833 Insn: ADD Rd, #imm
6834 Cleanup: Null.
6835 */
6836
6837 /* Rd <- PC */
6838 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6839 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6840 }
6841
6842 static int
6843 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6844 arm_displaced_step_closure *dsc,
6845 int rd, unsigned int imm)
6846 {
6847
6848 /* Encoding T2: ADDS Rd, #imm */
6849 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6850
6851 install_pc_relative (gdbarch, regs, dsc, rd);
6852
6853 return 0;
6854 }
6855
6856 static int
6857 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6858 struct regcache *regs,
6859 arm_displaced_step_closure *dsc)
6860 {
6861 unsigned int rd = bits (insn, 8, 10);
6862 unsigned int imm8 = bits (insn, 0, 7);
6863
6864 if (debug_displaced)
6865 fprintf_unfiltered (gdb_stdlog,
6866 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6867 rd, imm8, insn);
6868
6869 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6870 }
6871
/* Copy a 32-bit Thumb ADR (ADR.W) by rewriting it as ADD or SUB of the
   raw immediate, with Rd pre-loaded with the original PC.  Bit 7 of the
   first halfword distinguishes the SUB form (encoding T2) from the ADD
   form (encoding T3).  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* Encoding T2 */
    {
      /* Encoding T2: SUB Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* Encoding T3 */
    {
      /* Encoding T3: ADD Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
6908
/* Copy a 16-bit Thumb PC-relative (literal) load for displaced
   stepping by rebuilding it as a register-offset LDR through scratch
   registers r0, r2 and r3.  */

static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  int imm8 = (bits (insn1, 0, 7) << 2);

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  /* Save the scratch registers that the rewritten sequence clobbers.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;  /* ldr r0, [r2, r3]*/

  dsc->cleanup = &cleanup_load;

  return 0;
}
6956
/* Copy Thumb cbnz/cbz instruction.  The branch condition is resolved
   here (CBZ/CBNZ only read the register, not the flags); the scratch
   copy is a NOP and cleanup_branch writes the resolved destination.  */

static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);
  /* imm5 is encoded as i:imm5:'0' — a 7-bit byte offset.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
      dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
6995
/* Copy Table Branch Byte/Halfword */

/* TBB/TBH (bit 4 of the second halfword selects halfword entries) are
   resolved here: the table entry at Rn + Rm (or Rn + 2*Rm) is read and
   the branch destination computed, then cleanup_branch performs the
   jump.  */

static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  if (is_tbh)
    {
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  /* The table entry counts halfwords; the branch target is PC (insn
     address + 4) plus twice that count.  */
  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
7040
7041 static void
7042 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7043 arm_displaced_step_closure *dsc)
7044 {
7045 /* PC <- r7 */
7046 int val = displaced_read_reg (regs, dsc, 7);
7047 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7048
7049 /* r7 <- r8 */
7050 val = displaced_read_reg (regs, dsc, 8);
7051 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7052
7053 /* r8 <- tmp[0] */
7054 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7055
7056 }
7057
/* Copy a 16-bit Thumb POP {..., pc} for displaced stepping, using one
   of two strategies depending on whether the register list is full
   (see the comment block below).  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): full list.  Save r8, which the MOV below clobbers.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff);  /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;  /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;  /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): partial list.  Pop into r0..rN (N registers plus the
	 PC slot) and let cleanup_block_load_pc redistribute.  */
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
7130
/* Decode a 16-bit Thumb instruction INSN1 for displaced stepping and
   dispatch to the appropriate copy routine.  Instructions that do not
   reference the PC are copied unmodified.  Calls internal_error on a
   decode failure.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  /* Major opcode field, then the secondary field used by several
     groups below.  */
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3:  case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
7233
/* Decode the "load byte/halfword/word and memory hints" group of
   32-bit Thumb-2 instructions (INSN1/INSN2 halfword pair) for
   displaced stepping.  Returns the result of the selected copy
   routine, or 0.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);	/* Destination register.  */
  int rn = bits (insn1, 0, 3);		/* Base register; 0xf means PC.  */
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
7313
/* Decode a 32-bit Thumb-2 instruction (halfword pair INSN1/INSN2) for
   displaced stepping and dispatch to the appropriate copy routine.
   Instructions that cannot reference the PC are copied unmodified.
   Calls internal_error on a decode failure.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);	/* Major opcode group.  */

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int dp_op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR/ADD (PC-relative) need the PC value rewritten.  */
	      if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
7441
7442 static void
7443 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7444 struct regcache *regs,
7445 arm_displaced_step_closure *dsc)
7446 {
7447 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7448 uint16_t insn1
7449 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7450
7451 if (debug_displaced)
7452 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7453 "at %.8lx\n", insn1, (unsigned long) from);
7454
7455 dsc->is_thumb = 1;
7456 dsc->insn_size = thumb_insn_size (insn1);
7457 if (thumb_insn_size (insn1) == 4)
7458 {
7459 uint16_t insn2
7460 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7461 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7462 }
7463 else
7464 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7465 }
7466
/* Decode the instruction at FROM (ARM or Thumb, depending on the
   current CPSR state) and fill in DSC with the rewritten
   instruction(s) and cleanup for displaced stepping into the scratch
   area at TO.  Calls internal_error on a decode failure.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    arm_displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  /* Dispatch on bits 25-27 of the insn plus bit 4 in the low position:
     the major ARM opcode groups.  */
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
7524
/* Actually set up the scratch space for a displaced instruction.
   Writes the modified instruction(s) recorded in DSC to the scratch
   area at TO, followed by the architecture's breakpoint instruction so
   the step traps back to GDB.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, arm_displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb replacement sequences are emitted as 16-bit units.  */
  int size = dsc->is_thumb? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short)dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);

	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
7580
7581 /* Entry point for cleaning things up after a displaced instruction has been
7582 single-stepped. */
7583
7584 void
7585 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7586 struct displaced_step_closure *dsc_,
7587 CORE_ADDR from, CORE_ADDR to,
7588 struct regcache *regs)
7589 {
7590 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7591
7592 if (dsc->cleanup)
7593 dsc->cleanup (gdbarch, regs, dsc);
7594
7595 if (!dsc->wrote_to_pc)
7596 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7597 dsc->insn_addr + dsc->insn_size);
7598
7599 }
7600
7601 #include "bfd-in2.h"
7602 #include "libcoff.h"
7603
/* GDB's ARM disassembly hook.  If MEMADDR is a Thumb address, plant a
   fake COFF Thumb symbol in INFO so the opcodes disassembler switches
   to Thumb decoding; otherwise clear INFO->symbols.  Returns the
   number of bytes consumed, as reported by the opcodes library.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Static: built once and reused for every subsequent Thumb
	 disassembly (csym.native doubles as the "initialized" flag).  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
     accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
     opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
     the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
     in default_print_insn.  */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
7652
7653 /* The following define instruction sequences that will cause ARM
7654 cpu's to take an undefined instruction trap. These are used to
7655 signal a breakpoint to GDB.
7656
7657 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7658 modes. A different instruction is required for each mode. The ARM
7659 cpu's can also be big or little endian. Thus four different
7660 instructions are needed to support all cases.
7661
7662 Note: ARMv4 defines several new instructions that will take the
7663 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7664 not in fact add the new instructions. The new undefined
7665 instructions in ARMv4 are all instructions that had no defined
7666 behaviour in earlier chips. There is no guarantee that they will
7667 raise an exception, but may be treated as NOP's. In practice, it
7668 may only safe to rely on instructions matching:
7669
7670 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7671 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7672 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7673
   Even this may only be true if the condition predicate is true.  The
7675 following use a condition predicate of ALWAYS so it is always TRUE.
7676
7677 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7678 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
7680 abi-specific code during establishment of the gdbarch vector. */
7681
/* Undefined-instruction breakpoint encodings for each combination of
   ARM/Thumb mode and byte order.  Note the Thumb encoding (0xbebe,
   i.e. BKPT-like undefined) is the same byte sequence in both
   endiannesses.  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint byte sequences; OS ABIs may override these in
   the tdep.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7691
7692 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7693
7694 static int
7695 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7696 {
7697 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7698 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7699
7700 if (arm_pc_is_thumb (gdbarch, *pcptr))
7701 {
7702 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7703
7704 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7705 check whether we are replacing a 32-bit instruction. */
7706 if (tdep->thumb2_breakpoint != NULL)
7707 {
7708 gdb_byte buf[2];
7709
7710 if (target_read_memory (*pcptr, buf, 2) == 0)
7711 {
7712 unsigned short inst1;
7713
7714 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7715 if (thumb_insn_size (inst1) == 4)
7716 return ARM_BP_KIND_THUMB2;
7717 }
7718 }
7719
7720 return ARM_BP_KIND_THUMB;
7721 }
7722 else
7723 return ARM_BP_KIND_ARM;
7724
7725 }
7726
7727 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7728
7729 static const gdb_byte *
7730 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7731 {
7732 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7733
7734 switch (kind)
7735 {
7736 case ARM_BP_KIND_ARM:
7737 *size = tdep->arm_breakpoint_size;
7738 return tdep->arm_breakpoint;
7739 case ARM_BP_KIND_THUMB:
7740 *size = tdep->thumb_breakpoint_size;
7741 return tdep->thumb_breakpoint;
7742 case ARM_BP_KIND_THUMB2:
7743 *size = tdep->thumb2_breakpoint_size;
7744 return tdep->thumb2_breakpoint;
7745 default:
7746 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7747 }
7748 }
7749
/* Implement the breakpoint_kind_from_current_state gdbarch method.
   When *PCPTR is the destination of a predicted single step, use the
   software-single-step machinery to determine whether execution will
   arrive there in Thumb mode; otherwise fall back to classifying the
   address directly.  */

static int
arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
					struct regcache *regcache,
					CORE_ADDR *pcptr)
{
  gdb_byte buf[4];

  /* Check the memory pointed by PC is readable.  */
  if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
    {
      struct arm_get_next_pcs next_pcs_ctx;

      arm_get_next_pcs_ctor (&next_pcs_ctx,
			     &arm_get_next_pcs_ops,
			     gdbarch_byte_order (gdbarch),
			     gdbarch_byte_order_for_code (gdbarch),
			     0,
			     regcache);

      std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

      /* If MEMADDR is the next instruction of current pc, do the
	 software single step computation, and get the thumb mode by
	 the destination address.  */
      for (CORE_ADDR pc : next_pcs)
	{
	  if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
	    {
	      if (IS_THUMB_ADDR (pc))
		{
		  /* Propagate the Thumb bit so the kind computation
		     sees a Thumb destination.  */
		  *pcptr = MAKE_THUMB_ADDR (*pcptr);
		  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
		}
	      else
		return ARM_BP_KIND_ARM;
	    }
	}
    }

  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
}
7793
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  The register(s) used depend on the
   floating-point model and the category of TYPE.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];

	    regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float: the value is returned in the core registers
	     starting at r0 (a double uses r0/r1).  */
	  regs->cooked_read (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_read (ARM_A1_REGNUM + 1,
			       valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > ARM_INT_REGISTER_SIZE
				   ? ARM_INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regs->cooked_read (regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
7886
7887
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  The answer depends on
   the ABI in use: AAPCS uses a simple size test, while the legacy APCS
   mirrors GCC 2.95.1's "integer-like" classification.  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  enum type_code code;

  type = check_typedef (type);

  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers).  */
  code = TYPE_CODE (type);
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
    return 0;

  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
    {
      /* Vector values should be returned using ARM registers if they
	 are not over 16 bytes.  */
      return (TYPE_LENGTH (type) > 16);
    }

  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    {
      /* The AAPCS says all aggregates not larger than a word are returned
	 in a register.  */
      if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
	return 0;

      return 1;
    }
  else
    {
      int nRc;

      /* All aggregate types that won't fit in a register must be returned
	 in memory.  */
      if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
	return 1;

      /* In the ARM ABI, "integer" like aggregate types are returned in
	 registers.  For an aggregate type to be integer like, its size
	 must be less than or equal to ARM_INT_REGISTER_SIZE and the
	 offset of each addressable subfield must be zero.  Note that bit
	 fields are not addressable, and all addressable subfields of
	 unions always start at offset zero.

	 This function is based on the behaviour of GCC 2.95.1.
	 See: gcc/arm.c: arm_return_in_memory() for details.

	 Note: All versions of GCC before GCC 2.95.2 do not set up the
	 parameters correctly for a function returning the following
	 structure: struct { float f;}; This should be returned in memory,
	 not a register.  Richard Earnshaw sent me a patch, but I do not
	 know of any way to detect if a function like the above has been
	 compiled with the correct calling convention.  */

      /* Assume all other aggregate types can be returned in a register.
	 Run a check for structures, unions and arrays.  */
      nRc = 0;

      if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
	{
	  int i;
	  /* Need to check if this struct/union is "integer" like.  For
	     this to be true, its size must be less than or equal to
	     ARM_INT_REGISTER_SIZE and the offset of each addressable
	     subfield must be zero.  Note that bit fields are not
	     addressable, and unions always start at offset zero.  If any
	     of the subfields is a floating point type, the struct/union
	     cannot be an integer type.  */

	  /* For each field in the object, check:
	     1) Is it FP? --> yes, nRc = 1;
	     2) Is it addressable (bitpos != 0) and
	     not packed (bitsize == 0)?
	     --> yes, nRc = 1
	  */

	  for (i = 0; i < TYPE_NFIELDS (type); i++)
	    {
	      enum type_code field_type_code;

	      field_type_code
		= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
							     i)));

	      /* Is it a floating point type field?  */
	      if (field_type_code == TYPE_CODE_FLT)
		{
		  nRc = 1;
		  break;
		}

	      /* If bitpos != 0, then we have to care about it.  */
	      if (TYPE_FIELD_BITPOS (type, i) != 0)
		{
		  /* Bitfields are not addressable.  If the field bitsize is
		     zero, then the field is not packed.  Hence it cannot be
		     a bitfield or any other packed type.  */
		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
		    {
		      nRc = 1;
		      break;
		    }
		}
	    }
	}

      return nRc;
    }
}
8003
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  Mirror image of
   arm_extract_return_value: the destination register(s) depend on the
   floating-point model and the category of TYPE.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[ARM_FP_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  /* Convert to the FPA internal format before writing F0.  */
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regs->cooked_write (ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float: write to the core registers starting at r0.  */
	  regs->cooked_write (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_write (ARM_A1_REGNUM + 1,
				valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
	  regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regs->cooked_write (regno++, valbuf);
	      len -= ARM_INT_REGISTER_SIZE;
	      valbuf += ARM_INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  regs->cooked_write (regno++, tmpbuf);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
8096
8097
/* Handle function return values.  Implements the return_value gdbarch
   method: decides between register and struct (memory) conventions,
   using VFP registers for hard-float candidates, and reads/writes the
   value via READBUF/WRITEBUF when requested.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* VFP "composite" candidates are returned in d/s/q registers when
     the function follows the VFP (hard-float) ABI.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers need the special NEON accessors.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      /* Look the register up by constructed name, e.g. "d2".  */
	      char name_buf[4];
	      int regnum;

	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache->cooked_write (regnum, writebuf + i * unit_length);
	      if (readbuf)
		regcache->cooked_read (regnum, readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8167
8168
8169 static int
8170 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8171 {
8172 struct gdbarch *gdbarch = get_frame_arch (frame);
8173 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8174 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8175 CORE_ADDR jb_addr;
8176 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8177
8178 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8179
8180 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8181 ARM_INT_REGISTER_SIZE))
8182 return 0;
8183
8184 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8185 return 1;
8186 }
8187 /* A call to cmse secure entry function "foo" at "a" is modified by
8188 GNU ld as "b".
8189 a) bl xxxx <foo>
8190
8191 <foo>
8192 xxxx:
8193
8194 b) bl yyyy <__acle_se_foo>
8195
8196 section .gnu.sgstubs:
8197 <foo>
8198 yyyy: sg // secure gateway
8199 b.w xxxx <__acle_se_foo> // original_branch_dest
8200
8201 <__acle_se_foo>
8202 xxxx:
8203
   When control reaches "b", the pc contains "yyyy" (the sg address), which is a
8205 trampoline and does not exist in source code. This function returns the
8206 target pc "xxxx". For more details please refer to section 5.4
8207 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8208 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8209 document on www.developer.arm.com. */
8210
8211 static CORE_ADDR
8212 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8213 {
8214 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8215 char *target_name = (char *) alloca (target_len);
8216 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8217
8218 struct bound_minimal_symbol minsym
8219 = lookup_minimal_symbol (target_name, NULL, objfile);
8220
8221 if (minsym.minsym != nullptr)
8222 return BMSYMBOL_VALUE_ADDRESS (minsym);
8223
8224 return 0;
8225 }
8226
8227 /* Return true when SEC points to ".gnu.sgstubs" section. */
8228
8229 static bool
8230 arm_is_sgstubs_section (struct obj_section *sec)
8231 {
8232 return (sec != nullptr
8233 && sec->the_bfd_section != nullptr
8234 && sec->the_bfd_section->name != nullptr
8235 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8236 }
8237
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.  */

CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
	 check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
	return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where xx is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
	 target PC.  All suffixes in the table are two characters, so
	 the suffix starts two characters from the end of the name.  */
      static const char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
	if (strcmp (&name[offset], table[regno]) == 0)
	  return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
	  || (namelen > 2 + strlen ("_from_arm")
	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
    {
      char *target_name;
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      /* Distinguish the two suffixes by their final character
	 ('b' ends "_from_thumb", 'm' ends "_from_arm").  */
      if (name[namelen - 1] == 'b')
	target_len -= strlen ("_from_thumb");
      else
	target_len -= strlen ("_from_arm");

      /* Strip the leading "__" and the suffix to recover the target
	 symbol's name, then look it up in the minimal symbol table.  */
      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
	return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
	return 0;
    }

  struct obj_section *section = find_pc_section (pc);

  /* Check whether SECTION points to the ".gnu.sgstubs" section.  */
  if (arm_is_sgstubs_section (section))
    return arm_skip_cmse_entry (pc, name, section->objfile);

  return 0;			/* not a stub */
}
8324
8325 static void
8326 set_arm_command (const char *args, int from_tty)
8327 {
8328 printf_unfiltered (_("\
8329 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8330 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8331 }
8332
/* Implement the top-level "show arm" command: display the values of
   all "show arm" subcommands.  */

static void
show_arm_command (const char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
8338
8339 static void
8340 arm_update_current_architecture (void)
8341 {
8342 struct gdbarch_info info;
8343
8344 /* If the current architecture is not ARM, we have nothing to do. */
8345 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8346 return;
8347
8348 /* Update the architecture. */
8349 gdbarch_info_init (&info);
8350
8351 if (!gdbarch_update_p (info))
8352 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8353 }
8354
8355 static void
8356 set_fp_model_sfunc (const char *args, int from_tty,
8357 struct cmd_list_element *c)
8358 {
8359 int fp_model;
8360
8361 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8362 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8363 {
8364 arm_fp_model = (enum arm_float_model) fp_model;
8365 break;
8366 }
8367
8368 if (fp_model == ARM_FLOAT_LAST)
8369 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8370 current_fp_model);
8371
8372 arm_update_current_architecture ();
8373 }
8374
8375 static void
8376 show_fp_model (struct ui_file *file, int from_tty,
8377 struct cmd_list_element *c, const char *value)
8378 {
8379 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8380
8381 if (arm_fp_model == ARM_FLOAT_AUTO
8382 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8383 fprintf_filtered (file, _("\
8384 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8385 fp_model_strings[tdep->fp_model]);
8386 else
8387 fprintf_filtered (file, _("\
8388 The current ARM floating point model is \"%s\".\n"),
8389 fp_model_strings[arm_fp_model]);
8390 }
8391
8392 static void
8393 arm_set_abi (const char *args, int from_tty,
8394 struct cmd_list_element *c)
8395 {
8396 int arm_abi;
8397
8398 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8399 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8400 {
8401 arm_abi_global = (enum arm_abi_kind) arm_abi;
8402 break;
8403 }
8404
8405 if (arm_abi == ARM_ABI_LAST)
8406 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8407 arm_abi_string);
8408
8409 arm_update_current_architecture ();
8410 }
8411
8412 static void
8413 arm_show_abi (struct ui_file *file, int from_tty,
8414 struct cmd_list_element *c, const char *value)
8415 {
8416 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8417
8418 if (arm_abi_global == ARM_ABI_AUTO
8419 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8420 fprintf_filtered (file, _("\
8421 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8422 arm_abi_strings[tdep->arm_abi]);
8423 else
8424 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8425 arm_abi_string);
8426 }
8427
/* Show the execution mode (arm/thumb/auto) assumed when symbol
   information is unavailable ("show arm fallback-mode").  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
8437
/* Show the execution mode (arm/thumb/auto) forced regardless of symbol
   information ("show arm force-mode").  */

static void
arm_show_force_mode (struct ui_file *file, int from_tty,
		     struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(even when symbols are available) is \"%s\".\n"),
		    arm_force_mode_string);
}
8447
8448 /* If the user changes the register disassembly style used for info
8449 register and other commands, we have to also switch the style used
8450 in opcodes for disassembly output. This function is run in the "set
8451 arm disassembly" command, and does that. */
8452
8453 static void
8454 set_disassembly_style_sfunc (const char *args, int from_tty,
8455 struct cmd_list_element *c)
8456 {
8457 /* Convert the short style name into the long style name (eg, reg-names-*)
8458 before calling the generic set_disassembler_options() function. */
8459 std::string long_name = std::string ("reg-names-") + disassembly_style;
8460 set_disassembler_options (&long_name[0]);
8461 }
8462
/* Show the current register-name disassembly style.  The style is not
   stored separately; it is recovered from the disassembler's
   "reg-names-*" option string.  */

static void
show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
			      struct cmd_list_element *c, const char *value)
{
  struct gdbarch *gdbarch = get_current_arch ();
  char *options = get_disassembler_options (gdbarch);
  const char *style = "";
  int len = 0;
  const char *opt;

  /* Scan the comma-separated options for "reg-names-" entries; the
     last one wins.  STYLE/LEN delimit its value (up to the next
     comma).  */
  FOR_EACH_DISASSEMBLER_OPTION (opt, options)
    if (CONST_STRNEQ (opt, "reg-names-"))
      {
	style = &opt[strlen ("reg-names-")];
	len = strcspn (style, ",");
      }

  fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
}
8482 \f
8483 /* Return the ARM register name corresponding to register I. */
8484 static const char *
8485 arm_register_name (struct gdbarch *gdbarch, int i)
8486 {
8487 const int num_regs = gdbarch_num_regs (gdbarch);
8488
8489 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8490 && i >= num_regs && i < num_regs + 32)
8491 {
8492 static const char *const vfp_pseudo_names[] = {
8493 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8494 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8495 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8496 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8497 };
8498
8499 return vfp_pseudo_names[i - num_regs];
8500 }
8501
8502 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8503 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8504 {
8505 static const char *const neon_pseudo_names[] = {
8506 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8507 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8508 };
8509
8510 return neon_pseudo_names[i - num_regs - 32];
8511 }
8512
8513 if (i >= ARRAY_SIZE (arm_register_names))
8514 /* These registers are only supported on targets which supply
8515 an XML description. */
8516 return "";
8517
8518 return arm_register_names[i];
8519 }
8520
8521 /* Test whether the coff symbol specific value corresponds to a Thumb
8522 function. */
8523
8524 static int
8525 coff_sym_is_thumb (int val)
8526 {
8527 return (val == C_THUMBEXT
8528 || val == C_THUMBSTAT
8529 || val == C_THUMBEXTFUNC
8530 || val == C_THUMBSTATFUNC
8531 || val == C_THUMBLABEL);
8532 }
8533
8534 /* arm_coff_make_msymbol_special()
8535 arm_elf_make_msymbol_special()
8536
8537 These functions test whether the COFF or ELF symbol corresponds to
8538 an address in thumb code, and set a "special" bit in a minimal
8539 symbol to indicate that it does. */
8540
static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  elf_symbol_type *elfsym = (elf_symbol_type *) sym;

  /* The ELF symbol's target-internal field records whether branches to
     it should switch to Thumb state; mirror that in the msymbol.  */
  if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
8550
static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  /* VAL is the COFF symbol's storage class; Thumb classes get the
     Thumb ("special") bit set on the minimal symbol.  */
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
8557
/* Record an ARM mapping symbol ($a, $t or $d) found in OBJFILE.  These
   mark where ARM code, Thumb code and data start within a section and
   are consulted later to decide the instruction mode at an address.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  /* Only $a (ARM code), $t (Thumb code) and $d (data) are of
     interest.  */
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily create the per-objfile data, which holds one mapping-symbol
     vector per section.  */
  data = arm_objfile_data_key.get (objfile);
  if (data == NULL)
    data = arm_objfile_data_key.emplace (objfile,
					 objfile->obfd->section_count);
  arm_mapping_symbol_vec &map
    = data->section_maps[bfd_asymbol_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Insert at the end, the vector will be sorted on first use.  */
  map.push_back (new_map_sym);
}
8583
8584 static void
8585 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8586 {
8587 struct gdbarch *gdbarch = regcache->arch ();
8588 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8589
8590 /* If necessary, set the T bit. */
8591 if (arm_apcs_32)
8592 {
8593 ULONGEST val, t_bit;
8594 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8595 t_bit = arm_psr_thumb_bit (gdbarch);
8596 if (arm_pc_is_thumb (gdbarch, pc))
8597 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8598 val | t_bit);
8599 else
8600 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8601 val & ~t_bit);
8602 }
8603 }
8604
8605 /* Read the contents of a NEON quad register, by reading from two
8606 double registers. This is used to implement the quad pseudo
8607 registers, and for argument passing in case the quad registers are
8608 missing; vectors are passed in quad registers when using the VFP
8609 ABI, even if a NEON unit is not present. REGNUM is the index of
8610 the quad register, in [0, 15]. */
8611
8612 static enum register_status
8613 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8614 int regnum, gdb_byte *buf)
8615 {
8616 char name_buf[4];
8617 gdb_byte reg_buf[8];
8618 int offset, double_regnum;
8619 enum register_status status;
8620
8621 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8622 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8623 strlen (name_buf));
8624
8625 /* d0 is always the least significant half of q0. */
8626 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8627 offset = 8;
8628 else
8629 offset = 0;
8630
8631 status = regcache->raw_read (double_regnum, reg_buf);
8632 if (status != REG_VALID)
8633 return status;
8634 memcpy (buf + offset, reg_buf, 8);
8635
8636 offset = 8 - offset;
8637 status = regcache->raw_read (double_regnum + 1, reg_buf);
8638 if (status != REG_VALID)
8639 return status;
8640 memcpy (buf + offset, reg_buf, 8);
8641
8642 return REG_VALID;
8643 }
8644
/* Implement the "pseudo_register_read" gdbarch method.  Relative to the
   first pseudo register, indices 0-31 are the VFP single-precision
   registers s0-s31 and 32-47 are the NEON quad registers q0-q15; both
   are synthesized from the underlying double (d) registers.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* Map the containing double register's name to its number; the
	 target's register numbering may differ.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache->raw_read (double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
8683
8684 /* Store the contents of BUF to a NEON quad register, by writing to
8685 two double registers. This is used to implement the quad pseudo
8686 registers, and for argument passing in case the quad registers are
8687 missing; vectors are passed in quad registers when using the VFP
8688 ABI, even if a NEON unit is not present. REGNUM is the index
8689 of the quad register, in [0, 15]. */
8690
8691 static void
8692 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8693 int regnum, const gdb_byte *buf)
8694 {
8695 char name_buf[4];
8696 int offset, double_regnum;
8697
8698 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8699 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8700 strlen (name_buf));
8701
8702 /* d0 is always the least significant half of q0. */
8703 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8704 offset = 8;
8705 else
8706 offset = 0;
8707
8708 regcache->raw_write (double_regnum, buf + offset);
8709 offset = 8 - offset;
8710 regcache->raw_write (double_regnum + 1, buf + offset);
8711 }
8712
/* Implement the "pseudo_register_write" gdbarch method.  See
   arm_pseudo_read for the pseudo register layout.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* Map the containing double register's name to its number; the
	 target's register numbering may differ.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* Read-modify-write: only 4 bytes of the double register
	 change.  */
      regcache->raw_read (double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache->raw_write (double_regnum, reg_buf);
    }
}
8748
8749 static struct value *
8750 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8751 {
8752 const int *reg_p = (const int *) baton;
8753 return value_of_register (*reg_p, frame);
8754 }
8755 \f
8756 static enum gdb_osabi
8757 arm_elf_osabi_sniffer (bfd *abfd)
8758 {
8759 unsigned int elfosabi;
8760 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8761
8762 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8763
8764 if (elfosabi == ELFOSABI_ARM)
8765 /* GNU tools use this value. Check note sections in this case,
8766 as well. */
8767 bfd_map_over_sections (abfd,
8768 generic_elf_osabi_sniff_abi_tag_sections,
8769 &osabi);
8770
8771 /* Anything else will be handled by the generic ELF sniffer. */
8772 return osabi;
8773 }
8774
8775 static int
8776 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8777 struct reggroup *group)
8778 {
8779 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8780 this, FPS register belongs to save_regroup, restore_reggroup, and
8781 all_reggroup, of course. */
8782 if (regnum == ARM_FPS_REGNUM)
8783 return (group == float_reggroup
8784 || group == save_reggroup
8785 || group == restore_reggroup
8786 || group == all_reggroup);
8787 else
8788 return default_register_reggroup_p (gdbarch, regnum, group);
8789 }
8790
8791 /* For backward-compatibility we allow two 'g' packet lengths with
8792 the remote protocol depending on whether FPA registers are
8793 supplied. M-profile targets do not have FPA registers, but some
8794 stubs already exist in the wild which use a 'g' packet which
8795 supplies them albeit with dummy values. The packet format which
8796 includes FPA registers should be considered deprecated for
8797 M-profile targets. */
8798
8799 static void
8800 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8801 {
8802 if (gdbarch_tdep (gdbarch)->is_m)
8803 {
8804 const target_desc *tdesc;
8805
8806 /* If we know from the executable this is an M-profile target,
8807 cater for remote targets whose register set layout is the
8808 same as the FPA layout. */
8809 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8810 register_remote_g_packet_guess (gdbarch,
8811 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8812 tdesc);
8813
8814 /* The regular M-profile layout. */
8815 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8816 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8817 tdesc);
8818
8819 /* M-profile plus M4F VFP. */
8820 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8821 register_remote_g_packet_guess (gdbarch,
8822 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8823 tdesc);
8824 }
8825
8826 /* Otherwise we don't have a useful guess. */
8827 }
8828
8829 /* Implement the code_of_frame_writable gdbarch method. */
8830
8831 static int
8832 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8833 {
8834 if (gdbarch_tdep (gdbarch)->is_m
8835 && get_frame_type (frame) == SIGTRAMP_FRAME)
8836 {
8837 /* M-profile exception frames return to some magic PCs, where
8838 isn't writable at all. */
8839 return 0;
8840 }
8841 else
8842 return 1;
8843 }
8844
8845 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8846 to be postfixed by a version (eg armv7hl). */
8847
8848 static const char *
8849 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8850 {
8851 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8852 return "arm(v[^- ]*)?";
8853 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8854 }
8855
8856 /* Initialize the current architecture based on INFO. If possible,
8857 re-use an architecture from ARCHES, which is a list of
8858 architectures already created during this debugging session.
8859
8860 Called e.g. at program startup, when reading a core file, and when
8861 reading a binary file. */
8862
8863 static struct gdbarch *
8864 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8865 {
8866 struct gdbarch_tdep *tdep;
8867 struct gdbarch *gdbarch;
8868 struct gdbarch_list *best_arch;
8869 enum arm_abi_kind arm_abi = arm_abi_global;
8870 enum arm_float_model fp_model = arm_fp_model;
8871 struct tdesc_arch_data *tdesc_data = NULL;
8872 int i, is_m = 0;
8873 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8874 int have_wmmx_registers = 0;
8875 int have_neon = 0;
8876 int have_fpa_registers = 1;
8877 const struct target_desc *tdesc = info.target_desc;
8878
8879 /* If we have an object to base this architecture on, try to determine
8880 its ABI. */
8881
8882 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8883 {
8884 int ei_osabi, e_flags;
8885
8886 switch (bfd_get_flavour (info.abfd))
8887 {
8888 case bfd_target_coff_flavour:
8889 /* Assume it's an old APCS-style ABI. */
8890 /* XXX WinCE? */
8891 arm_abi = ARM_ABI_APCS;
8892 break;
8893
8894 case bfd_target_elf_flavour:
8895 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8896 e_flags = elf_elfheader (info.abfd)->e_flags;
8897
8898 if (ei_osabi == ELFOSABI_ARM)
8899 {
8900 /* GNU tools used to use this value, but do not for EABI
8901 objects. There's nowhere to tag an EABI version
8902 anyway, so assume APCS. */
8903 arm_abi = ARM_ABI_APCS;
8904 }
8905 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8906 {
8907 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8908
8909 switch (eabi_ver)
8910 {
8911 case EF_ARM_EABI_UNKNOWN:
8912 /* Assume GNU tools. */
8913 arm_abi = ARM_ABI_APCS;
8914 break;
8915
8916 case EF_ARM_EABI_VER4:
8917 case EF_ARM_EABI_VER5:
8918 arm_abi = ARM_ABI_AAPCS;
8919 /* EABI binaries default to VFP float ordering.
8920 They may also contain build attributes that can
8921 be used to identify if the VFP argument-passing
8922 ABI is in use. */
8923 if (fp_model == ARM_FLOAT_AUTO)
8924 {
8925 #ifdef HAVE_ELF
8926 switch (bfd_elf_get_obj_attr_int (info.abfd,
8927 OBJ_ATTR_PROC,
8928 Tag_ABI_VFP_args))
8929 {
8930 case AEABI_VFP_args_base:
8931 /* "The user intended FP parameter/result
8932 passing to conform to AAPCS, base
8933 variant". */
8934 fp_model = ARM_FLOAT_SOFT_VFP;
8935 break;
8936 case AEABI_VFP_args_vfp:
8937 /* "The user intended FP parameter/result
8938 passing to conform to AAPCS, VFP
8939 variant". */
8940 fp_model = ARM_FLOAT_VFP;
8941 break;
8942 case AEABI_VFP_args_toolchain:
8943 /* "The user intended FP parameter/result
8944 passing to conform to tool chain-specific
8945 conventions" - we don't know any such
8946 conventions, so leave it as "auto". */
8947 break;
8948 case AEABI_VFP_args_compatible:
8949 /* "Code is compatible with both the base
8950 and VFP variants; the user did not permit
8951 non-variadic functions to pass FP
8952 parameters/results" - leave it as
8953 "auto". */
8954 break;
8955 default:
8956 /* Attribute value not mentioned in the
8957 November 2012 ABI, so leave it as
8958 "auto". */
8959 break;
8960 }
8961 #else
8962 fp_model = ARM_FLOAT_SOFT_VFP;
8963 #endif
8964 }
8965 break;
8966
8967 default:
8968 /* Leave it as "auto". */
8969 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8970 break;
8971 }
8972
8973 #ifdef HAVE_ELF
8974 /* Detect M-profile programs. This only works if the
8975 executable file includes build attributes; GCC does
8976 copy them to the executable, but e.g. RealView does
8977 not. */
8978 int attr_arch
8979 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8980 Tag_CPU_arch);
8981 int attr_profile
8982 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8983 Tag_CPU_arch_profile);
8984
8985 /* GCC specifies the profile for v6-M; RealView only
8986 specifies the profile for architectures starting with
8987 V7 (as opposed to architectures with a tag
8988 numerically greater than TAG_CPU_ARCH_V7). */
8989 if (!tdesc_has_registers (tdesc)
8990 && (attr_arch == TAG_CPU_ARCH_V6_M
8991 || attr_arch == TAG_CPU_ARCH_V6S_M
8992 || attr_profile == 'M'))
8993 is_m = 1;
8994 #endif
8995 }
8996
8997 if (fp_model == ARM_FLOAT_AUTO)
8998 {
8999 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9000 {
9001 case 0:
9002 /* Leave it as "auto". Strictly speaking this case
9003 means FPA, but almost nobody uses that now, and
9004 many toolchains fail to set the appropriate bits
9005 for the floating-point model they use. */
9006 break;
9007 case EF_ARM_SOFT_FLOAT:
9008 fp_model = ARM_FLOAT_SOFT_FPA;
9009 break;
9010 case EF_ARM_VFP_FLOAT:
9011 fp_model = ARM_FLOAT_VFP;
9012 break;
9013 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9014 fp_model = ARM_FLOAT_SOFT_VFP;
9015 break;
9016 }
9017 }
9018
9019 if (e_flags & EF_ARM_BE8)
9020 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9021
9022 break;
9023
9024 default:
9025 /* Leave it as "auto". */
9026 break;
9027 }
9028 }
9029
9030 /* Check any target description for validity. */
9031 if (tdesc_has_registers (tdesc))
9032 {
9033 /* For most registers we require GDB's default names; but also allow
9034 the numeric names for sp / lr / pc, as a convenience. */
9035 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9036 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9037 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9038
9039 const struct tdesc_feature *feature;
9040 int valid_p;
9041
9042 feature = tdesc_find_feature (tdesc,
9043 "org.gnu.gdb.arm.core");
9044 if (feature == NULL)
9045 {
9046 feature = tdesc_find_feature (tdesc,
9047 "org.gnu.gdb.arm.m-profile");
9048 if (feature == NULL)
9049 return NULL;
9050 else
9051 is_m = 1;
9052 }
9053
9054 tdesc_data = tdesc_data_alloc ();
9055
9056 valid_p = 1;
9057 for (i = 0; i < ARM_SP_REGNUM; i++)
9058 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9059 arm_register_names[i]);
9060 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9061 ARM_SP_REGNUM,
9062 arm_sp_names);
9063 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9064 ARM_LR_REGNUM,
9065 arm_lr_names);
9066 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9067 ARM_PC_REGNUM,
9068 arm_pc_names);
9069 if (is_m)
9070 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9071 ARM_PS_REGNUM, "xpsr");
9072 else
9073 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9074 ARM_PS_REGNUM, "cpsr");
9075
9076 if (!valid_p)
9077 {
9078 tdesc_data_cleanup (tdesc_data);
9079 return NULL;
9080 }
9081
9082 feature = tdesc_find_feature (tdesc,
9083 "org.gnu.gdb.arm.fpa");
9084 if (feature != NULL)
9085 {
9086 valid_p = 1;
9087 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9088 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9089 arm_register_names[i]);
9090 if (!valid_p)
9091 {
9092 tdesc_data_cleanup (tdesc_data);
9093 return NULL;
9094 }
9095 }
9096 else
9097 have_fpa_registers = 0;
9098
9099 feature = tdesc_find_feature (tdesc,
9100 "org.gnu.gdb.xscale.iwmmxt");
9101 if (feature != NULL)
9102 {
9103 static const char *const iwmmxt_names[] = {
9104 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9105 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9106 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9107 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9108 };
9109
9110 valid_p = 1;
9111 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9112 valid_p
9113 &= tdesc_numbered_register (feature, tdesc_data, i,
9114 iwmmxt_names[i - ARM_WR0_REGNUM]);
9115
9116 /* Check for the control registers, but do not fail if they
9117 are missing. */
9118 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9119 tdesc_numbered_register (feature, tdesc_data, i,
9120 iwmmxt_names[i - ARM_WR0_REGNUM]);
9121
9122 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9123 valid_p
9124 &= tdesc_numbered_register (feature, tdesc_data, i,
9125 iwmmxt_names[i - ARM_WR0_REGNUM]);
9126
9127 if (!valid_p)
9128 {
9129 tdesc_data_cleanup (tdesc_data);
9130 return NULL;
9131 }
9132
9133 have_wmmx_registers = 1;
9134 }
9135
9136 /* If we have a VFP unit, check whether the single precision registers
9137 are present. If not, then we will synthesize them as pseudo
9138 registers. */
9139 feature = tdesc_find_feature (tdesc,
9140 "org.gnu.gdb.arm.vfp");
9141 if (feature != NULL)
9142 {
9143 static const char *const vfp_double_names[] = {
9144 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9145 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9146 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9147 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9148 };
9149
9150 /* Require the double precision registers. There must be either
9151 16 or 32. */
9152 valid_p = 1;
9153 for (i = 0; i < 32; i++)
9154 {
9155 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9156 ARM_D0_REGNUM + i,
9157 vfp_double_names[i]);
9158 if (!valid_p)
9159 break;
9160 }
9161 if (!valid_p && i == 16)
9162 valid_p = 1;
9163
9164 /* Also require FPSCR. */
9165 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9166 ARM_FPSCR_REGNUM, "fpscr");
9167 if (!valid_p)
9168 {
9169 tdesc_data_cleanup (tdesc_data);
9170 return NULL;
9171 }
9172
9173 if (tdesc_unnumbered_register (feature, "s0") == 0)
9174 have_vfp_pseudos = 1;
9175
9176 vfp_register_count = i;
9177
9178 /* If we have VFP, also check for NEON. The architecture allows
9179 NEON without VFP (integer vector operations only), but GDB
9180 does not support that. */
9181 feature = tdesc_find_feature (tdesc,
9182 "org.gnu.gdb.arm.neon");
9183 if (feature != NULL)
9184 {
9185 /* NEON requires 32 double-precision registers. */
9186 if (i != 32)
9187 {
9188 tdesc_data_cleanup (tdesc_data);
9189 return NULL;
9190 }
9191
9192 /* If there are quad registers defined by the stub, use
9193 their type; otherwise (normally) provide them with
9194 the default type. */
9195 if (tdesc_unnumbered_register (feature, "q0") == 0)
9196 have_neon_pseudos = 1;
9197
9198 have_neon = 1;
9199 }
9200 }
9201 }
9202
9203 /* If there is already a candidate, use it. */
9204 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9205 best_arch != NULL;
9206 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9207 {
9208 if (arm_abi != ARM_ABI_AUTO
9209 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9210 continue;
9211
9212 if (fp_model != ARM_FLOAT_AUTO
9213 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9214 continue;
9215
9216 /* There are various other properties in tdep that we do not
9217 need to check here: those derived from a target description,
9218 since gdbarches with a different target description are
9219 automatically disqualified. */
9220
9221 /* Do check is_m, though, since it might come from the binary. */
9222 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9223 continue;
9224
9225 /* Found a match. */
9226 break;
9227 }
9228
9229 if (best_arch != NULL)
9230 {
9231 if (tdesc_data != NULL)
9232 tdesc_data_cleanup (tdesc_data);
9233 return best_arch->gdbarch;
9234 }
9235
9236 tdep = XCNEW (struct gdbarch_tdep);
9237 gdbarch = gdbarch_alloc (&info, tdep);
9238
9239 /* Record additional information about the architecture we are defining.
9240 These are gdbarch discriminators, like the OSABI. */
9241 tdep->arm_abi = arm_abi;
9242 tdep->fp_model = fp_model;
9243 tdep->is_m = is_m;
9244 tdep->have_fpa_registers = have_fpa_registers;
9245 tdep->have_wmmx_registers = have_wmmx_registers;
9246 gdb_assert (vfp_register_count == 0
9247 || vfp_register_count == 16
9248 || vfp_register_count == 32);
9249 tdep->vfp_register_count = vfp_register_count;
9250 tdep->have_vfp_pseudos = have_vfp_pseudos;
9251 tdep->have_neon_pseudos = have_neon_pseudos;
9252 tdep->have_neon = have_neon;
9253
9254 arm_register_g_packet_guesses (gdbarch);
9255
9256 /* Breakpoints. */
9257 switch (info.byte_order_for_code)
9258 {
9259 case BFD_ENDIAN_BIG:
9260 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9261 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9262 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9263 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9264
9265 break;
9266
9267 case BFD_ENDIAN_LITTLE:
9268 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9269 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9270 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9271 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9272
9273 break;
9274
9275 default:
9276 internal_error (__FILE__, __LINE__,
9277 _("arm_gdbarch_init: bad byte order for float format"));
9278 }
9279
9280 /* On ARM targets char defaults to unsigned. */
9281 set_gdbarch_char_signed (gdbarch, 0);
9282
9283 /* wchar_t is unsigned under the AAPCS. */
9284 if (tdep->arm_abi == ARM_ABI_AAPCS)
9285 set_gdbarch_wchar_signed (gdbarch, 0);
9286 else
9287 set_gdbarch_wchar_signed (gdbarch, 1);
9288
9289 /* Compute type alignment. */
9290 set_gdbarch_type_align (gdbarch, arm_type_align);
9291
9292 /* Note: for displaced stepping, this includes the breakpoint, and one word
9293 of additional scratch space. This setting isn't used for anything beside
9294 displaced stepping at present. */
9295 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9296
9297 /* This should be low enough for everything. */
9298 tdep->lowest_pc = 0x20;
9299 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9300
9301 /* The default, for both APCS and AAPCS, is to return small
9302 structures in registers. */
9303 tdep->struct_return = reg_struct_return;
9304
9305 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9306 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9307
9308 if (is_m)
9309 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9310
9311 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9312
9313 frame_base_set_default (gdbarch, &arm_normal_base);
9314
9315 /* Address manipulation. */
9316 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9317
9318 /* Advance PC across function entry code. */
9319 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9320
9321 /* Detect whether PC is at a point where the stack has been destroyed. */
9322 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9323
9324 /* Skip trampolines. */
9325 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9326
9327 /* The stack grows downward. */
9328 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9329
9330 /* Breakpoint manipulation. */
9331 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9332 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9333 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9334 arm_breakpoint_kind_from_current_state);
9335
9336 /* Information about registers, etc. */
9337 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9338 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9339 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9340 set_gdbarch_register_type (gdbarch, arm_register_type);
9341 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9342
9343 /* This "info float" is FPA-specific. Use the generic version if we
9344 do not have FPA. */
9345 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9346 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9347
9348 /* Internal <-> external register number maps. */
9349 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9350 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9351
9352 set_gdbarch_register_name (gdbarch, arm_register_name);
9353
9354 /* Returning results. */
9355 set_gdbarch_return_value (gdbarch, arm_return_value);
9356
9357 /* Disassembly. */
9358 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9359
9360 /* Minsymbol frobbing. */
9361 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9362 set_gdbarch_coff_make_msymbol_special (gdbarch,
9363 arm_coff_make_msymbol_special);
9364 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9365
9366 /* Thumb-2 IT block support. */
9367 set_gdbarch_adjust_breakpoint_address (gdbarch,
9368 arm_adjust_breakpoint_address);
9369
9370 /* Virtual tables. */
9371 set_gdbarch_vbit_in_delta (gdbarch, 1);
9372
9373 /* Hook in the ABI-specific overrides, if they have been registered. */
9374 gdbarch_init_osabi (info, gdbarch);
9375
9376 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9377
9378 /* Add some default predicates. */
9379 if (is_m)
9380 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9381 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9382 dwarf2_append_unwinders (gdbarch);
9383 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9384 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9385 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9386
9387 /* Now we have tuned the configuration, set a few final things,
9388 based on what the OS ABI has told us. */
9389
9390 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9391 binaries are always marked. */
9392 if (tdep->arm_abi == ARM_ABI_AUTO)
9393 tdep->arm_abi = ARM_ABI_APCS;
9394
9395 /* Watchpoints are not steppable. */
9396 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9397
9398 /* We used to default to FPA for generic ARM, but almost nobody
9399 uses that now, and we now provide a way for the user to force
9400 the model. So default to the most useful variant. */
9401 if (tdep->fp_model == ARM_FLOAT_AUTO)
9402 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9403
9404 if (tdep->jb_pc >= 0)
9405 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9406
9407 /* Floating point sizes and format. */
9408 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9409 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9410 {
9411 set_gdbarch_double_format
9412 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9413 set_gdbarch_long_double_format
9414 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9415 }
9416 else
9417 {
9418 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9419 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9420 }
9421
9422 if (have_vfp_pseudos)
9423 {
9424 /* NOTE: These are the only pseudo registers used by
9425 the ARM target at the moment. If more are added, a
9426 little more care in numbering will be needed. */
9427
9428 int num_pseudos = 32;
9429 if (have_neon_pseudos)
9430 num_pseudos += 16;
9431 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9432 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9433 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9434 }
9435
9436 if (tdesc_data)
9437 {
9438 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9439
9440 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9441
9442 /* Override tdesc_register_type to adjust the types of VFP
9443 registers for NEON. */
9444 set_gdbarch_register_type (gdbarch, arm_register_type);
9445 }
9446
9447 /* Add standard register aliases. We add aliases even for those
9448 names which are used by the current architecture - it's simpler,
9449 and does no harm, since nothing ever lists user registers. */
9450 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9451 user_reg_add (gdbarch, arm_register_aliases[i].name,
9452 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9453
9454 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9455 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9456
9457 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9458
9459 return gdbarch;
9460 }
9461
9462 static void
9463 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9464 {
9465 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9466
9467 if (tdep == NULL)
9468 return;
9469
9470 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9471 (unsigned long) tdep->lowest_pc);
9472 }
9473
#if GDB_SELF_TEST
namespace selftests
{
/* Forward declaration; the definition appears later in this file and
   is registered from _initialize_arm_tdep.  */
static void arm_record_test (void);
}
#endif
9480
9481 void
9482 _initialize_arm_tdep (void)
9483 {
9484 long length;
9485 int i, j;
9486 char regdesc[1024], *rdptr = regdesc;
9487 size_t rest = sizeof (regdesc);
9488
9489 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9490
9491 /* Add ourselves to objfile event chain. */
9492 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9493
9494 /* Register an ELF OS ABI sniffer for ARM binaries. */
9495 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9496 bfd_target_elf_flavour,
9497 arm_elf_osabi_sniffer);
9498
9499 /* Add root prefix command for all "set arm"/"show arm" commands. */
9500 add_prefix_cmd ("arm", no_class, set_arm_command,
9501 _("Various ARM-specific commands."),
9502 &setarmcmdlist, "set arm ", 0, &setlist);
9503
9504 add_prefix_cmd ("arm", no_class, show_arm_command,
9505 _("Various ARM-specific commands."),
9506 &showarmcmdlist, "show arm ", 0, &showlist);
9507
9508
9509 arm_disassembler_options = xstrdup ("reg-names-std");
9510 const disasm_options_t *disasm_options
9511 = &disassembler_options_arm ()->options;
9512 int num_disassembly_styles = 0;
9513 for (i = 0; disasm_options->name[i] != NULL; i++)
9514 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9515 num_disassembly_styles++;
9516
9517 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9518 valid_disassembly_styles = XNEWVEC (const char *,
9519 num_disassembly_styles + 1);
9520 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9521 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9522 {
9523 size_t offset = strlen ("reg-names-");
9524 const char *style = disasm_options->name[i];
9525 valid_disassembly_styles[j++] = &style[offset];
9526 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9527 disasm_options->description[i]);
9528 rdptr += length;
9529 rest -= length;
9530 }
9531 /* Mark the end of valid options. */
9532 valid_disassembly_styles[num_disassembly_styles] = NULL;
9533
9534 /* Create the help text. */
9535 std::string helptext = string_printf ("%s%s%s",
9536 _("The valid values are:\n"),
9537 regdesc,
9538 _("The default is \"std\"."));
9539
9540 add_setshow_enum_cmd("disassembler", no_class,
9541 valid_disassembly_styles, &disassembly_style,
9542 _("Set the disassembly style."),
9543 _("Show the disassembly style."),
9544 helptext.c_str (),
9545 set_disassembly_style_sfunc,
9546 show_disassembly_style_sfunc,
9547 &setarmcmdlist, &showarmcmdlist);
9548
9549 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9550 _("Set usage of ARM 32-bit mode."),
9551 _("Show usage of ARM 32-bit mode."),
9552 _("When off, a 26-bit PC will be used."),
9553 NULL,
9554 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9555 mode is %s. */
9556 &setarmcmdlist, &showarmcmdlist);
9557
9558 /* Add a command to allow the user to force the FPU model. */
9559 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9560 _("Set the floating point type."),
9561 _("Show the floating point type."),
9562 _("auto - Determine the FP typefrom the OS-ABI.\n\
9563 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9564 fpa - FPA co-processor (GCC compiled).\n\
9565 softvfp - Software FP with pure-endian doubles.\n\
9566 vfp - VFP co-processor."),
9567 set_fp_model_sfunc, show_fp_model,
9568 &setarmcmdlist, &showarmcmdlist);
9569
9570 /* Add a command to allow the user to force the ABI. */
9571 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9572 _("Set the ABI."),
9573 _("Show the ABI."),
9574 NULL, arm_set_abi, arm_show_abi,
9575 &setarmcmdlist, &showarmcmdlist);
9576
9577 /* Add two commands to allow the user to force the assumed
9578 execution mode. */
9579 add_setshow_enum_cmd ("fallback-mode", class_support,
9580 arm_mode_strings, &arm_fallback_mode_string,
9581 _("Set the mode assumed when symbols are unavailable."),
9582 _("Show the mode assumed when symbols are unavailable."),
9583 NULL, NULL, arm_show_fallback_mode,
9584 &setarmcmdlist, &showarmcmdlist);
9585 add_setshow_enum_cmd ("force-mode", class_support,
9586 arm_mode_strings, &arm_force_mode_string,
9587 _("Set the mode assumed even when symbols are available."),
9588 _("Show the mode assumed even when symbols are available."),
9589 NULL, NULL, arm_show_force_mode,
9590 &setarmcmdlist, &showarmcmdlist);
9591
9592 /* Debugging flag. */
9593 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9594 _("Set ARM debugging."),
9595 _("Show ARM debugging."),
9596 _("When on, arm-specific debugging is enabled."),
9597 NULL,
9598 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9599 &setdebuglist, &showdebuglist);
9600
9601 #if GDB_SELF_TEST
9602 selftests::register_test ("arm-record", selftests::arm_record_test);
9603 #endif
9604
9605 }
9606
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers out of RECORD_BUF into a freshly
   XNEWVEC-allocated uint32_t array assigned to REGS.  No-op when
   LENGTH is zero; the caller owns the allocation.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH struct arm_mem_r records out of RECORD_BUF into a
   freshly XNEWVEC-allocated array assigned to MEMS.  No-op when
   LENGTH is zero; the caller owns the allocation.
   NOTE(review): the destination is &MEMS->len — the address of the
   first member of the first element, equivalent to MEMS itself — and
   RECORD_BUF is assumed to be laid out as len/addr pairs; verify
   against callers.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9646
/* ARM memory record structure.  Describes one span of target memory
   that an instruction is about to overwrite, so that process record
   can save its previous contents before execution.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length in bytes.  */
  uint32_t addr;   /* Memory address of the span.  */
};
9653
/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Raw instruction; should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records in arm_mems.  */
  uint32_t reg_rec_count;       /* No of reg records in arm_regs.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record
				   (heap array filled via REG_ALLOC).  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record
				   (heap array filled via MEM_ALLOC).  */
} insn_decode_record;
9673
9674
9675 /* Checks ARM SBZ and SBO mandatory fields. */
9676
9677 static int
9678 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9679 {
9680 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9681
9682 if (!len)
9683 return 1;
9684
9685 if (!sbo)
9686 ones = ~ones;
9687
9688 while (ones)
9689 {
9690 if (!(ones & sbo))
9691 {
9692 return 0;
9693 }
9694 ones = ones >> 1;
9695 }
9696 return 1;
9697 }
9698
/* Result codes returned by the ARM process-record decode helpers.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};
9704
/* Discriminates the two miscellaneous-store flavours handled by
   arm_record_strx: STRH (one halfword) vs STRD (two words).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
9710
/* Instruction encoding family being recorded: 32-bit ARM, 16-bit
   Thumb, or 32-bit Thumb-2.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
9717
9718
9719 static int
9720 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9721 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9722 {
9723
9724 struct regcache *reg_cache = arm_insn_r->regcache;
9725 ULONGEST u_regval[2]= {0};
9726
9727 uint32_t reg_src1 = 0, reg_src2 = 0;
9728 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9729
9730 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9731 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9732
9733 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9734 {
9735 /* 1) Handle misc store, immediate offset. */
9736 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9737 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9738 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9739 regcache_raw_read_unsigned (reg_cache, reg_src1,
9740 &u_regval[0]);
9741 if (ARM_PC_REGNUM == reg_src1)
9742 {
9743 /* If R15 was used as Rn, hence current PC+8. */
9744 u_regval[0] = u_regval[0] + 8;
9745 }
9746 offset_8 = (immed_high << 4) | immed_low;
9747 /* Calculate target store address. */
9748 if (14 == arm_insn_r->opcode)
9749 {
9750 tgt_mem_addr = u_regval[0] + offset_8;
9751 }
9752 else
9753 {
9754 tgt_mem_addr = u_regval[0] - offset_8;
9755 }
9756 if (ARM_RECORD_STRH == str_type)
9757 {
9758 record_buf_mem[0] = 2;
9759 record_buf_mem[1] = tgt_mem_addr;
9760 arm_insn_r->mem_rec_count = 1;
9761 }
9762 else if (ARM_RECORD_STRD == str_type)
9763 {
9764 record_buf_mem[0] = 4;
9765 record_buf_mem[1] = tgt_mem_addr;
9766 record_buf_mem[2] = 4;
9767 record_buf_mem[3] = tgt_mem_addr + 4;
9768 arm_insn_r->mem_rec_count = 2;
9769 }
9770 }
9771 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9772 {
9773 /* 2) Store, register offset. */
9774 /* Get Rm. */
9775 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9776 /* Get Rn. */
9777 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9778 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9779 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9780 if (15 == reg_src2)
9781 {
9782 /* If R15 was used as Rn, hence current PC+8. */
9783 u_regval[0] = u_regval[0] + 8;
9784 }
9785 /* Calculate target store address, Rn +/- Rm, register offset. */
9786 if (12 == arm_insn_r->opcode)
9787 {
9788 tgt_mem_addr = u_regval[0] + u_regval[1];
9789 }
9790 else
9791 {
9792 tgt_mem_addr = u_regval[1] - u_regval[0];
9793 }
9794 if (ARM_RECORD_STRH == str_type)
9795 {
9796 record_buf_mem[0] = 2;
9797 record_buf_mem[1] = tgt_mem_addr;
9798 arm_insn_r->mem_rec_count = 1;
9799 }
9800 else if (ARM_RECORD_STRD == str_type)
9801 {
9802 record_buf_mem[0] = 4;
9803 record_buf_mem[1] = tgt_mem_addr;
9804 record_buf_mem[2] = 4;
9805 record_buf_mem[3] = tgt_mem_addr + 4;
9806 arm_insn_r->mem_rec_count = 2;
9807 }
9808 }
9809 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9810 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9811 {
9812 /* 3) Store, immediate pre-indexed. */
9813 /* 5) Store, immediate post-indexed. */
9814 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9815 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9816 offset_8 = (immed_high << 4) | immed_low;
9817 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9818 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9819 /* Calculate target store address, Rn +/- Rm, register offset. */
9820 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9821 {
9822 tgt_mem_addr = u_regval[0] + offset_8;
9823 }
9824 else
9825 {
9826 tgt_mem_addr = u_regval[0] - offset_8;
9827 }
9828 if (ARM_RECORD_STRH == str_type)
9829 {
9830 record_buf_mem[0] = 2;
9831 record_buf_mem[1] = tgt_mem_addr;
9832 arm_insn_r->mem_rec_count = 1;
9833 }
9834 else if (ARM_RECORD_STRD == str_type)
9835 {
9836 record_buf_mem[0] = 4;
9837 record_buf_mem[1] = tgt_mem_addr;
9838 record_buf_mem[2] = 4;
9839 record_buf_mem[3] = tgt_mem_addr + 4;
9840 arm_insn_r->mem_rec_count = 2;
9841 }
9842 /* Record Rn also as it changes. */
9843 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9844 arm_insn_r->reg_rec_count = 1;
9845 }
9846 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9847 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9848 {
9849 /* 4) Store, register pre-indexed. */
9850 /* 6) Store, register post -indexed. */
9851 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9852 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9853 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9854 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9855 /* Calculate target store address, Rn +/- Rm, register offset. */
9856 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9857 {
9858 tgt_mem_addr = u_regval[0] + u_regval[1];
9859 }
9860 else
9861 {
9862 tgt_mem_addr = u_regval[1] - u_regval[0];
9863 }
9864 if (ARM_RECORD_STRH == str_type)
9865 {
9866 record_buf_mem[0] = 2;
9867 record_buf_mem[1] = tgt_mem_addr;
9868 arm_insn_r->mem_rec_count = 1;
9869 }
9870 else if (ARM_RECORD_STRD == str_type)
9871 {
9872 record_buf_mem[0] = 4;
9873 record_buf_mem[1] = tgt_mem_addr;
9874 record_buf_mem[2] = 4;
9875 record_buf_mem[3] = tgt_mem_addr + 4;
9876 arm_insn_r->mem_rec_count = 2;
9877 }
9878 /* Record Rn also as it changes. */
9879 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9880 arm_insn_r->reg_rec_count = 1;
9881 }
9882 return 0;
9883 }
9884
/* Handling ARM extension space insns.  */

/* Record the registers and memory that an ARM "extension space"
   instruction (arithmetic extensions, control/MSR-MRS forms,
   miscellaneous loads/stores, coprocessor forms) is about to modify.
   Register numbers are collected in record_buf and memory spans in
   record_buf_mem, then transferred to ARM_INSN_R via REG_ALLOC /
   MEM_ALLOC.  Returns 0 on success, -1 when the insn is unsupported
   (undefined encodings, coprocessor space, or SPSR-touching forms
   that are not yet handled).  */

static int
arm_record_extension_space (insn_decode_record *arm_insn_r)
{
  int ret = 0;  /* Return value: -1:record failure ;  0:success  */
  uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t reg_src1 = 0;
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval = 0;

  /* Caller must not have recorded anything for this insn yet.  */
  gdb_assert (!INSN_RECORDED(arm_insn_r));
  /* Handle unconditional insn extension space.  */

  opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  if (arm_insn_r->cond)
    {
      /* PLD has no effect on the architectural state, it just affects
	 the caches.  */
      if (5 == ((opcode1 & 0xE0) >> 5))
	{
	  /* BLX(1): branch with link and exchange; clobbers the
	     status register and the link register.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
    }


  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
    {
      ret = -1;
      /* Undefined instruction on ARM V5; need to handle if later
	 versions define it.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);

  /* Handle arithmetic insn extension space.  */
  if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* Handle MLA(S) and MUL(S): destination Rd plus flags.  */
      if (in_inclusive_range (insn_op1, 0U, 3U))
	{
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (in_inclusive_range (insn_op1, 4U, 15U))
	{
	  /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S): these write
	     a 64-bit result into RdHi:RdLo plus flags.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);

  /* Handle control insn extension space.  */

  if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
      && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
    {
      if (!bit (arm_insn_r->arm_insn,25))
	{
	  if (!bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if ((0 == insn_op1) || (2 == insn_op1))
		{
		  /* MRS: moves a status register into Rd.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (1 == insn_op1)
		{
		  /* MSR register form: CPSR is going to be changed.  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* SPSR is going to be changed.  */
		  /* We need to get SPSR value, which is yet to be done.  */
		  return -1;
		}
	    }
	  else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if (1 == insn_op1)
		{
		  /* BX: only the status register changes (the branch
		     target itself is handled by the record core).  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* CLZ: writes Rd only.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	  else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BLX register form: clobbers status and link registers.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* QADD, QSUB, QDADD, QDSUB: saturating arithmetic writes
		 Rd and may set the Q flag.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BKPT.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;

	      /* Save SPSR also; how?  Not supported yet.  */
	      return -1;
	    }
	  else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 10 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 12 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 14 == bits (arm_insn_r->arm_insn, 4, 7)
		 )
	    {
	      if (0 == insn_op1 || 1 == insn_op1)
		{
		  /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
		  /* We don't do optimization for SMULW<y> where we
		     need only Rd.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (2 == insn_op1)
		{
		  /* SMLAL<x><y>: 64-bit accumulate into RdHi:RdLo.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (3 == insn_op1)
		{
		  /* SMUL<x><y>: writes Rd only.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	}
      else
	{
	  /* MSR : immediate form.  */
	  if (1 == insn_op1)
	    {
	      /* CPSR is going to be changed.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      arm_insn_r->reg_rec_count = 1;
	    }
	  else if (3 == insn_op1)
	    {
	      /* SPSR is going to be changed.  */
	      /* We need to get SPSR value, which is yet to be done.  */
	      return -1;
	    }
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);

  /* Handle load/store insn extension space.  */

  if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
      && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* SWP/SWPB.  */
      if (0 == insn_op1)
	{
	  /* These insns change a register and memory as well.  */
	  /* SWP or SWPB insn.  */
	  /* Get memory address given by Rn.  */
	  reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
	  regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
	  /* SWP swaps a word; SWPB swaps only a byte.
	     NOTE(review): this tests arm_insn_r->opcode, which is not
	     assigned in this function — it relies on a value left by
	     an earlier decode step; verify against callers.  */
	  if (8 == arm_insn_r->opcode)
	    {
	      record_buf_mem[0] = 4;
	    }
	  else
	    {
	      /* SWPB insn, swaps only byte.  */
	      record_buf_mem[0] = 1;
	    }
	  record_buf_mem[1] = u_regval;
	  arm_insn_r->mem_rec_count = 1;
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRH.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRH);
	}
      else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* LDRD: loads Rt and the following register Rt+1.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = record_buf[0] + 1;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRD.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRD);
	}
      else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
	{
	  /* LDRH, LDRSB, LDRSH: load into Rt only.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}

    }

  opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
  if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
      && !INSN_RECORDED(arm_insn_r))
    {
      ret = -1;
      /* Handle coprocessor insn extension space.  */
    }

  /* To be done for ARMv5 and later; as of now we return -1.  */
  if (-1 == ret)
    return ret;

  /* Hand the collected register/memory lists over to the record.  */
  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return ret;
}
10147
/* Handling opcode 000 insns.  Record handler for the ARM data-processing
   (register / register-shifted register) group, the "miscellaneous"
   instructions, multiply and multiply-accumulate, synchronization
   primitives (SWP/SWPB) and the extra load/store forms.  Fills in the
   register and memory record lists of ARM_INSN_R and returns 0 on
   success, -1 for instructions that cannot be recorded yet.  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  /* opcode is insn bits 21-24, decode is bits 4-7, opcode1 is
     bits 20-24 (opcode plus the S/L bit).  */
  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and data-processing (register-shifted
	 register).  */
      /* Out of 11 shifter operands mode, all the insn modifies destination
	 register, which is specified by 13-16 decode.  The condition
	 flags (CPSR) may also be written, so save both.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions.  */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
	  && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BLX, branch and link/exchange.  */
	  if (9 == arm_insn_r->opcode)
	    {
	      /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
		 and R14 stores the return address.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	}
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
	{
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state, disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* If the user hit the breakpoint and then reverses, we need to
	     go back with the previous CPSR and program counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;

	  /* Save SPSR also; how?  Not supported yet, so bail out.  */
	  return -1;
	}
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BX, branch and link/exchange.  */
	  /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
	{
	  /* Count leading zeros: CLZ.  Only Rd changes.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
	       && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
	{
	  /* Handle MRS insn: copies CPSR/SPSR into Rd.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate.  */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
	{
	  /* Handle MLA and MUL: Rd (bits 16-19) and the flags change.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
	{
	  /* Handle SMLAL, SMULL, UMLAL, UMULL: 64-bit result in
	     RdHi:RdLo plus the flags.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 > 0x10)
    {
      /* Synchronization primitives.  */

      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */

      /* The memory address is given by Rn.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
	{
	  record_buf_mem[0] = 4;
	}
      else
	{
	  /* SWPB insn, swaps only byte.  */
	  record_buf_mem[0] = 1;
	}
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      /* Rd receives the old memory contents.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
	   || 15 == arm_insn_r->decode)
    {
      if ((opcode1 & 0x12) == 2)
	{
	  /* Extra load/store (unprivileged) — not supported yet.  */
	  return -1;
	}
      else
	{
	  /* Extra load/store.  Bits 5-6 select the data form:
	     1 = halfword, 2 = dual/signed byte, 3 = dual/signed half.  */
	  switch (bits (arm_insn_r->arm_insn, 5, 6))
	    {
	    case 1:
	      if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
		{
		  /* STRH (register), STRH (immediate).  */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRH);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRH (register).  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRH (immediate), LDRH (literal).  */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRH (immediate): the literal form (Rn == PC)
			 never writes back.  */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 2:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* LDRD (register): loads the pair Rt, Rt+1.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = record_buf[0] + 1;
		  arm_insn_r->reg_rec_count = 2;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSB (register).  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
		{
		  /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
		     LDRSB (literal).  */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRD (immediate), LDRSB (immediate): the literal
			 forms never write back.  */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 3:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* STRD (register).  */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSH (register).  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4)
		{
		  /* STRD (immediate).  */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRSH (immediate), LDRSH (literal).  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else
		return -1;
	      break;
	    default:
	      return -1;
	    }
	}
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10440
10441 /* Handling opcode 001 insns. */
10442
10443 static int
10444 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10445 {
10446 uint32_t record_buf[8], record_buf_mem[8];
10447
10448 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10449 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10450
10451 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10452 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10453 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10454 )
10455 {
10456 /* Handle MSR insn. */
10457 if (9 == arm_insn_r->opcode)
10458 {
10459 /* CSPR is going to be changed. */
10460 record_buf[0] = ARM_PS_REGNUM;
10461 arm_insn_r->reg_rec_count = 1;
10462 }
10463 else
10464 {
10465 /* SPSR is going to be changed. */
10466 }
10467 }
10468 else if (arm_insn_r->opcode <= 15)
10469 {
10470 /* Normal data processing insns. */
10471 /* Out of 11 shifter operands mode, all the insn modifies destination
10472 register, which is specified by 13-16 decode. */
10473 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10474 record_buf[1] = ARM_PS_REGNUM;
10475 arm_insn_r->reg_rec_count = 2;
10476 }
10477 else
10478 {
10479 return -1;
10480 }
10481
10482 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10483 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10484 return 0;
10485 }
10486
10487 static int
10488 arm_record_media (insn_decode_record *arm_insn_r)
10489 {
10490 uint32_t record_buf[8];
10491
10492 switch (bits (arm_insn_r->arm_insn, 22, 24))
10493 {
10494 case 0:
10495 /* Parallel addition and subtraction, signed */
10496 case 1:
10497 /* Parallel addition and subtraction, unsigned */
10498 case 2:
10499 case 3:
10500 /* Packing, unpacking, saturation and reversal */
10501 {
10502 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10503
10504 record_buf[arm_insn_r->reg_rec_count++] = rd;
10505 }
10506 break;
10507
10508 case 4:
10509 case 5:
10510 /* Signed multiplies */
10511 {
10512 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10513 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10514
10515 record_buf[arm_insn_r->reg_rec_count++] = rd;
10516 if (op1 == 0x0)
10517 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10518 else if (op1 == 0x4)
10519 record_buf[arm_insn_r->reg_rec_count++]
10520 = bits (arm_insn_r->arm_insn, 12, 15);
10521 }
10522 break;
10523
10524 case 6:
10525 {
10526 if (bit (arm_insn_r->arm_insn, 21)
10527 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10528 {
10529 /* SBFX */
10530 record_buf[arm_insn_r->reg_rec_count++]
10531 = bits (arm_insn_r->arm_insn, 12, 15);
10532 }
10533 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10534 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10535 {
10536 /* USAD8 and USADA8 */
10537 record_buf[arm_insn_r->reg_rec_count++]
10538 = bits (arm_insn_r->arm_insn, 16, 19);
10539 }
10540 }
10541 break;
10542
10543 case 7:
10544 {
10545 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10546 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10547 {
10548 /* Permanently UNDEFINED */
10549 return -1;
10550 }
10551 else
10552 {
10553 /* BFC, BFI and UBFX */
10554 record_buf[arm_insn_r->reg_rec_count++]
10555 = bits (arm_insn_r->arm_insn, 12, 15);
10556 }
10557 }
10558 break;
10559
10560 default:
10561 return -1;
10562 }
10563
10564 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10565
10566 return 0;
10567 }
10568
10569 /* Handle ARM mode instructions with opcode 010. */
10570
10571 static int
10572 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10573 {
10574 struct regcache *reg_cache = arm_insn_r->regcache;
10575
10576 uint32_t reg_base , reg_dest;
10577 uint32_t offset_12, tgt_mem_addr;
10578 uint32_t record_buf[8], record_buf_mem[8];
10579 unsigned char wback;
10580 ULONGEST u_regval;
10581
10582 /* Calculate wback. */
10583 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10584 || (bit (arm_insn_r->arm_insn, 21) == 1);
10585
10586 arm_insn_r->reg_rec_count = 0;
10587 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10588
10589 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10590 {
10591 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10592 and LDRT. */
10593
10594 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10595 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10596
10597 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10598 preceeds a LDR instruction having R15 as reg_base, it
10599 emulates a branch and link instruction, and hence we need to save
10600 CPSR and PC as well. */
10601 if (ARM_PC_REGNUM == reg_dest)
10602 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10603
10604 /* If wback is true, also save the base register, which is going to be
10605 written to. */
10606 if (wback)
10607 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10608 }
10609 else
10610 {
10611 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10612
10613 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10614 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10615
10616 /* Handle bit U. */
10617 if (bit (arm_insn_r->arm_insn, 23))
10618 {
10619 /* U == 1: Add the offset. */
10620 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10621 }
10622 else
10623 {
10624 /* U == 0: subtract the offset. */
10625 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10626 }
10627
10628 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10629 bytes. */
10630 if (bit (arm_insn_r->arm_insn, 22))
10631 {
10632 /* STRB and STRBT: 1 byte. */
10633 record_buf_mem[0] = 1;
10634 }
10635 else
10636 {
10637 /* STR and STRT: 4 bytes. */
10638 record_buf_mem[0] = 4;
10639 }
10640
10641 /* Handle bit P. */
10642 if (bit (arm_insn_r->arm_insn, 24))
10643 record_buf_mem[1] = tgt_mem_addr;
10644 else
10645 record_buf_mem[1] = (uint32_t) u_regval;
10646
10647 arm_insn_r->mem_rec_count = 1;
10648
10649 /* If wback is true, also save the base register, which is going to be
10650 written to. */
10651 if (wback)
10652 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10653 }
10654
10655 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10656 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10657 return 0;
10658 }
10659
10660 /* Handling opcode 011 insns. */
10661
10662 static int
10663 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10664 {
10665 struct regcache *reg_cache = arm_insn_r->regcache;
10666
10667 uint32_t shift_imm = 0;
10668 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10669 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10670 uint32_t record_buf[8], record_buf_mem[8];
10671
10672 LONGEST s_word;
10673 ULONGEST u_regval[2];
10674
10675 if (bit (arm_insn_r->arm_insn, 4))
10676 return arm_record_media (arm_insn_r);
10677
10678 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10679 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10680
10681 /* Handle enhanced store insns and LDRD DSP insn,
10682 order begins according to addressing modes for store insns
10683 STRH insn. */
10684
10685 /* LDR or STR? */
10686 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10687 {
10688 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10689 /* LDR insn has a capability to do branching, if
10690 MOV LR, PC is preceded by LDR insn having Rn as R15
10691 in that case, it emulates branch and link insn, and hence we
10692 need to save CSPR and PC as well. */
10693 if (15 != reg_dest)
10694 {
10695 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10696 arm_insn_r->reg_rec_count = 1;
10697 }
10698 else
10699 {
10700 record_buf[0] = reg_dest;
10701 record_buf[1] = ARM_PS_REGNUM;
10702 arm_insn_r->reg_rec_count = 2;
10703 }
10704 }
10705 else
10706 {
10707 if (! bits (arm_insn_r->arm_insn, 4, 11))
10708 {
10709 /* Store insn, register offset and register pre-indexed,
10710 register post-indexed. */
10711 /* Get Rm. */
10712 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10713 /* Get Rn. */
10714 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10715 regcache_raw_read_unsigned (reg_cache, reg_src1
10716 , &u_regval[0]);
10717 regcache_raw_read_unsigned (reg_cache, reg_src2
10718 , &u_regval[1]);
10719 if (15 == reg_src2)
10720 {
10721 /* If R15 was used as Rn, hence current PC+8. */
10722 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10723 u_regval[0] = u_regval[0] + 8;
10724 }
10725 /* Calculate target store address, Rn +/- Rm, register offset. */
10726 /* U == 1. */
10727 if (bit (arm_insn_r->arm_insn, 23))
10728 {
10729 tgt_mem_addr = u_regval[0] + u_regval[1];
10730 }
10731 else
10732 {
10733 tgt_mem_addr = u_regval[1] - u_regval[0];
10734 }
10735
10736 switch (arm_insn_r->opcode)
10737 {
10738 /* STR. */
10739 case 8:
10740 case 12:
10741 /* STR. */
10742 case 9:
10743 case 13:
10744 /* STRT. */
10745 case 1:
10746 case 5:
10747 /* STR. */
10748 case 0:
10749 case 4:
10750 record_buf_mem[0] = 4;
10751 break;
10752
10753 /* STRB. */
10754 case 10:
10755 case 14:
10756 /* STRB. */
10757 case 11:
10758 case 15:
10759 /* STRBT. */
10760 case 3:
10761 case 7:
10762 /* STRB. */
10763 case 2:
10764 case 6:
10765 record_buf_mem[0] = 1;
10766 break;
10767
10768 default:
10769 gdb_assert_not_reached ("no decoding pattern found");
10770 break;
10771 }
10772 record_buf_mem[1] = tgt_mem_addr;
10773 arm_insn_r->mem_rec_count = 1;
10774
10775 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10776 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10777 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10778 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10779 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10780 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10781 )
10782 {
10783 /* Rn is going to be changed in pre-indexed mode and
10784 post-indexed mode as well. */
10785 record_buf[0] = reg_src2;
10786 arm_insn_r->reg_rec_count = 1;
10787 }
10788 }
10789 else
10790 {
10791 /* Store insn, scaled register offset; scaled pre-indexed. */
10792 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10793 /* Get Rm. */
10794 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10795 /* Get Rn. */
10796 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10797 /* Get shift_imm. */
10798 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10799 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10800 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10801 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10802 /* Offset_12 used as shift. */
10803 switch (offset_12)
10804 {
10805 case 0:
10806 /* Offset_12 used as index. */
10807 offset_12 = u_regval[0] << shift_imm;
10808 break;
10809
10810 case 1:
10811 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10812 break;
10813
10814 case 2:
10815 if (!shift_imm)
10816 {
10817 if (bit (u_regval[0], 31))
10818 {
10819 offset_12 = 0xFFFFFFFF;
10820 }
10821 else
10822 {
10823 offset_12 = 0;
10824 }
10825 }
10826 else
10827 {
10828 /* This is arithmetic shift. */
10829 offset_12 = s_word >> shift_imm;
10830 }
10831 break;
10832
10833 case 3:
10834 if (!shift_imm)
10835 {
10836 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10837 &u_regval[1]);
10838 /* Get C flag value and shift it by 31. */
10839 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10840 | (u_regval[0]) >> 1);
10841 }
10842 else
10843 {
10844 offset_12 = (u_regval[0] >> shift_imm) \
10845 | (u_regval[0] <<
10846 (sizeof(uint32_t) - shift_imm));
10847 }
10848 break;
10849
10850 default:
10851 gdb_assert_not_reached ("no decoding pattern found");
10852 break;
10853 }
10854
10855 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10856 /* bit U set. */
10857 if (bit (arm_insn_r->arm_insn, 23))
10858 {
10859 tgt_mem_addr = u_regval[1] + offset_12;
10860 }
10861 else
10862 {
10863 tgt_mem_addr = u_regval[1] - offset_12;
10864 }
10865
10866 switch (arm_insn_r->opcode)
10867 {
10868 /* STR. */
10869 case 8:
10870 case 12:
10871 /* STR. */
10872 case 9:
10873 case 13:
10874 /* STRT. */
10875 case 1:
10876 case 5:
10877 /* STR. */
10878 case 0:
10879 case 4:
10880 record_buf_mem[0] = 4;
10881 break;
10882
10883 /* STRB. */
10884 case 10:
10885 case 14:
10886 /* STRB. */
10887 case 11:
10888 case 15:
10889 /* STRBT. */
10890 case 3:
10891 case 7:
10892 /* STRB. */
10893 case 2:
10894 case 6:
10895 record_buf_mem[0] = 1;
10896 break;
10897
10898 default:
10899 gdb_assert_not_reached ("no decoding pattern found");
10900 break;
10901 }
10902 record_buf_mem[1] = tgt_mem_addr;
10903 arm_insn_r->mem_rec_count = 1;
10904
10905 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10906 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10907 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10908 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10909 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10910 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10911 )
10912 {
10913 /* Rn is going to be changed in register scaled pre-indexed
10914 mode,and scaled post indexed mode. */
10915 record_buf[0] = reg_src2;
10916 arm_insn_r->reg_rec_count = 1;
10917 }
10918 }
10919 }
10920
10921 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10922 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10923 return 0;
10924 }
10925
10926 /* Handle ARM mode instructions with opcode 100. */
10927
10928 static int
10929 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10930 {
10931 struct regcache *reg_cache = arm_insn_r->regcache;
10932 uint32_t register_count = 0, register_bits;
10933 uint32_t reg_base, addr_mode;
10934 uint32_t record_buf[24], record_buf_mem[48];
10935 uint32_t wback;
10936 ULONGEST u_regval;
10937
10938 /* Fetch the list of registers. */
10939 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10940 arm_insn_r->reg_rec_count = 0;
10941
10942 /* Fetch the base register that contains the address we are loading data
10943 to. */
10944 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10945
10946 /* Calculate wback. */
10947 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10948
10949 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10950 {
10951 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10952
10953 /* Find out which registers are going to be loaded from memory. */
10954 while (register_bits)
10955 {
10956 if (register_bits & 0x00000001)
10957 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10958 register_bits = register_bits >> 1;
10959 register_count++;
10960 }
10961
10962
10963 /* If wback is true, also save the base register, which is going to be
10964 written to. */
10965 if (wback)
10966 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10967
10968 /* Save the CPSR register. */
10969 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10970 }
10971 else
10972 {
10973 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10974
10975 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10976
10977 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10978
10979 /* Find out how many registers are going to be stored to memory. */
10980 while (register_bits)
10981 {
10982 if (register_bits & 0x00000001)
10983 register_count++;
10984 register_bits = register_bits >> 1;
10985 }
10986
10987 switch (addr_mode)
10988 {
10989 /* STMDA (STMED): Decrement after. */
10990 case 0:
10991 record_buf_mem[1] = (uint32_t) u_regval
10992 - register_count * ARM_INT_REGISTER_SIZE + 4;
10993 break;
10994 /* STM (STMIA, STMEA): Increment after. */
10995 case 1:
10996 record_buf_mem[1] = (uint32_t) u_regval;
10997 break;
10998 /* STMDB (STMFD): Decrement before. */
10999 case 2:
11000 record_buf_mem[1] = (uint32_t) u_regval
11001 - register_count * ARM_INT_REGISTER_SIZE;
11002 break;
11003 /* STMIB (STMFA): Increment before. */
11004 case 3:
11005 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11006 break;
11007 default:
11008 gdb_assert_not_reached ("no decoding pattern found");
11009 break;
11010 }
11011
11012 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11013 arm_insn_r->mem_rec_count = 1;
11014
11015 /* If wback is true, also save the base register, which is going to be
11016 written to. */
11017 if (wback)
11018 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11019 }
11020
11021 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11022 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11023 return 0;
11024 }
11025
11026 /* Handling opcode 101 insns. */
11027
11028 static int
11029 arm_record_b_bl (insn_decode_record *arm_insn_r)
11030 {
11031 uint32_t record_buf[8];
11032
11033 /* Handle B, BL, BLX(1) insns. */
11034 /* B simply branches so we do nothing here. */
11035 /* Note: BLX(1) doesnt fall here but instead it falls into
11036 extension space. */
11037 if (bit (arm_insn_r->arm_insn, 24))
11038 {
11039 record_buf[0] = ARM_LR_REGNUM;
11040 arm_insn_r->reg_rec_count = 1;
11041 }
11042
11043 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11044
11045 return 0;
11046 }
11047
11048 static int
11049 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11050 {
11051 printf_unfiltered (_("Process record does not support instruction "
11052 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11053 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11054
11055 return -1;
11056 }
11057
11058 /* Record handler for vector data transfer instructions. */
11059
11060 static int
11061 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11062 {
11063 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11064 uint32_t record_buf[4];
11065
11066 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11067 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11068 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11069 bit_l = bit (arm_insn_r->arm_insn, 20);
11070 bit_c = bit (arm_insn_r->arm_insn, 8);
11071
11072 /* Handle VMOV instruction. */
11073 if (bit_l && bit_c)
11074 {
11075 record_buf[0] = reg_t;
11076 arm_insn_r->reg_rec_count = 1;
11077 }
11078 else if (bit_l && !bit_c)
11079 {
11080 /* Handle VMOV instruction. */
11081 if (bits_a == 0x00)
11082 {
11083 record_buf[0] = reg_t;
11084 arm_insn_r->reg_rec_count = 1;
11085 }
11086 /* Handle VMRS instruction. */
11087 else if (bits_a == 0x07)
11088 {
11089 if (reg_t == 15)
11090 reg_t = ARM_PS_REGNUM;
11091
11092 record_buf[0] = reg_t;
11093 arm_insn_r->reg_rec_count = 1;
11094 }
11095 }
11096 else if (!bit_l && !bit_c)
11097 {
11098 /* Handle VMOV instruction. */
11099 if (bits_a == 0x00)
11100 {
11101 record_buf[0] = ARM_D0_REGNUM + reg_v;
11102
11103 arm_insn_r->reg_rec_count = 1;
11104 }
11105 /* Handle VMSR instruction. */
11106 else if (bits_a == 0x07)
11107 {
11108 record_buf[0] = ARM_FPSCR_REGNUM;
11109 arm_insn_r->reg_rec_count = 1;
11110 }
11111 }
11112 else if (!bit_l && bit_c)
11113 {
11114 /* Handle VMOV instruction. */
11115 if (!(bits_a & 0x04))
11116 {
11117 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11118 + ARM_D0_REGNUM;
11119 arm_insn_r->reg_rec_count = 1;
11120 }
11121 /* Handle VDUP instruction. */
11122 else
11123 {
11124 if (bit (arm_insn_r->arm_insn, 21))
11125 {
11126 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11127 record_buf[0] = reg_v + ARM_D0_REGNUM;
11128 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11129 arm_insn_r->reg_rec_count = 2;
11130 }
11131 else
11132 {
11133 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11134 record_buf[0] = reg_v + ARM_D0_REGNUM;
11135 arm_insn_r->reg_rec_count = 1;
11136 }
11137 }
11138 }
11139
11140 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11141 return 0;
11142 }
11143
11144 /* Record handler for extension register load/store instructions. */
11145
11146 static int
11147 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11148 {
11149 uint32_t opcode, single_reg;
11150 uint8_t op_vldm_vstm;
11151 uint32_t record_buf[8], record_buf_mem[128];
11152 ULONGEST u_regval = 0;
11153
11154 struct regcache *reg_cache = arm_insn_r->regcache;
11155
11156 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11157 single_reg = !bit (arm_insn_r->arm_insn, 8);
11158 op_vldm_vstm = opcode & 0x1b;
11159
11160 /* Handle VMOV instructions. */
11161 if ((opcode & 0x1e) == 0x04)
11162 {
11163 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11164 {
11165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11166 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11167 arm_insn_r->reg_rec_count = 2;
11168 }
11169 else
11170 {
11171 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11172 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11173
11174 if (single_reg)
11175 {
11176 /* The first S register number m is REG_M:M (M is bit 5),
11177 the corresponding D register number is REG_M:M / 2, which
11178 is REG_M. */
11179 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11180 /* The second S register number is REG_M:M + 1, the
11181 corresponding D register number is (REG_M:M + 1) / 2.
11182 IOW, if bit M is 1, the first and second S registers
11183 are mapped to different D registers, otherwise, they are
11184 in the same D register. */
11185 if (bit_m)
11186 {
11187 record_buf[arm_insn_r->reg_rec_count++]
11188 = ARM_D0_REGNUM + reg_m + 1;
11189 }
11190 }
11191 else
11192 {
11193 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11194 arm_insn_r->reg_rec_count = 1;
11195 }
11196 }
11197 }
11198 /* Handle VSTM and VPUSH instructions. */
11199 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11200 || op_vldm_vstm == 0x12)
11201 {
11202 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11203 uint32_t memory_index = 0;
11204
11205 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11206 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11207 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11208 imm_off32 = imm_off8 << 2;
11209 memory_count = imm_off8;
11210
11211 if (bit (arm_insn_r->arm_insn, 23))
11212 start_address = u_regval;
11213 else
11214 start_address = u_regval - imm_off32;
11215
11216 if (bit (arm_insn_r->arm_insn, 21))
11217 {
11218 record_buf[0] = reg_rn;
11219 arm_insn_r->reg_rec_count = 1;
11220 }
11221
11222 while (memory_count > 0)
11223 {
11224 if (single_reg)
11225 {
11226 record_buf_mem[memory_index] = 4;
11227 record_buf_mem[memory_index + 1] = start_address;
11228 start_address = start_address + 4;
11229 memory_index = memory_index + 2;
11230 }
11231 else
11232 {
11233 record_buf_mem[memory_index] = 4;
11234 record_buf_mem[memory_index + 1] = start_address;
11235 record_buf_mem[memory_index + 2] = 4;
11236 record_buf_mem[memory_index + 3] = start_address + 4;
11237 start_address = start_address + 8;
11238 memory_index = memory_index + 4;
11239 }
11240 memory_count--;
11241 }
11242 arm_insn_r->mem_rec_count = (memory_index >> 1);
11243 }
11244 /* Handle VLDM instructions. */
11245 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11246 || op_vldm_vstm == 0x13)
11247 {
11248 uint32_t reg_count, reg_vd;
11249 uint32_t reg_index = 0;
11250 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11251
11252 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11253 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11254
11255 /* REG_VD is the first D register number. If the instruction
11256 loads memory to S registers (SINGLE_REG is TRUE), the register
11257 number is (REG_VD << 1 | bit D), so the corresponding D
11258 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11259 if (!single_reg)
11260 reg_vd = reg_vd | (bit_d << 4);
11261
11262 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11263 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11264
11265 /* If the instruction loads memory to D register, REG_COUNT should
11266 be divided by 2, according to the ARM Architecture Reference
11267 Manual. If the instruction loads memory to S register, divide by
11268 2 as well because two S registers are mapped to D register. */
11269 reg_count = reg_count / 2;
11270 if (single_reg && bit_d)
11271 {
11272 /* Increase the register count if S register list starts from
11273 an odd number (bit d is one). */
11274 reg_count++;
11275 }
11276
11277 while (reg_count > 0)
11278 {
11279 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11280 reg_count--;
11281 }
11282 arm_insn_r->reg_rec_count = reg_index;
11283 }
11284 /* VSTR Vector store register. */
11285 else if ((opcode & 0x13) == 0x10)
11286 {
11287 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11288 uint32_t memory_index = 0;
11289
11290 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11291 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11292 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11293 imm_off32 = imm_off8 << 2;
11294
11295 if (bit (arm_insn_r->arm_insn, 23))
11296 start_address = u_regval + imm_off32;
11297 else
11298 start_address = u_regval - imm_off32;
11299
11300 if (single_reg)
11301 {
11302 record_buf_mem[memory_index] = 4;
11303 record_buf_mem[memory_index + 1] = start_address;
11304 arm_insn_r->mem_rec_count = 1;
11305 }
11306 else
11307 {
11308 record_buf_mem[memory_index] = 4;
11309 record_buf_mem[memory_index + 1] = start_address;
11310 record_buf_mem[memory_index + 2] = 4;
11311 record_buf_mem[memory_index + 3] = start_address + 4;
11312 arm_insn_r->mem_rec_count = 2;
11313 }
11314 }
11315 /* VLDR Vector load register. */
11316 else if ((opcode & 0x13) == 0x11)
11317 {
11318 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11319
11320 if (!single_reg)
11321 {
11322 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11323 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11324 }
11325 else
11326 {
11327 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11328 /* Record register D rather than pseudo register S. */
11329 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11330 }
11331 arm_insn_r->reg_rec_count = 1;
11332 }
11333
11334 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11335 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11336 return 0;
11337 }
11338
/* Record handler for arm/thumb mode VFP data processing instructions.

   Classifies the instruction into one of four destination patterns and
   records the registers it will clobber:
     INSN_T0 - two consecutive D registers, D[vd] and D[vd + 1];
     INSN_T1 - one D register, D[vd] (with the D bit as bit 4 of vd);
     INSN_T2 - one S register, recorded via its containing D register;
     INSN_T3 - FPSCR only (the compare instructions);
     INSN_INV - no pattern matched (asserts below).
   Returns 0 on success.  */

static int
arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
  uint32_t record_buf[4];
  enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
  enum insn_types curr_insn_type = INSN_INV;

  reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
  opc1 = bits (arm_insn_r->arm_insn, 20, 23);
  opc2 = bits (arm_insn_r->arm_insn, 16, 19);
  opc3 = bits (arm_insn_r->arm_insn, 6, 7);
  /* sz bit: double-precision operation when set.  */
  dp_op_sz = bit (arm_insn_r->arm_insn, 8);
  bit_d = bit (arm_insn_r->arm_insn, 22);
  /* Mask off the "D" bit.  */
  opc1 = opc1 & ~0x04;

  /* Handle VMLA, VMLS.  */
  if (opc1 == 0x00)
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VNMLA, VNMLS, VNMUL.  */
  else if (opc1 == 0x01)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle VMUL.  */
  else if (opc1 == 0x02 && !(opc3 & 0x01))
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VADD, VSUB.  */
  else if (opc1 == 0x03)
    {
      if (!bit (arm_insn_r->arm_insn, 9))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VDIV.  */
  else if (opc1 == 0x08)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle all other vfp data processing instructions.  */
  else if (opc1 == 0x0b)
    {
      /* Handle VMOV.  */
      if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
	{
	  if (bit (arm_insn_r->arm_insn, 4))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VNEG and VABS.  */
      else if ((opc2 == 0x01 && opc3 == 0x01)
	       || (opc2 == 0x00 && opc3 == 0x03))
	{
	  if (!bit (arm_insn_r->arm_insn, 11))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VSQRT.  */
      else if (opc2 == 0x01 && opc3 == 0x03)
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      /* Handle VCVT (between double and single precision).  The test is
	 inverted relative to the cases above, presumably because the
	 destination precision is the opposite of the sz bit here —
	 NOTE(review): confirm against the ARM ARM VCVT encoding.  */
      else if (opc2 == 0x07 && opc3 == 0x03)
	{
	  if (!dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      else if (opc3 & 0x01)
	{
	  /* Handle VCVT.  */
	  if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
	    {
	      if (!bit (arm_insn_r->arm_insn, 18))
		curr_insn_type = INSN_T2;
	      else
		{
		  if (dp_op_sz)
		    curr_insn_type = INSN_T1;
		  else
		    curr_insn_type = INSN_T2;
		}
	    }
	  /* Handle VCVT.  */
	  else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	  /* Handle VCVTB, VCVTT.  */
	  else if ((opc2 & 0x0e) == 0x02)
	    curr_insn_type = INSN_T2;
	  /* Handle VCMP, VCMPE.  */
	  else if ((opc2 & 0x0e) == 0x04)
	    curr_insn_type = INSN_T3;
	}
    }

  switch (curr_insn_type)
    {
    case INSN_T0:
      /* Two consecutive D registers are written.  */
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
      arm_insn_r->reg_rec_count = 2;
      break;

    case INSN_T1:
      /* One D register; the D bit is bit 4 of the register number.  */
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T2:
      /* One S register (Vd:D); record the D register containing it.  */
      reg_vd = (reg_vd << 1) | bit_d;
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T3:
      /* Compares only update the status flags in FPSCR.  */
      record_buf[0] = ARM_FPSCR_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    default:
      /* NOTE(review): an encoding that matches none of the patterns
	 above trips this assertion; relies on the dispatcher only
	 routing valid VFP data-processing encodings here.  */
      gdb_assert_not_reached ("no decoding pattern found");
      break;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
11550
11551 /* Handling opcode 110 insns. */
11552
11553 static int
11554 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11555 {
11556 uint32_t op1, op1_ebit, coproc;
11557
11558 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11559 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11560 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11561
11562 if ((coproc & 0x0e) == 0x0a)
11563 {
11564 /* Handle extension register ld/st instructions. */
11565 if (!(op1 & 0x20))
11566 return arm_record_exreg_ld_st_insn (arm_insn_r);
11567
11568 /* 64-bit transfers between arm core and extension registers. */
11569 if ((op1 & 0x3e) == 0x04)
11570 return arm_record_exreg_ld_st_insn (arm_insn_r);
11571 }
11572 else
11573 {
11574 /* Handle coprocessor ld/st instructions. */
11575 if (!(op1 & 0x3a))
11576 {
11577 /* Store. */
11578 if (!op1_ebit)
11579 return arm_record_unsupported_insn (arm_insn_r);
11580 else
11581 /* Load. */
11582 return arm_record_unsupported_insn (arm_insn_r);
11583 }
11584
11585 /* Move to coprocessor from two arm core registers. */
11586 if (op1 == 0x4)
11587 return arm_record_unsupported_insn (arm_insn_r);
11588
11589 /* Move to two arm core registers from coprocessor. */
11590 if (op1 == 0x5)
11591 {
11592 uint32_t reg_t[2];
11593
11594 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11595 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11596 arm_insn_r->reg_rec_count = 2;
11597
11598 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11599 return 0;
11600 }
11601 }
11602 return arm_record_unsupported_insn (arm_insn_r);
11603 }
11604
/* Handling opcode 111 insns.

   Covers SWI/SVC system calls, coprocessor data processing and
   register transfers (including the VFP subset), dispatching to the
   dedicated record handlers where one exists.  Returns 0 on success,
   -1 for unsupported encodings.  */

static int
arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
{
  uint32_t op, op1_ebit, coproc, bits_24_25;
  struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
  struct regcache *reg_cache = arm_insn_r->regcache;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);
  op = bit (arm_insn_r->arm_insn, 4);
  bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);

  /* Handle arm SWI/SVC system call instructions.  */
  if (bits_24_25 == 0x3)
    {
      if (tdep->arm_syscall_record != NULL)
	{
	  ULONGEST svc_operand, svc_number;

	  svc_operand = (0x00ffffff & arm_insn_r->arm_insn);

	  /* OABI encodes the syscall number in the SVC immediate,
	     biased by 0x900000; EABI leaves the immediate zero and
	     passes the number in r7.  */
	  if (svc_operand)  /* OABI.  */
	    svc_number = svc_operand - 0x900000;
	  else /* EABI.  */
	    regcache_raw_read_unsigned (reg_cache, 7, &svc_number);

	  return tdep->arm_syscall_record (reg_cache, svc_number);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }
  else if (bits_24_25 == 0x02)
    {
      if (op)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 8, 16, and 32-bit transfer */
	      return arm_record_vdata_transfer_insn (arm_insn_r);
	    }
	  else
	    {
	      if (op1_ebit)
		{
		  /* MRC, MRC2: the ARM core destination register is
		     clobbered; Rt == 15 means the flags (APSR) are
		     written instead of PC.  */
		  uint32_t record_buf[1];

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  if (record_buf[0] == 15)
		    record_buf[0] = ARM_PS_REGNUM;

		  arm_insn_r->reg_rec_count = 1;
		  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
			     record_buf);
		  return 0;
		}
	      else
		{
		  /* MCR, MCR2 */
		  return -1;
		}
	    }
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* VFP data-processing instructions.  */
	      return arm_record_vfp_data_proc_insn (arm_insn_r);
	    }
	  else
	    {
	      /* CDP, CDP2 */
	      return -1;
	    }
	}
    }
  else
    {
      unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);

      /* NOTE(review): because op1 == 5 is tested first, the
	 "op1 == 4 || op1 == 5" branch below is only ever reached with
	 op1 == 4, so the 64-bit-transfer case there is dead for
	 op1 == 5; such an insn instead falls through to the final
	 "return -1".  The net result (-1) is the same either way, but
	 the control flow looks unintended — confirm against upstream.  */
      if (op1 == 5)
	{
	  if ((coproc & 0x0e) != 0x0a)
	    {
	      /* MRRC, MRRC2 */
	      return -1;
	    }
	}
      else if (op1 == 4 || op1 == 5)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 64-bit transfers between ARM core and extension */
	      return -1;
	    }
	  else if (op1 == 4)
	    {
	      /* MCRR, MCRR2 */
	      return -1;
	    }
	}
      else if (op1 == 0 || op1 == 1)
	{
	  /* UNDEFINED */
	  return -1;
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* Extension register load/store */
	    }
	  else
	    {
	      /* STC, STC2, LDC, LDC2 */
	    }
	  return -1;
	}
    }

  return -1;
}
11734
11735 /* Handling opcode 000 insns. */
11736
11737 static int
11738 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11739 {
11740 uint32_t record_buf[8];
11741 uint32_t reg_src1 = 0;
11742
11743 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11744
11745 record_buf[0] = ARM_PS_REGNUM;
11746 record_buf[1] = reg_src1;
11747 thumb_insn_r->reg_rec_count = 2;
11748
11749 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11750
11751 return 0;
11752 }
11753
11754
11755 /* Handling opcode 001 insns. */
11756
11757 static int
11758 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11759 {
11760 uint32_t record_buf[8];
11761 uint32_t reg_src1 = 0;
11762
11763 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11764
11765 record_buf[0] = ARM_PS_REGNUM;
11766 record_buf[1] = reg_src1;
11767 thumb_insn_r->reg_rec_count = 2;
11768
11769 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11770
11771 return 0;
11772 }
11773
11774 /* Handling opcode 010 insns. */
11775
11776 static int
11777 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11778 {
11779 struct regcache *reg_cache = thumb_insn_r->regcache;
11780 uint32_t record_buf[8], record_buf_mem[8];
11781
11782 uint32_t reg_src1 = 0, reg_src2 = 0;
11783 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11784
11785 ULONGEST u_regval[2] = {0};
11786
11787 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11788
11789 if (bit (thumb_insn_r->arm_insn, 12))
11790 {
11791 /* Handle load/store register offset. */
11792 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11793
11794 if (in_inclusive_range (opB, 4U, 7U))
11795 {
11796 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11797 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11798 record_buf[0] = reg_src1;
11799 thumb_insn_r->reg_rec_count = 1;
11800 }
11801 else if (in_inclusive_range (opB, 0U, 2U))
11802 {
11803 /* STR(2), STRB(2), STRH(2) . */
11804 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11805 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11806 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11807 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11808 if (0 == opB)
11809 record_buf_mem[0] = 4; /* STR (2). */
11810 else if (2 == opB)
11811 record_buf_mem[0] = 1; /* STRB (2). */
11812 else if (1 == opB)
11813 record_buf_mem[0] = 2; /* STRH (2). */
11814 record_buf_mem[1] = u_regval[0] + u_regval[1];
11815 thumb_insn_r->mem_rec_count = 1;
11816 }
11817 }
11818 else if (bit (thumb_insn_r->arm_insn, 11))
11819 {
11820 /* Handle load from literal pool. */
11821 /* LDR(3). */
11822 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11823 record_buf[0] = reg_src1;
11824 thumb_insn_r->reg_rec_count = 1;
11825 }
11826 else if (opcode1)
11827 {
11828 /* Special data instructions and branch and exchange */
11829 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11830 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11831 if ((3 == opcode2) && (!opcode3))
11832 {
11833 /* Branch with exchange. */
11834 record_buf[0] = ARM_PS_REGNUM;
11835 thumb_insn_r->reg_rec_count = 1;
11836 }
11837 else
11838 {
11839 /* Format 8; special data processing insns. */
11840 record_buf[0] = ARM_PS_REGNUM;
11841 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11842 | bits (thumb_insn_r->arm_insn, 0, 2));
11843 thumb_insn_r->reg_rec_count = 2;
11844 }
11845 }
11846 else
11847 {
11848 /* Format 5; data processing insns. */
11849 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11850 if (bit (thumb_insn_r->arm_insn, 7))
11851 {
11852 reg_src1 = reg_src1 + 8;
11853 }
11854 record_buf[0] = ARM_PS_REGNUM;
11855 record_buf[1] = reg_src1;
11856 thumb_insn_r->reg_rec_count = 2;
11857 }
11858
11859 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11860 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11861 record_buf_mem);
11862
11863 return 0;
11864 }
11865
11866 /* Handling opcode 001 insns. */
11867
11868 static int
11869 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11870 {
11871 struct regcache *reg_cache = thumb_insn_r->regcache;
11872 uint32_t record_buf[8], record_buf_mem[8];
11873
11874 uint32_t reg_src1 = 0;
11875 uint32_t opcode = 0, immed_5 = 0;
11876
11877 ULONGEST u_regval = 0;
11878
11879 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11880
11881 if (opcode)
11882 {
11883 /* LDR(1). */
11884 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11885 record_buf[0] = reg_src1;
11886 thumb_insn_r->reg_rec_count = 1;
11887 }
11888 else
11889 {
11890 /* STR(1). */
11891 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11892 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11893 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11894 record_buf_mem[0] = 4;
11895 record_buf_mem[1] = u_regval + (immed_5 * 4);
11896 thumb_insn_r->mem_rec_count = 1;
11897 }
11898
11899 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11900 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11901 record_buf_mem);
11902
11903 return 0;
11904 }
11905
11906 /* Handling opcode 100 insns. */
11907
11908 static int
11909 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11910 {
11911 struct regcache *reg_cache = thumb_insn_r->regcache;
11912 uint32_t record_buf[8], record_buf_mem[8];
11913
11914 uint32_t reg_src1 = 0;
11915 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11916
11917 ULONGEST u_regval = 0;
11918
11919 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11920
11921 if (3 == opcode)
11922 {
11923 /* LDR(4). */
11924 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11925 record_buf[0] = reg_src1;
11926 thumb_insn_r->reg_rec_count = 1;
11927 }
11928 else if (1 == opcode)
11929 {
11930 /* LDRH(1). */
11931 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11932 record_buf[0] = reg_src1;
11933 thumb_insn_r->reg_rec_count = 1;
11934 }
11935 else if (2 == opcode)
11936 {
11937 /* STR(3). */
11938 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11939 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11940 record_buf_mem[0] = 4;
11941 record_buf_mem[1] = u_regval + (immed_8 * 4);
11942 thumb_insn_r->mem_rec_count = 1;
11943 }
11944 else if (0 == opcode)
11945 {
11946 /* STRH(1). */
11947 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11948 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11949 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11950 record_buf_mem[0] = 2;
11951 record_buf_mem[1] = u_regval + (immed_5 * 2);
11952 thumb_insn_r->mem_rec_count = 1;
11953 }
11954
11955 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11956 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11957 record_buf_mem);
11958
11959 return 0;
11960 }
11961
/* Handling opcode 101 insns.

   Covers ADR, ADD (SP plus immediate), and the miscellaneous 16-bit
   Thumb group (sign/zero extension, PUSH/POP, byte-reverse, CBZ/CBNZ,
   SETEND/CPS, BKPT, IT/hints).  Records the clobbered registers, and
   for PUSH the stack range being written.  Returns 0 on success, -1
   for encodings recording cannot support.  */

static int
thumb_record_misc (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t opcode = 0;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];
  uint32_t reg_src1;

  ULONGEST u_regval = 0;

  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (opcode == 0 || opcode == 1)
    {
      /* ADR and ADD (SP plus immediate): destination register is in
	 bits 8..10.  */

      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else
    {
      /* Miscellaneous 16-bit instructions */
      uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);

      switch (opcode2)
	{
	case 6:
	  /* SETEND and CPS: nothing recorded here.  */
	  break;
	case 0:
	  /* ADD/SUB (SP plus immediate): only SP is modified.
	     NOTE(review): REG_SRC1 is computed but unused in this
	     case — looks like leftover code.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 1: /* fall through  */
	case 3: /* fall through  */
	case 9: /* fall through  */
	case 11:
	  /* CBNZ, CBZ: branch only, nothing to record.  */
	  break;
	case 2:
	  /* SXTH, SXTB, UXTH, UXTB: destination register in bits 0..2.  */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 4: /* fall through  */
	case 5:
	  /* PUSH: count the registers in the list (bit 8 adds LR),
	     then record the stack words about to be overwritten plus
	     the updated SP.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;
	      register_bits = register_bits >> 1;
	    }
	  start_address = u_regval -  \
	    (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
	  thumb_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[(register_count * 2) - 1] = start_address;
	      record_buf_mem[(register_count * 2) - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 10:
	  /* REV, REV16, REVSH: destination register in bits 0..2.  */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 12: /* fall through  */
	case 13:
	  /* POP: every register in the list is loaded; PC writes are
	     reflected via CPSR/SP, recorded below.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;
	      register_bits = register_bits >> 1;
	      register_count++;
	    }
	  record_buf[index++] = ARM_PS_REGNUM;
	  record_buf[index++] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = index;
	  break;
	case 0xe:
	  /* BKPT insn.  */
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state,  disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* User hits breakpoint and type reverse, in that case, we need to go back with
	     previous CPSR and Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  thumb_insn_r->reg_rec_count = 2;
	  /* We need to save SPSR value, which is not yet done.  */
	  printf_unfiltered (_("Process record does not support instruction "
			       "0x%0x at address %s.\n"),
			     thumb_insn_r->arm_insn,
			     paddress (thumb_insn_r->gdbarch,
				       thumb_insn_r->this_addr));
	  return -1;

	case 0xf:
	  /* If-Then, and hints: nothing to record.  */
	  break;
	default:
	  return -1;
	};
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
12091
/* Handling opcode 110 insns.

   Covers LDMIA, STMIA and the Thumb SWI/SVC system call.  For LDMIA
   the listed registers plus the base register are recorded; for STMIA
   the memory words about to be overwritten are recorded; SWI defers
   to the OS-specific syscall record hook (syscall number in r7).
   Returns 0 on success, -1 on failure.  */

static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t ret = 0; /* function return value: -1:record failure ;  0:success  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {

      /* LDMIA: every register in the 8-bit list is loaded, and the
	 base register Rn is written back.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* It handles both STMIA.  Count the registers in the list, then
	 record one word per register starting at the base address.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  EABI: the syscall number is read
	 from r7.  */
      if (tdep->arm_syscall_record != NULL)
	{
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
     as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
12175
12176 /* Handling opcode 111 insns. */
12177
12178 static int
12179 thumb_record_branch (insn_decode_record *thumb_insn_r)
12180 {
12181 uint32_t record_buf[8];
12182 uint32_t bits_h = 0;
12183
12184 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12185
12186 if (2 == bits_h || 3 == bits_h)
12187 {
12188 /* BL */
12189 record_buf[0] = ARM_LR_REGNUM;
12190 thumb_insn_r->reg_rec_count = 1;
12191 }
12192 else if (1 == bits_h)
12193 {
12194 /* BLX(1). */
12195 record_buf[0] = ARM_PS_REGNUM;
12196 record_buf[1] = ARM_LR_REGNUM;
12197 thumb_insn_r->reg_rec_count = 2;
12198 }
12199
12200 /* B(2) is automatically taken care in process_record, as PC is
12201 saved there. */
12202
12203 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12204
12205 return 0;
12206 }
12207
/* Handler for thumb2 load/store multiple instructions.

   Covers RFE/SRS (op 0 or 3) and LDM/LDMIA/LDMFD, LDMDB/LDMEA,
   STM/STMIA/STMEA, STMDB/STMFD (op 1 or 2).  Loads record the listed
   registers plus the base and CPSR; stores record the memory words
   about to be overwritten.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.  Every
	     listed register is loaded; the base and CPSR are recorded
	     too (write-back and possible PC load).  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  Count the listed
	     registers, then record one word per register.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* STM/STMIA/STMEA: stores start at the base address Rn.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* STMDB/STMFD: stores start at Rn - 4 * register count.  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  /* Base register write-back and flags.  */
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12301
/* Handler for thumb2 load/store (dual/exclusive) and table branch
   instructions.  Records the registers and memory locations the
   instruction is about to change, so "record/replay" can undo it.  */

static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;

  ULONGEST u_regval[2];

  /* Sub-opcode fields that distinguish the LDREX/STREX variants,
     LDRD/STRD and TBB/TBH within this encoding space.  */
  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms: only registers change.  For op1 == 1, op2 == 1,
	 op3 == 0/1 (table branch encodings) no destination register
	 is recorded — only the PC changes, and the caller always
	 records the PC.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
        {
          reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
          record_buf[0] = reg_dest1;
          record_buf[1] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 2;
        }

      /* Dual-register load forms also write a second destination
	 (bits 8-11).  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
        {
          reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
          record_buf[2] = reg_dest2;
          thumb2_insn_r->reg_rec_count = 3;
        }
    }
  else
    {
      /* Store forms: memory changes; the base register value is
	 needed to compute the affected addresses.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
        {
          /* Handle STREX: one word at Rn + imm8*4; Rd receives the
	     exclusive-store status.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          address = u_regval[0] + (offset_imm * 4);
          record_buf_mem[0] = 4;
          record_buf_mem[1] = address;
          thumb2_insn_r->mem_rec_count = 1;
          reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
          record_buf[0] = reg_rd;
          thumb2_insn_r->reg_rec_count = 1;
        }
      else if (1 == op1 && 0 == op2)
        {
          /* STREXB/STREXH/STREXD: status result goes to Rd; access
	     width depends on op3.  */
          reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
          record_buf[0] = reg_rd;
          thumb2_insn_r->reg_rec_count = 1;
          address = u_regval[0];
          record_buf_mem[1] = address;

          if (4 == op3)
            {
              /* Handle STREXB.  */
              record_buf_mem[0] = 1;
              thumb2_insn_r->mem_rec_count = 1;
            }
          else if (5 == op3)
            {
              /* Handle STREXH.  */
              record_buf_mem[0] = 2 ;
              thumb2_insn_r->mem_rec_count = 1;
            }
          else if (7 == op3)
            {
              /* Handle STREXD: two consecutive words are written.  */
              address = u_regval[0];
              record_buf_mem[0] = 4;
              record_buf_mem[2] = 4;
              record_buf_mem[3] = address + 4;
              thumb2_insn_r->mem_rec_count = 2;
            }
        }
      else
        {
          /* Dual store (immediate): imm8 scaled by 4, with optional
	     pre-indexing (bit 24) and add/subtract (bit 23).  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

          if (bit (thumb2_insn_r->arm_insn, 24))
            {
              if (bit (thumb2_insn_r->arm_insn, 23))
                offset_addr = u_regval[0] + (offset_imm * 4);
              else
                offset_addr = u_regval[0] - (offset_imm * 4);

              address = offset_addr;
            }
          else
            address = u_regval[0];

          /* Two words change; the base register may be written back,
	     so record it too.  */
          record_buf_mem[0] = 4;
          record_buf_mem[1] = address;
          record_buf_mem[2] = 4;
          record_buf_mem[3] = address + 4;
          thumb2_insn_r->mem_rec_count = 2;
          record_buf[0] = reg_rn;
          thumb2_insn_r->reg_rec_count = 1;
        }
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12418
12419 /* Handler for thumb2 data processing (shift register and modified immediate)
12420 instructions. */
12421
12422 static int
12423 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12424 {
12425 uint32_t reg_rd, op;
12426 uint32_t record_buf[8];
12427
12428 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12429 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12430
12431 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12432 {
12433 record_buf[0] = ARM_PS_REGNUM;
12434 thumb2_insn_r->reg_rec_count = 1;
12435 }
12436 else
12437 {
12438 record_buf[0] = reg_rd;
12439 record_buf[1] = ARM_PS_REGNUM;
12440 thumb2_insn_r->reg_rec_count = 2;
12441 }
12442
12443 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12444 record_buf);
12445 return ARM_RECORD_SUCCESS;
12446 }
12447
12448 /* Generic handler for thumb2 instructions which effect destination and PS
12449 registers. */
12450
12451 static int
12452 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12453 {
12454 uint32_t reg_rd;
12455 uint32_t record_buf[8];
12456
12457 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12458
12459 record_buf[0] = reg_rd;
12460 record_buf[1] = ARM_PS_REGNUM;
12461 thumb2_insn_r->reg_rec_count = 2;
12462
12463 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12464 record_buf);
12465 return ARM_RECORD_SUCCESS;
12466 }
12467
12468 /* Handler for thumb2 branch and miscellaneous control instructions. */
12469
12470 static int
12471 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12472 {
12473 uint32_t op, op1, op2;
12474 uint32_t record_buf[8];
12475
12476 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12477 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12478 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12479
12480 /* Handle MSR insn. */
12481 if (!(op1 & 0x2) && 0x38 == op)
12482 {
12483 if (!(op2 & 0x3))
12484 {
12485 /* CPSR is going to be changed. */
12486 record_buf[0] = ARM_PS_REGNUM;
12487 thumb2_insn_r->reg_rec_count = 1;
12488 }
12489 else
12490 {
12491 arm_record_unsupported_insn(thumb2_insn_r);
12492 return -1;
12493 }
12494 }
12495 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12496 {
12497 /* BLX. */
12498 record_buf[0] = ARM_PS_REGNUM;
12499 record_buf[1] = ARM_LR_REGNUM;
12500 thumb2_insn_r->reg_rec_count = 2;
12501 }
12502
12503 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12504 record_buf);
12505 return ARM_RECORD_SUCCESS;
12506 }
12507
/* Handler for thumb2 store single data item instructions.  Computes
   the affected address and records the memory range plus the base
   register (which may be written back).  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  /* op1 selects the access size (byte/halfword/word); op2
     distinguishes register-offset from immediate-offset forms.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding: 12-bit unsigned immediate offset added to Rn.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
        {
          /* Handle STRB (register): offset is Rm shifted left by a
	     2-bit immediate.  */
          reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
          regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
          shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
          offset_addr = u_regval[1] << shift_imm;
          address = u_regval[0] + offset_addr;
        }
      else
        {
          /* 8-bit immediate offset; bit 10 selects pre-indexing and
	     bit 9 selects add vs subtract.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          if (bit (thumb2_insn_r->arm_insn, 10))
            {
              if (bit (thumb2_insn_r->arm_insn, 9))
                offset_addr = u_regval[0] + offset_imm;
              else
                offset_addr = u_regval[0] - offset_imm;

              address = offset_addr;
            }
          else
            address = u_regval[0];
        }
    }

  /* Record the store width implied by op1.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
        record_buf_mem[0] = 1;
        break;
      /* Store half word instructions.  */
      case 1:
      case 5:
        record_buf_mem[0] = 2;
        break;
      /* Store word instructions.  */
      case 2:
      case 6:
        record_buf_mem[0] = 4;
        break;

      default:
        gdb_assert_not_reached ("no decoding pattern found");
        break;
    }

  /* One memory range changes; the base register may be written back,
     so record it conservatively as well.  */
  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12597
12598 /* Handler for thumb2 load memory hints instructions. */
12599
12600 static int
12601 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12602 {
12603 uint32_t record_buf[8];
12604 uint32_t reg_rt, reg_rn;
12605
12606 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12607 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12608
12609 if (ARM_PC_REGNUM != reg_rt)
12610 {
12611 record_buf[0] = reg_rt;
12612 record_buf[1] = reg_rn;
12613 record_buf[2] = ARM_PS_REGNUM;
12614 thumb2_insn_r->reg_rec_count = 3;
12615
12616 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12617 record_buf);
12618 return ARM_RECORD_SUCCESS;
12619 }
12620
12621 return ARM_RECORD_FAILURE;
12622 }
12623
12624 /* Handler for thumb2 load word instructions. */
12625
12626 static int
12627 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12628 {
12629 uint32_t record_buf[8];
12630
12631 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12632 record_buf[1] = ARM_PS_REGNUM;
12633 thumb2_insn_r->reg_rec_count = 2;
12634
12635 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12636 record_buf);
12637 return ARM_RECORD_SUCCESS;
12638 }
12639
12640 /* Handler for thumb2 long multiply, long multiply accumulate, and
12641 divide instructions. */
12642
12643 static int
12644 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12645 {
12646 uint32_t opcode1 = 0, opcode2 = 0;
12647 uint32_t record_buf[8];
12648
12649 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12650 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12651
12652 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12653 {
12654 /* Handle SMULL, UMULL, SMULAL. */
12655 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12656 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12657 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12658 record_buf[2] = ARM_PS_REGNUM;
12659 thumb2_insn_r->reg_rec_count = 3;
12660 }
12661 else if (1 == opcode1 || 3 == opcode2)
12662 {
12663 /* Handle SDIV and UDIV. */
12664 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12665 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12666 record_buf[2] = ARM_PS_REGNUM;
12667 thumb2_insn_r->reg_rec_count = 3;
12668 }
12669 else
12670 return ARM_RECORD_FAILURE;
12671
12672 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12673 record_buf);
12674 return ARM_RECORD_SUCCESS;
12675 }
12676
12677 /* Record handler for thumb32 coprocessor instructions. */
12678
12679 static int
12680 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12681 {
12682 if (bit (thumb2_insn_r->arm_insn, 25))
12683 return arm_record_coproc_data_proc (thumb2_insn_r);
12684 else
12685 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12686 }
12687
12688 /* Record handler for advance SIMD structure load/store instructions. */
12689
12690 static int
12691 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12692 {
12693 struct regcache *reg_cache = thumb2_insn_r->regcache;
12694 uint32_t l_bit, a_bit, b_bits;
12695 uint32_t record_buf[128], record_buf_mem[128];
12696 uint32_t reg_rn, reg_vd, address, f_elem;
12697 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12698 uint8_t f_ebytes;
12699
12700 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12701 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12702 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12703 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12704 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12705 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12706 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12707 f_elem = 8 / f_ebytes;
12708
12709 if (!l_bit)
12710 {
12711 ULONGEST u_regval = 0;
12712 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12713 address = u_regval;
12714
12715 if (!a_bit)
12716 {
12717 /* Handle VST1. */
12718 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12719 {
12720 if (b_bits == 0x07)
12721 bf_regs = 1;
12722 else if (b_bits == 0x0a)
12723 bf_regs = 2;
12724 else if (b_bits == 0x06)
12725 bf_regs = 3;
12726 else if (b_bits == 0x02)
12727 bf_regs = 4;
12728 else
12729 bf_regs = 0;
12730
12731 for (index_r = 0; index_r < bf_regs; index_r++)
12732 {
12733 for (index_e = 0; index_e < f_elem; index_e++)
12734 {
12735 record_buf_mem[index_m++] = f_ebytes;
12736 record_buf_mem[index_m++] = address;
12737 address = address + f_ebytes;
12738 thumb2_insn_r->mem_rec_count += 1;
12739 }
12740 }
12741 }
12742 /* Handle VST2. */
12743 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12744 {
12745 if (b_bits == 0x09 || b_bits == 0x08)
12746 bf_regs = 1;
12747 else if (b_bits == 0x03)
12748 bf_regs = 2;
12749 else
12750 bf_regs = 0;
12751
12752 for (index_r = 0; index_r < bf_regs; index_r++)
12753 for (index_e = 0; index_e < f_elem; index_e++)
12754 {
12755 for (loop_t = 0; loop_t < 2; loop_t++)
12756 {
12757 record_buf_mem[index_m++] = f_ebytes;
12758 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12759 thumb2_insn_r->mem_rec_count += 1;
12760 }
12761 address = address + (2 * f_ebytes);
12762 }
12763 }
12764 /* Handle VST3. */
12765 else if ((b_bits & 0x0e) == 0x04)
12766 {
12767 for (index_e = 0; index_e < f_elem; index_e++)
12768 {
12769 for (loop_t = 0; loop_t < 3; loop_t++)
12770 {
12771 record_buf_mem[index_m++] = f_ebytes;
12772 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12773 thumb2_insn_r->mem_rec_count += 1;
12774 }
12775 address = address + (3 * f_ebytes);
12776 }
12777 }
12778 /* Handle VST4. */
12779 else if (!(b_bits & 0x0e))
12780 {
12781 for (index_e = 0; index_e < f_elem; index_e++)
12782 {
12783 for (loop_t = 0; loop_t < 4; loop_t++)
12784 {
12785 record_buf_mem[index_m++] = f_ebytes;
12786 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12787 thumb2_insn_r->mem_rec_count += 1;
12788 }
12789 address = address + (4 * f_ebytes);
12790 }
12791 }
12792 }
12793 else
12794 {
12795 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12796
12797 if (bft_size == 0x00)
12798 f_ebytes = 1;
12799 else if (bft_size == 0x01)
12800 f_ebytes = 2;
12801 else if (bft_size == 0x02)
12802 f_ebytes = 4;
12803 else
12804 f_ebytes = 0;
12805
12806 /* Handle VST1. */
12807 if (!(b_bits & 0x0b) || b_bits == 0x08)
12808 thumb2_insn_r->mem_rec_count = 1;
12809 /* Handle VST2. */
12810 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12811 thumb2_insn_r->mem_rec_count = 2;
12812 /* Handle VST3. */
12813 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12814 thumb2_insn_r->mem_rec_count = 3;
12815 /* Handle VST4. */
12816 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12817 thumb2_insn_r->mem_rec_count = 4;
12818
12819 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12820 {
12821 record_buf_mem[index_m] = f_ebytes;
12822 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12823 }
12824 }
12825 }
12826 else
12827 {
12828 if (!a_bit)
12829 {
12830 /* Handle VLD1. */
12831 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12832 thumb2_insn_r->reg_rec_count = 1;
12833 /* Handle VLD2. */
12834 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12835 thumb2_insn_r->reg_rec_count = 2;
12836 /* Handle VLD3. */
12837 else if ((b_bits & 0x0e) == 0x04)
12838 thumb2_insn_r->reg_rec_count = 3;
12839 /* Handle VLD4. */
12840 else if (!(b_bits & 0x0e))
12841 thumb2_insn_r->reg_rec_count = 4;
12842 }
12843 else
12844 {
12845 /* Handle VLD1. */
12846 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12847 thumb2_insn_r->reg_rec_count = 1;
12848 /* Handle VLD2. */
12849 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12850 thumb2_insn_r->reg_rec_count = 2;
12851 /* Handle VLD3. */
12852 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12853 thumb2_insn_r->reg_rec_count = 3;
12854 /* Handle VLD4. */
12855 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12856 thumb2_insn_r->reg_rec_count = 4;
12857
12858 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12859 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12860 }
12861 }
12862
12863 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12864 {
12865 record_buf[index_r] = reg_rn;
12866 thumb2_insn_r->reg_rec_count += 1;
12867 }
12868
12869 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12870 record_buf);
12871 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12872 record_buf_mem);
12873 return 0;
12874 }
12875
/* Decodes thumb2 instruction type and invokes its record handler.
   Returns the handler's result, or -1 for unrecognized encodings.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* Top-level opcode fields of the 32-bit Thumb2 encoding.  The
     order of the mask tests below matters: more specific masks are
     checked before the broad "op2 & 0x40" coprocessor catch-all.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
        {
          /* Load/store multiple instruction.  */
          return thumb2_record_ld_st_multiple (thumb2_insn_r);
        }
      else if ((op2 & 0x64) == 0x4)
        {
          /* Load/store (dual/exclusive) and table branch instruction.  */
          return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
        }
      else if ((op2 & 0x60) == 0x20)
        {
          /* Data-processing (shifted register).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }
  else if (op1 == 0x02)
    {
      if (op)
        {
          /* Branches and miscellaneous control instructions.  */
          return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
        }
      else if (op2 & 0x20)
        {
          /* Data-processing (plain binary immediate) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else
        {
          /* Data-processing (modified immediate).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
        {
          /* Store single data item.  */
          return thumb2_record_str_single_data (thumb2_insn_r);
        }
      else if (!((op2 & 0x71) ^ 0x10))
        {
          /* Advanced SIMD or structure load/store instructions.  */
          return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x01))
        {
          /* Load byte, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x03))
        {
          /* Load halfword, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x05))
        {
          /* Load word instruction.  */
          return thumb2_record_ld_word (thumb2_insn_r);
        }
      else if (!((op2 & 0x70) ^ 0x20))
        {
          /* Data-processing (register) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x30))
        {
          /* Multiply, multiply accumulate, abs diff instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x38))
        {
          /* Long multiply, long multiply accumulate, and divide.  */
          return thumb2_record_lmul_lmla_div (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }

  /* NOTE(review): -1 is returned from an unsigned function; callers
     only compare against ARM_RECORD_SUCCESS so this works, but an int
     return type would be clearer — TODO confirm and clean up.  */
  return -1;
}
12979
12980 namespace {
12981 /* Abstract memory reader. */
12982
12983 class abstract_memory_reader
12984 {
12985 public:
12986 /* Read LEN bytes of target memory at address MEMADDR, placing the
12987 results in GDB's memory at BUF. Return true on success. */
12988
12989 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12990 };
12991
12992 /* Instruction reader from real target. */
12993
12994 class instruction_reader : public abstract_memory_reader
12995 {
12996 public:
12997 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
12998 {
12999 if (target_read_memory (memaddr, buf, len))
13000 return false;
13001 else
13002 return true;
13003 }
13004 };
13005
13006 } // namespace
13007
13008 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13009 and positive val on failure. */
13010
13011 static int
13012 extract_arm_insn (abstract_memory_reader& reader,
13013 insn_decode_record *insn_record, uint32_t insn_size)
13014 {
13015 gdb_byte buf[insn_size];
13016
13017 memset (&buf[0], 0, insn_size);
13018
13019 if (!reader.read (insn_record->this_addr, buf, insn_size))
13020 return 1;
13021 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13022 insn_size,
13023 gdbarch_byte_order_for_code (insn_record->gdbarch));
13024 return 0;
13025 }
13026
13027 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13028
13029 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13030 dispatch it. */
13031
13032 static int
13033 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13034 record_type_t record_type, uint32_t insn_size)
13035 {
13036
13037 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13038 instruction. */
13039 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13040 {
13041 arm_record_data_proc_misc_ld_str, /* 000. */
13042 arm_record_data_proc_imm, /* 001. */
13043 arm_record_ld_st_imm_offset, /* 010. */
13044 arm_record_ld_st_reg_offset, /* 011. */
13045 arm_record_ld_st_multiple, /* 100. */
13046 arm_record_b_bl, /* 101. */
13047 arm_record_asimd_vfp_coproc, /* 110. */
13048 arm_record_coproc_data_proc /* 111. */
13049 };
13050
13051 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13052 instruction. */
13053 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13054 { \
13055 thumb_record_shift_add_sub, /* 000. */
13056 thumb_record_add_sub_cmp_mov, /* 001. */
13057 thumb_record_ld_st_reg_offset, /* 010. */
13058 thumb_record_ld_st_imm_offset, /* 011. */
13059 thumb_record_ld_st_stack, /* 100. */
13060 thumb_record_misc, /* 101. */
13061 thumb_record_ldm_stm_swi, /* 110. */
13062 thumb_record_branch /* 111. */
13063 };
13064
13065 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13066 uint32_t insn_id = 0;
13067
13068 if (extract_arm_insn (reader, arm_record, insn_size))
13069 {
13070 if (record_debug)
13071 {
13072 printf_unfiltered (_("Process record: error reading memory at "
13073 "addr %s len = %d.\n"),
13074 paddress (arm_record->gdbarch,
13075 arm_record->this_addr), insn_size);
13076 }
13077 return -1;
13078 }
13079 else if (ARM_RECORD == record_type)
13080 {
13081 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13082 insn_id = bits (arm_record->arm_insn, 25, 27);
13083
13084 if (arm_record->cond == 0xf)
13085 ret = arm_record_extension_space (arm_record);
13086 else
13087 {
13088 /* If this insn has fallen into extension space
13089 then we need not decode it anymore. */
13090 ret = arm_handle_insn[insn_id] (arm_record);
13091 }
13092 if (ret != ARM_RECORD_SUCCESS)
13093 {
13094 arm_record_unsupported_insn (arm_record);
13095 ret = -1;
13096 }
13097 }
13098 else if (THUMB_RECORD == record_type)
13099 {
13100 /* As thumb does not have condition codes, we set negative. */
13101 arm_record->cond = -1;
13102 insn_id = bits (arm_record->arm_insn, 13, 15);
13103 ret = thumb_handle_insn[insn_id] (arm_record);
13104 if (ret != ARM_RECORD_SUCCESS)
13105 {
13106 arm_record_unsupported_insn (arm_record);
13107 ret = -1;
13108 }
13109 }
13110 else if (THUMB2_RECORD == record_type)
13111 {
13112 /* As thumb does not have condition codes, we set negative. */
13113 arm_record->cond = -1;
13114
13115 /* Swap first half of 32bit thumb instruction with second half. */
13116 arm_record->arm_insn
13117 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13118
13119 ret = thumb2_record_decode_insn_handler (arm_record);
13120
13121 if (ret != ARM_RECORD_SUCCESS)
13122 {
13123 arm_record_unsupported_insn (arm_record);
13124 ret = -1;
13125 }
13126 }
13127 else
13128 {
13129 /* Throw assertion. */
13130 gdb_assert_not_reached ("not a valid instruction, could not decode");
13131 }
13132
13133 return ret;
13134 }
13135
13136 #if GDB_SELF_TEST
13137 namespace selftests {
13138
13139 /* Provide both 16-bit and 32-bit thumb instructions. */
13140
13141 class instruction_reader_thumb : public abstract_memory_reader
13142 {
13143 public:
13144 template<size_t SIZE>
13145 instruction_reader_thumb (enum bfd_endian endian,
13146 const uint16_t (&insns)[SIZE])
13147 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13148 {}
13149
13150 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13151 {
13152 SELF_CHECK (len == 4 || len == 2);
13153 SELF_CHECK (memaddr % 2 == 0);
13154 SELF_CHECK ((memaddr / 2) < m_insns_size);
13155
13156 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13157 if (len == 4)
13158 {
13159 store_unsigned_integer (&buf[2], 2, m_endian,
13160 m_insns[memaddr / 2 + 1]);
13161 }
13162 return true;
13163 }
13164
13165 private:
13166 enum bfd_endian m_endian;
13167 const uint16_t *m_insns;
13168 size_t m_insns_size;
13169 };
13170
13171 static void
13172 arm_record_test (void)
13173 {
13174 struct gdbarch_info info;
13175 gdbarch_info_init (&info);
13176 info.bfd_arch_info = bfd_scan_arch ("arm");
13177
13178 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13179
13180 SELF_CHECK (gdbarch != NULL);
13181
13182 /* 16-bit Thumb instructions. */
13183 {
13184 insn_decode_record arm_record;
13185
13186 memset (&arm_record, 0, sizeof (insn_decode_record));
13187 arm_record.gdbarch = gdbarch;
13188
13189 static const uint16_t insns[] = {
13190 /* db b2 uxtb r3, r3 */
13191 0xb2db,
13192 /* cd 58 ldr r5, [r1, r3] */
13193 0x58cd,
13194 };
13195
13196 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13197 instruction_reader_thumb reader (endian, insns);
13198 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13199 THUMB_INSN_SIZE_BYTES);
13200
13201 SELF_CHECK (ret == 0);
13202 SELF_CHECK (arm_record.mem_rec_count == 0);
13203 SELF_CHECK (arm_record.reg_rec_count == 1);
13204 SELF_CHECK (arm_record.arm_regs[0] == 3);
13205
13206 arm_record.this_addr += 2;
13207 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13208 THUMB_INSN_SIZE_BYTES);
13209
13210 SELF_CHECK (ret == 0);
13211 SELF_CHECK (arm_record.mem_rec_count == 0);
13212 SELF_CHECK (arm_record.reg_rec_count == 1);
13213 SELF_CHECK (arm_record.arm_regs[0] == 5);
13214 }
13215
13216 /* 32-bit Thumb-2 instructions. */
13217 {
13218 insn_decode_record arm_record;
13219
13220 memset (&arm_record, 0, sizeof (insn_decode_record));
13221 arm_record.gdbarch = gdbarch;
13222
13223 static const uint16_t insns[] = {
13224 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13225 0xee1d, 0x7f70,
13226 };
13227
13228 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13229 instruction_reader_thumb reader (endian, insns);
13230 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13231 THUMB2_INSN_SIZE_BYTES);
13232
13233 SELF_CHECK (ret == 0);
13234 SELF_CHECK (arm_record.mem_rec_count == 0);
13235 SELF_CHECK (arm_record.reg_rec_count == 1);
13236 SELF_CHECK (arm_record.arm_regs[0] == 7);
13237 }
13238 }
13239 } // namespace selftests
13240 #endif /* GDB_SELF_TEST */
13241
13242 /* Cleans up local record registers and memory allocations. */
13243
13244 static void
13245 deallocate_reg_mem (insn_decode_record *record)
13246 {
13247 xfree (record->arm_regs);
13248 xfree (record->arm_mems);
13249 }
13250
13251
/* Parse the current instruction and record the values of the registers and
   memory that will be changed in current instruction to record_arch_list".
   Return -1 if something is wrong.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
		    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* return value: -1:record failure ;  0:success  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;


  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
			  "addr = %s\n",
			  paddress (gdbarch, arm_record.this_addr));
    }

  /* Peek only two bytes first: enough to classify a Thumb vs Thumb-2
     instruction below; decode_insn re-reads at the real size.  */
  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record.gdbarch,
				       arm_record.this_addr), 2);
	}
      return -1;
    }

  /* Check the insn, whether it is thumb or arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);


  if (!(u_regval & t_bit))
    {
      /* We are decoding arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* is it thumb2 insn?  0x1D/0x1E/0x1F in the top five bits of the
	 first halfword mark a 32-bit Thumb-2 instruction.  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
	{
	  ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			     THUMB2_INSN_SIZE_BYTES);
	}
      else
	{
	  /* We are decoding thumb insn.  */
	  ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			     THUMB_INSN_SIZE_BYTES);
	}
    }

  if (0 == ret)
    {
      /* Record registers.  The PC changes for every instruction, so
	 it is recorded unconditionally.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_reg
		  (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
		ret = -1;
	    }
	}
      /* Record memories.  */
      if (arm_record.arm_mems)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_mem
		  ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
		   arm_record.arm_mems[no_of_rec].len))
		ret = -1;
	    }
	}

      if (record_full_arch_list_add_end ())
	ret = -1;
    }


  deallocate_reg_mem (&arm_record);

  return ret;
}
13357
13358 /* See arm-tdep.h. */
13359
13360 const target_desc *
13361 arm_read_description (arm_fp_type fp_type)
13362 {
13363 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13364
13365 if (tdesc == nullptr)
13366 {
13367 tdesc = arm_create_target_description (fp_type);
13368 tdesc_arm_list[fp_type] = tdesc;
13369 }
13370
13371 return tdesc;
13372 }
13373
13374 /* See arm-tdep.h. */
13375
13376 const target_desc *
13377 arm_read_mprofile_description (arm_m_profile_type m_type)
13378 {
13379 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13380
13381 if (tdesc == nullptr)
13382 {
13383 tdesc = arm_create_mprofile_target_description (m_type);
13384 tdesc_arm_mprofile_list[m_type] = tdesc;
13385 }
13386
13387 return tdesc;
13388 }
This page took 0.360766 seconds and 4 git commands to generate.