gdb: add target_ops::supports_displaced_step
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #if GDB_SELF_TEST
64 #include "gdbsupport/selftest.h"
65 #endif
66
67 static bool arm_debug;
68
69 /* Macros for setting and testing a bit in a minimal symbol that marks
70 it as Thumb function. The MSB of the minimal symbol's "info" field
71 is used for this purpose.
72
73 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
74 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
75
76 #define MSYMBOL_SET_SPECIAL(msym) \
77 MSYMBOL_TARGET_FLAG_1 (msym) = 1
78
79 #define MSYMBOL_IS_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym)
81
/* An ARM ELF mapping symbol ($a, $d or $t).  VALUE is the symbol's
   offset within its section (section-relative, not absolute — see
   arm_find_mapping_symbol); TYPE is the character after the '$'
   ('a' = ARM code, 't' = Thumb code, 'd' = data).  */

struct arm_mapping_symbol
{
  CORE_ADDR value;
  char type;

  /* Order by address, so vectors of these can be sorted and
     binary-searched.  */
  bool operator< (const arm_mapping_symbol &other) const
  { return this->value < other.value; }
};
90
91 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
92
/* Per-BFD data holding the mapping symbols for each section; attached
   to a BFD via arm_bfd_data_key below.  */

struct arm_per_bfd
{
  /* Allocate one (initially empty and unsorted) mapping-symbol vector
     per BFD section.  */
  explicit arm_per_bfd (size_t num_sections)
  : section_maps (new arm_mapping_symbol_vec[num_sections]),
    section_maps_sorted (new bool[num_sections] ())
    {}

  DISABLE_COPY_AND_ASSIGN (arm_per_bfd);

  /* Information about mapping symbols ($a, $d, $t) in the objfile.

     The format is an array of vectors of arm_mapping_symbols, there is one
     vector for each section of the objfile (the array is index by BFD section
     index).

     For each section, the vector of arm_mapping_symbol is sorted by
     symbol value (address).  Sorting is done lazily, on first lookup;
     see arm_find_mapping_symbol and SECTION_MAPS_SORTED below.  */
  std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;

  /* For each corresponding element of section_maps above, is this vector
     sorted.  */
  std::unique_ptr<bool[]> section_maps_sorted;
};
116
117 /* Per-bfd data used for mapping symbols. */
118 static bfd_key<arm_per_bfd> arm_bfd_data_key;
119
120 /* The list of available "set arm ..." and "show arm ..." commands. */
121 static struct cmd_list_element *setarmcmdlist = NULL;
122 static struct cmd_list_element *showarmcmdlist = NULL;
123
124 /* The type of floating-point to use. Keep this in sync with enum
125 arm_float_model, and the help string in _initialize_arm_tdep. */
126 static const char *const fp_model_strings[] =
127 {
128 "auto",
129 "softfpa",
130 "fpa",
131 "softvfp",
132 "vfp",
133 NULL
134 };
135
136 /* A variable that can be configured by the user. */
137 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
138 static const char *current_fp_model = "auto";
139
140 /* The ABI to use. Keep this in sync with arm_abi_kind. */
141 static const char *const arm_abi_strings[] =
142 {
143 "auto",
144 "APCS",
145 "AAPCS",
146 NULL
147 };
148
149 /* A variable that can be configured by the user. */
150 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
151 static const char *arm_abi_string = "auto";
152
153 /* The execution mode to assume. */
154 static const char *const arm_mode_strings[] =
155 {
156 "auto",
157 "arm",
158 "thumb",
159 NULL
160 };
161
162 static const char *arm_fallback_mode_string = "auto";
163 static const char *arm_force_mode_string = "auto";
164
165 /* The standard register names, and all the valid aliases for them. Note
166 that `fp', `sp' and `pc' are not added in this alias list, because they
167 have been added as builtin user registers in
168 std-regs.c:_initialize_frame_reg. */
169 static const struct
170 {
171 const char *name;
172 int regnum;
173 } arm_register_aliases[] = {
174 /* Basic register numbers. */
175 { "r0", 0 },
176 { "r1", 1 },
177 { "r2", 2 },
178 { "r3", 3 },
179 { "r4", 4 },
180 { "r5", 5 },
181 { "r6", 6 },
182 { "r7", 7 },
183 { "r8", 8 },
184 { "r9", 9 },
185 { "r10", 10 },
186 { "r11", 11 },
187 { "r12", 12 },
188 { "r13", 13 },
189 { "r14", 14 },
190 { "r15", 15 },
191 /* Synonyms (argument and variable registers). */
192 { "a1", 0 },
193 { "a2", 1 },
194 { "a3", 2 },
195 { "a4", 3 },
196 { "v1", 4 },
197 { "v2", 5 },
198 { "v3", 6 },
199 { "v4", 7 },
200 { "v5", 8 },
201 { "v6", 9 },
202 { "v7", 10 },
203 { "v8", 11 },
204 /* Other platform-specific names for r9. */
205 { "sb", 9 },
206 { "tr", 9 },
207 /* Special names. */
208 { "ip", 12 },
209 { "lr", 14 },
210 /* Names used by GCC (not listed in the ARM EABI). */
211 { "sl", 10 },
212 /* A special name from the older ATPCS. */
213 { "wr", 7 },
214 };
215
216 static const char *const arm_register_names[] =
217 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
218 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
219 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
220 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
221 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
222 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
223 "fps", "cpsr" }; /* 24 25 */
224
225 /* Holds the current set of options to be passed to the disassembler. */
226 static char *arm_disassembler_options;
227
228 /* Valid register name styles. */
229 static const char **valid_disassembly_styles;
230
231 /* Disassembly style to use. Default to "std" register names. */
232 static const char *disassembly_style;
233
234 /* All possible arm target descriptors. */
235 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
236 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
237
238 /* This is used to keep the bfd arch_info in sync with the disassembly
239 style. */
240 static void set_disassembly_style_sfunc (const char *, int,
241 struct cmd_list_element *);
242 static void show_disassembly_style_sfunc (struct ui_file *, int,
243 struct cmd_list_element *,
244 const char *);
245
246 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
247 readable_regcache *regcache,
248 int regnum, gdb_byte *buf);
249 static void arm_neon_quad_write (struct gdbarch *gdbarch,
250 struct regcache *regcache,
251 int regnum, const gdb_byte *buf);
252
253 static CORE_ADDR
254 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
255
256
257 /* get_next_pcs operations. */
258 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
259 arm_get_next_pcs_read_memory_unsigned_integer,
260 arm_get_next_pcs_syscall_next_pc,
261 arm_get_next_pcs_addr_bits_remove,
262 arm_get_next_pcs_is_thumb,
263 NULL,
264 };
265
/* Cached information about a frame, built by the prologue analyzers
   and consumed by the frame unwinders.  */

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets, in trad-frame format.  */
  struct trad_frame_saved_reg *saved_regs;
};
285
286 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
287 CORE_ADDR prologue_start,
288 CORE_ADDR prologue_end,
289 struct arm_prologue_cache *cache);
290
291 /* Architecture version for displaced stepping. This effects the behaviour of
292 certain instructions, and really should not be hard-wired. */
293
294 #define DISPLACED_STEPPING_ARCH_VERSION 5
295
296 /* See arm-tdep.h. */
297
298 bool arm_apcs_32 = true;
299
300 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
301
302 int
303 arm_psr_thumb_bit (struct gdbarch *gdbarch)
304 {
305 if (gdbarch_tdep (gdbarch)->is_m)
306 return XPSR_T;
307 else
308 return CPSR_T;
309 }
310
311 /* Determine if the processor is currently executing in Thumb mode. */
312
313 int
314 arm_is_thumb (struct regcache *regcache)
315 {
316 ULONGEST cpsr;
317 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
318
319 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
320
321 return (cpsr & t_bit) != 0;
322 }
323
324 /* Determine if FRAME is executing in Thumb mode. */
325
326 int
327 arm_frame_is_thumb (struct frame_info *frame)
328 {
329 CORE_ADDR cpsr;
330 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
331
332 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
333 directly (from a signal frame or dummy frame) or by interpreting
334 the saved LR (from a prologue or DWARF frame). So consult it and
335 trust the unwinders. */
336 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
337
338 return (cpsr & t_bit) != 0;
339 }
340
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location (absolute address) of the mapping
   symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
      if (data != NULL)
	{
	  unsigned int section_idx = sec->the_bfd_section->index;
	  arm_mapping_symbol_vec &map
	    = data->section_maps[section_idx];

	  /* Sort the vector on first use.  */
	  if (!data->section_maps_sorted[section_idx])
	    {
	      std::sort (map.begin (), map.end ());
	      data->section_maps_sorted[section_idx] = true;
	    }

	  /* Mapping symbol values are section-relative, so make the
	     search key relative too.  */
	  struct arm_mapping_symbol map_key
	    = { memaddr - obj_section_addr (sec), 0 };
	  arm_mapping_symbol_vec::const_iterator it
	    = std::lower_bound (map.begin (), map.end (), map_key);

	  /* std::lower_bound finds the earliest ordered insertion
	     point.  If the symbol at this position starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (it < map.end ())
	    {
	      if (it->value == map_key.value)
		{
		  if (start)
		    *start = it->value + obj_section_addr (sec);
		  return it->type;
		}
	    }

	  /* No exact match; the covering symbol, if any, is the one
	     just before the insertion point.  */
	  if (it > map.begin ())
	    {
	      arm_mapping_symbol_vec::const_iterator prev_it
		= it - 1;

	      if (start)
		*start = prev_it->value + obj_section_addr (sec);
	      return prev_it->type;
	    }
	}
    }

  /* No section or no mapping symbol covering MEMADDR.  */
  return 0;
}
401
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.

   Returns non-zero for Thumb, zero for ARM.  The sources below are
   consulted in priority order: displaced-stepping closure, Thumb
   address bit, user-forced mode, M-profile, mapping symbols, minimal
   symbols, user fallback mode, live $cpsr.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  arm_displaced_step_copy_insn_closure *dsc
    = ((arm_displaced_step_copy_insn_closure * )
       get_displaced_step_copy_insn_closure_by_addr (memaddr));

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
468
469 /* Determine if the address specified equals any of these magic return
470 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
471 architectures.
472
473 From ARMv6-M Reference Manual B1.5.8
474 Table B1-5 Exception return behavior
475
476 EXC_RETURN Return To Return Stack
477 0xFFFFFFF1 Handler mode Main
478 0xFFFFFFF9 Thread mode Main
479 0xFFFFFFFD Thread mode Process
480
481 From ARMv7-M Reference Manual B1.5.8
482 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
483
484 EXC_RETURN Return To Return Stack
485 0xFFFFFFF1 Handler mode Main
486 0xFFFFFFF9 Thread mode Main
487 0xFFFFFFFD Thread mode Process
488
489 Table B1-9 EXC_RETURN definition of exception return behavior, with
490 FP
491
492 EXC_RETURN Return To Return Stack Frame Type
493 0xFFFFFFE1 Handler mode Main Extended
494 0xFFFFFFE9 Thread mode Main Extended
495 0xFFFFFFED Thread mode Process Extended
496 0xFFFFFFF1 Handler mode Main Basic
497 0xFFFFFFF9 Thread mode Main Basic
498 0xFFFFFFFD Thread mode Process Basic
499
500 For more details see "B1.5.8 Exception return behavior"
501 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
502
503 static int
504 arm_m_addr_is_magic (CORE_ADDR addr)
505 {
506 switch (addr)
507 {
508 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
509 the exception return behavior. */
510 case 0xffffffe1:
511 case 0xffffffe9:
512 case 0xffffffed:
513 case 0xfffffff1:
514 case 0xfffffff9:
515 case 0xfffffffd:
516 /* Address is magic. */
517 return 1;
518
519 default:
520 /* Address is not magic. */
521 return 0;
522 }
523 }
524
/* Remove useless bits from addresses in a running program.  */

static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))
    return val;

  if (arm_apcs_32)
    /* 32-bit APCS: only the Thumb bit needs stripping.  */
    return UNMAKE_THUMB_ADDR (val);
  else
    /* 26-bit APCS: also mask away the top six bits and the low two
       bits, which do not form part of the address.  */
    return (val & 0x03fffffc);
}
540
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */

static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  /* Only consider the symbol if PC is exactly its entry point.  */
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && msym.minsym->linkage_name () != NULL)
    {
      const char *name = msym.minsym->linkage_name ();

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the leading "__" so that NAME lines up
	 with the stub's target name for the prefix checks below.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
594
595 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
596 the first 16-bit of instruction, and INSN2 is the second 16-bit of
597 instruction. */
598 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
599 ((bits ((insn1), 0, 3) << 12) \
600 | (bits ((insn1), 10, 10) << 11) \
601 | (bits ((insn2), 12, 14) << 8) \
602 | bits ((insn2), 0, 7))
603
604 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
605 the 32-bit instruction. */
606 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
607 ((bits ((insn), 16, 19) << 12) \
608 | bits ((insn), 0, 11))
609
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit (i:imm3:imm8) modified immediate field; the return
   value is the expanded 32-bit constant.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rot = imm >> 7;

  /* ROT >= 8: an 8-bit value 1bcdefgh rotated right by ROT bits.  */
  if (rot >= 8)
    return (0x80 | (imm & 0x7f)) << (32 - rot);

  unsigned int byte = imm & 0xff;

  switch (rot / 2)
    {
    case 0:	/* 00000000 00000000 00000000 abcdefgh  */
      return byte;
    case 1:	/* 00000000 abcdefgh 00000000 abcdefgh  */
      return byte | (byte << 16);
    case 2:	/* abcdefgh 00000000 abcdefgh 00000000  */
      return (byte << 8) | (byte << 24);
    default:	/* abcdefgh abcdefgh abcdefgh abcdefgh  */
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
633
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;
  return 0;
}
644
645 /* Analyze a Thumb prologue, looking for a recognizable stack frame
646 and frame pointer. Scan until we encounter a store that could
647 clobber the stack frame unexpectedly, or an unknown instruction.
648 Return the last address which is definitely safe to skip for an
649 initial breakpoint. */
650
651 static CORE_ADDR
652 thumb_analyze_prologue (struct gdbarch *gdbarch,
653 CORE_ADDR start, CORE_ADDR limit,
654 struct arm_prologue_cache *cache)
655 {
656 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
657 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
658 int i;
659 pv_t regs[16];
660 CORE_ADDR offset;
661 CORE_ADDR unrecognized_pc = 0;
662
663 for (i = 0; i < 16; i++)
664 regs[i] = pv_register (i, 0);
665 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
666
667 while (start < limit)
668 {
669 unsigned short insn;
670
671 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
672
673 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
674 {
675 int regno;
676 int mask;
677
678 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
679 break;
680
681 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
682 whether to save LR (R14). */
683 mask = (insn & 0xff) | ((insn & 0x100) << 6);
684
685 /* Calculate offsets of saved R0-R7 and LR. */
686 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
687 if (mask & (1 << regno))
688 {
689 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
690 -4);
691 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
692 }
693 }
694 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
695 {
696 offset = (insn & 0x7f) << 2; /* get scaled offset */
697 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
698 -offset);
699 }
700 else if (thumb_instruction_restores_sp (insn))
701 {
702 /* Don't scan past the epilogue. */
703 break;
704 }
705 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
706 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
707 (insn & 0xff) << 2);
708 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
709 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
710 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
711 bits (insn, 6, 8));
712 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
713 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
714 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
715 bits (insn, 0, 7));
716 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
717 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
718 && pv_is_constant (regs[bits (insn, 3, 5)]))
719 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
720 regs[bits (insn, 6, 8)]);
721 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
722 && pv_is_constant (regs[bits (insn, 3, 6)]))
723 {
724 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
725 int rm = bits (insn, 3, 6);
726 regs[rd] = pv_add (regs[rd], regs[rm]);
727 }
728 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
729 {
730 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
731 int src_reg = (insn & 0x78) >> 3;
732 regs[dst_reg] = regs[src_reg];
733 }
734 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
735 {
736 /* Handle stores to the stack. Normally pushes are used,
737 but with GCC -mtpcs-frame, there may be other stores
738 in the prologue to create the frame. */
739 int regno = (insn >> 8) & 0x7;
740 pv_t addr;
741
742 offset = (insn & 0xff) << 2;
743 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
744
745 if (stack.store_would_trash (addr))
746 break;
747
748 stack.store (addr, 4, regs[regno]);
749 }
750 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
751 {
752 int rd = bits (insn, 0, 2);
753 int rn = bits (insn, 3, 5);
754 pv_t addr;
755
756 offset = bits (insn, 6, 10) << 2;
757 addr = pv_add_constant (regs[rn], offset);
758
759 if (stack.store_would_trash (addr))
760 break;
761
762 stack.store (addr, 4, regs[rd]);
763 }
764 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
765 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
766 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
767 /* Ignore stores of argument registers to the stack. */
768 ;
769 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
770 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
771 /* Ignore block loads from the stack, potentially copying
772 parameters from memory. */
773 ;
774 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
775 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
776 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
777 /* Similarly ignore single loads from the stack. */
778 ;
779 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
780 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
781 /* Skip register copies, i.e. saves to another register
782 instead of the stack. */
783 ;
784 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
785 /* Recognize constant loads; even with small stacks these are necessary
786 on Thumb. */
787 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
788 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
789 {
790 /* Constant pool loads, for the same reason. */
791 unsigned int constant;
792 CORE_ADDR loc;
793
794 loc = start + 4 + bits (insn, 0, 7) * 4;
795 constant = read_memory_unsigned_integer (loc, 4, byte_order);
796 regs[bits (insn, 8, 10)] = pv_constant (constant);
797 }
798 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
799 {
800 unsigned short inst2;
801
802 inst2 = read_code_unsigned_integer (start + 2, 2,
803 byte_order_for_code);
804
805 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
806 {
807 /* BL, BLX. Allow some special function calls when
808 skipping the prologue; GCC generates these before
809 storing arguments to the stack. */
810 CORE_ADDR nextpc;
811 int j1, j2, imm1, imm2;
812
813 imm1 = sbits (insn, 0, 10);
814 imm2 = bits (inst2, 0, 10);
815 j1 = bit (inst2, 13);
816 j2 = bit (inst2, 11);
817
818 offset = ((imm1 << 12) + (imm2 << 1));
819 offset ^= ((!j2) << 22) | ((!j1) << 23);
820
821 nextpc = start + 4 + offset;
822 /* For BLX make sure to clear the low bits. */
823 if (bit (inst2, 12) == 0)
824 nextpc = nextpc & 0xfffffffc;
825
826 if (!skip_prologue_function (gdbarch, nextpc,
827 bit (inst2, 12) != 0))
828 break;
829 }
830
831 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
832 { registers } */
833 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
834 {
835 pv_t addr = regs[bits (insn, 0, 3)];
836 int regno;
837
838 if (stack.store_would_trash (addr))
839 break;
840
841 /* Calculate offsets of saved registers. */
842 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
843 if (inst2 & (1 << regno))
844 {
845 addr = pv_add_constant (addr, -4);
846 stack.store (addr, 4, regs[regno]);
847 }
848
849 if (insn & 0x0020)
850 regs[bits (insn, 0, 3)] = addr;
851 }
852
853 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
854 [Rn, #+/-imm]{!} */
855 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
856 {
857 int regno1 = bits (inst2, 12, 15);
858 int regno2 = bits (inst2, 8, 11);
859 pv_t addr = regs[bits (insn, 0, 3)];
860
861 offset = inst2 & 0xff;
862 if (insn & 0x0080)
863 addr = pv_add_constant (addr, offset);
864 else
865 addr = pv_add_constant (addr, -offset);
866
867 if (stack.store_would_trash (addr))
868 break;
869
870 stack.store (addr, 4, regs[regno1]);
871 stack.store (pv_add_constant (addr, 4),
872 4, regs[regno2]);
873
874 if (insn & 0x0020)
875 regs[bits (insn, 0, 3)] = addr;
876 }
877
878 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
879 && (inst2 & 0x0c00) == 0x0c00
880 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
881 {
882 int regno = bits (inst2, 12, 15);
883 pv_t addr = regs[bits (insn, 0, 3)];
884
885 offset = inst2 & 0xff;
886 if (inst2 & 0x0200)
887 addr = pv_add_constant (addr, offset);
888 else
889 addr = pv_add_constant (addr, -offset);
890
891 if (stack.store_would_trash (addr))
892 break;
893
894 stack.store (addr, 4, regs[regno]);
895
896 if (inst2 & 0x0100)
897 regs[bits (insn, 0, 3)] = addr;
898 }
899
900 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
902 {
903 int regno = bits (inst2, 12, 15);
904 pv_t addr;
905
906 offset = inst2 & 0xfff;
907 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
908
909 if (stack.store_would_trash (addr))
910 break;
911
912 stack.store (addr, 4, regs[regno]);
913 }
914
915 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
916 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
917 /* Ignore stores of argument registers to the stack. */
918 ;
919
920 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
921 && (inst2 & 0x0d00) == 0x0c00
922 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
923 /* Ignore stores of argument registers to the stack. */
924 ;
925
926 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
927 { registers } */
928 && (inst2 & 0x8000) == 0x0000
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
930 /* Ignore block loads from the stack, potentially copying
931 parameters from memory. */
932 ;
933
934 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
935 [Rn, #+/-imm] */
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 /* Similarly ignore dual loads from the stack. */
938 ;
939
940 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
941 && (inst2 & 0x0d00) == 0x0c00
942 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
943 /* Similarly ignore single loads from the stack. */
944 ;
945
946 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
948 /* Similarly ignore single loads from the stack. */
949 ;
950
951 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
952 && (inst2 & 0x8000) == 0x0000)
953 {
954 unsigned int imm = ((bits (insn, 10, 10) << 11)
955 | (bits (inst2, 12, 14) << 8)
956 | bits (inst2, 0, 7));
957
958 regs[bits (inst2, 8, 11)]
959 = pv_add_constant (regs[bits (insn, 0, 3)],
960 thumb_expand_immediate (imm));
961 }
962
963 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
964 && (inst2 & 0x8000) == 0x0000)
965 {
966 unsigned int imm = ((bits (insn, 10, 10) << 11)
967 | (bits (inst2, 12, 14) << 8)
968 | bits (inst2, 0, 7));
969
970 regs[bits (inst2, 8, 11)]
971 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
972 }
973
974 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
975 && (inst2 & 0x8000) == 0x0000)
976 {
977 unsigned int imm = ((bits (insn, 10, 10) << 11)
978 | (bits (inst2, 12, 14) << 8)
979 | bits (inst2, 0, 7));
980
981 regs[bits (inst2, 8, 11)]
982 = pv_add_constant (regs[bits (insn, 0, 3)],
983 - (CORE_ADDR) thumb_expand_immediate (imm));
984 }
985
986 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
987 && (inst2 & 0x8000) == 0x0000)
988 {
989 unsigned int imm = ((bits (insn, 10, 10) << 11)
990 | (bits (inst2, 12, 14) << 8)
991 | bits (inst2, 0, 7));
992
993 regs[bits (inst2, 8, 11)]
994 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
995 }
996
997 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
998 {
999 unsigned int imm = ((bits (insn, 10, 10) << 11)
1000 | (bits (inst2, 12, 14) << 8)
1001 | bits (inst2, 0, 7));
1002
1003 regs[bits (inst2, 8, 11)]
1004 = pv_constant (thumb_expand_immediate (imm));
1005 }
1006
1007 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1008 {
1009 unsigned int imm
1010 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1011
1012 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1013 }
1014
1015 else if (insn == 0xea5f /* mov.w Rd,Rm */
1016 && (inst2 & 0xf0f0) == 0)
1017 {
1018 int dst_reg = (inst2 & 0x0f00) >> 8;
1019 int src_reg = inst2 & 0xf;
1020 regs[dst_reg] = regs[src_reg];
1021 }
1022
1023 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1024 {
1025 /* Constant pool loads. */
1026 unsigned int constant;
1027 CORE_ADDR loc;
1028
1029 offset = bits (inst2, 0, 11);
1030 if (insn & 0x0080)
1031 loc = start + 4 + offset;
1032 else
1033 loc = start + 4 - offset;
1034
1035 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1036 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1037 }
1038
1039 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1040 {
1041 /* Constant pool loads. */
1042 unsigned int constant;
1043 CORE_ADDR loc;
1044
1045 offset = bits (inst2, 0, 7) << 2;
1046 if (insn & 0x0080)
1047 loc = start + 4 + offset;
1048 else
1049 loc = start + 4 - offset;
1050
1051 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1052 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1053
1054 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1055 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1056 }
1057
1058 else if (thumb2_instruction_changes_pc (insn, inst2))
1059 {
1060 /* Don't scan past anything that might change control flow. */
1061 break;
1062 }
1063 else
1064 {
1065 /* The optimizer might shove anything into the prologue,
1066 so we just skip what we don't recognize. */
1067 unrecognized_pc = start;
1068 }
1069
1070 start += 2;
1071 }
1072 else if (thumb_instruction_changes_pc (insn))
1073 {
1074 /* Don't scan past anything that might change control flow. */
1075 break;
1076 }
1077 else
1078 {
1079 /* The optimizer might shove anything into the prologue,
1080 so we just skip what we don't recognize. */
1081 unrecognized_pc = start;
1082 }
1083
1084 start += 2;
1085 }
1086
1087 if (arm_debug)
1088 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1089 paddress (gdbarch, start));
1090
1091 if (unrecognized_pc == 0)
1092 unrecognized_pc = start;
1093
1094 if (cache == NULL)
1095 return unrecognized_pc;
1096
1097 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1098 {
1099 /* Frame pointer is fp. Frame size is constant. */
1100 cache->framereg = ARM_FP_REGNUM;
1101 cache->framesize = -regs[ARM_FP_REGNUM].k;
1102 }
1103 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1104 {
1105 /* Frame pointer is r7. Frame size is constant. */
1106 cache->framereg = THUMB_FP_REGNUM;
1107 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1108 }
1109 else
1110 {
1111 /* Try the stack pointer... this is a bit desperate. */
1112 cache->framereg = ARM_SP_REGNUM;
1113 cache->framesize = -regs[ARM_SP_REGNUM].k;
1114 }
1115
1116 for (i = 0; i < 16; i++)
1117 if (stack.find_reg (gdbarch, i, &offset))
1118 cache->saved_regs[i].addr = offset;
1119
1120 return unrecognized_pc;
1121 }
1122
1123
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number to *DESTREG, and set
   the size of the instructions for loading the symbol in *OFFSET.  Return
   0 if the instructions are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays 0 (the "not recognized" return value) unless one of
     the known load sequences below matches.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  /* PC-relative literal load: base is the word-aligned PC + 4;
	     the loaded word is the symbol's address.  */
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* A movw must be followed by a matching movt to form the full
	     32-bit address; read the next 32-bit (two halfword) insn.  */
	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* In ARM state the PC reads as the instruction address + 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1207
1208 /* Try to skip a sequence of instructions used for stack protector. If PC
1209 points to the first instruction of this sequence, return the address of
1210 first instruction after this sequence, otherwise, return original PC.
1211
1212 On arm, this sequence of instructions is composed of mainly three steps,
1213 Step 1: load symbol __stack_chk_guard,
1214 Step 2: load from address of __stack_chk_guard,
1215 Step 3: store it to somewhere else.
1216
1217 Usually, instructions on step 2 and step 3 are the same on various ARM
1218 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1219 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1220 instructions in step 1 vary from different ARM architectures. On ARMv7,
1221 they are,
1222
1223 movw Rn, #:lower16:__stack_chk_guard
1224 movt Rn, #:upper16:__stack_chk_guard
1225
1226 On ARMv5t, it is,
1227
1228 ldr Rn, .Label
1229 ....
   .Label:
1231 .word __stack_chk_guard
1232
1233 Since ldr/str is a very popular instruction, we can't use them as
1234 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1235 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
1237
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  On success ADDR is the
     address loaded, BASEREG the register it was loaded into, and OFFSET
     the byte size of the loading sequence.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  The load must read
	 through the register that received &__stack_chk_guard.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  The stored register
	 must be the one just loaded in Step 2.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1309
1310 /* Advance the PC across any function entry prologue instructions to
1311 reach some "real" code.
1312
1313 The APCS (ARM Procedure Call Standard) defines the following
1314 prologue:
1315
1316 mov ip, sp
1317 [stmfd sp!, {a1,a2,a3,a4}]
1318 stmfd sp!, {...,fp,ip,lr,pc}
1319 [stfe f7, [sp, #-12]!]
1320 [stfe f6, [sp, #-12]!]
1321 [stfe f5, [sp, #-12]!]
1322 [stfe f4, [sp, #-12]!]
1323 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1324
static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* Also step over any stack-protector sequence the compiler placed
	 right after the prologue.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  /* If the analyzer did not accept the whole candidate span as
	     prologue, don't skip anything: stay at the function start.  */
	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;          /* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}
1402
1403 /* *INDENT-OFF* */
1404 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1405 This function decodes a Thumb function prologue to determine:
1406 1) the size of the stack frame
1407 2) which registers are saved on it
1408 3) the offsets of saved regs
1409 4) the offset from the stack pointer to the frame pointer
1410
1411 A typical Thumb function prologue would create this stack frame
1412 (offsets relative to FP)
1413 old SP -> 24 stack parameters
1414 20 LR
1415 16 R7
1416 R7 -> 0 local variables (16 bytes)
1417 SP -> -12 additional stack space (12 bytes)
1418 The frame size would thus be 36 bytes, and the frame offset would be
1419 12 bytes. The frame register is R7.
1420
1421 The comments for thumb_skip_prolog() describe the algorithm we use
1422 to detect the end of the prolog. */
1423 /* *INDENT-ON* */
1424
1425 static void
1426 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1427 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1428 {
1429 CORE_ADDR prologue_start;
1430 CORE_ADDR prologue_end;
1431
1432 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1433 &prologue_end))
1434 {
1435 /* See comment in arm_scan_prologue for an explanation of
1436 this heuristics. */
1437 if (prologue_end > prologue_start + 64)
1438 {
1439 prologue_end = prologue_start + 64;
1440 }
1441 }
1442 else
1443 /* We're in the boondocks: we have no idea where the start of the
1444 function is. */
1445 return;
1446
1447 prologue_end = std::min (prologue_end, prev_pc);
1448
1449 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1450 }
1451
1452 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1453 otherwise. */
1454
1455 static int
1456 arm_instruction_restores_sp (unsigned int insn)
1457 {
1458 if (bits (insn, 28, 31) != INST_NV)
1459 {
1460 if ((insn & 0x0df0f000) == 0x0080d000
1461 /* ADD SP (register or immediate). */
1462 || (insn & 0x0df0f000) == 0x0040d000
1463 /* SUB SP (register or immediate). */
1464 || (insn & 0x0ffffff0) == 0x01a0d000
1465 /* MOV SP. */
1466 || (insn & 0x0fff0000) == 0x08bd0000
1467 /* POP (LDMIA). */
1468 || (insn & 0x0fff0000) == 0x049d0000)
1469 /* POP of a single register. */
1470 return 1;
1471 }
1472
1473 return 0;
1474 }
1475
1476 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1477 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1478 fill it in. Return the first address not recognized as a prologue
1479 instruction.
1480
1481 We recognize all the instructions typically found in ARM prologues,
1482 plus harmless instructions which can be skipped (either for analysis
1483 purposes, or a more restrictive set that can be skipped when finding
1484 the end of the prologue). */
1485
static CORE_ADDR
arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int regno;
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];	/* Symbolic value of each core register.  */
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish" 
     and other operations that rely on a knowledge of the stack
     traceback.  */

  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));

  for (current_pc = prologue_start;
       current_pc < prologue_end;
       current_pc += 4)
    {
      unsigned int insn
	= read_code_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)		/* mov ip, sp */
	{
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* ARM immediate: 8-bit value rotated right by 2 * rotate field.  */
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
	  continue;
	}
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	{
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  stack.store (regs[ARM_SP_REGNUM], 4,
		       regs[bits (insn, 12, 15)]);
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   or
	   stmfd sp!, {a1, a2, a3, a4} */
	{
	  int mask = insn & 0xffff;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  /* Calculate offsets of saved registers.  STM stores the
	     highest-numbered register at the highest address, so walk
	     the mask downwards while decrementing SP.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM]
		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* No need to add this to saved_regs -- it's just arg regs.  */
	  continue;
	}
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
	}
      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
	}
      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
							   [sp, -#c]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
	}
      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
							   [sp!] */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  /* The register count is encoded in the N0/N1 bits.  */
	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
	      else
		n_saved_fp_regs = 1;
	    }
	  else
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
	      else
		n_saved_fp_regs = 4;
	    }

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	    {
	      /* NOTE(review): fp_start_reg is incremented both here (in
		 the store below) and in the for-loop header, so only
		 every other register is recorded — this looks like a
		 double-increment bug; confirm intended behavior.  */
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	      stack.store (regs[ARM_SP_REGNUM], 12,
			   regs[fp_start_reg++]);
	    }
	}
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
	{
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))
	    continue;
	  else
	    break;
	}
      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
	break;
      else if (arm_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	continue;
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
	continue;
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */
	continue;
      else
	{
	  /* The optimizer might shove anything into the prologue, if
	     we build up cache (cache != NULL) from scanning prologue,
	     we just skip what we don't recognize and scan further to
	     make cache as complete as possible.  However, if we skip
	     prologue, we'll stop immediately on unrecognized
	     instruction.  */
	  unrecognized_pc = current_pc;
	  if (cache != NULL)
	    continue;
	  else
	    break;
	}
    }

  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

  if (cache)
    {
      int framereg, framesize;

      /* The frame size is just the distance from the frame register
	 to the original stack pointer.  */
      if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
	{
	  /* Frame pointer is fp.  */
	  framereg = ARM_FP_REGNUM;
	  framesize = -regs[ARM_FP_REGNUM].k;
	}
      else
	{
	  /* Try the stack pointer... this is a bit desperate.  */
	  framereg = ARM_SP_REGNUM;
	  framesize = -regs[ARM_SP_REGNUM].k;
	}

      cache->framereg = framereg;
      cache->framesize = framesize;

      /* Record, for each register the prologue stored, its offset from
	 the original SP (made absolute later by the cache builder).  */
      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (stack.find_reg (gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  return unrecognized_pc;
}
1739
static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      /* AAPCS does not use a frame register, so we can abort here.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
	return;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
					      &return_value))
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the frame's resume address.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
1834
1835 static struct arm_prologue_cache *
1836 arm_make_prologue_cache (struct frame_info *this_frame)
1837 {
1838 int reg;
1839 struct arm_prologue_cache *cache;
1840 CORE_ADDR unwound_fp;
1841
1842 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1843 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1844
1845 arm_scan_prologue (this_frame, cache);
1846
1847 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1848 if (unwound_fp == 0)
1849 return cache;
1850
1851 cache->prev_sp = unwound_fp + cache->framesize;
1852
1853 /* Calculate actual addresses of saved registers using offsets
1854 determined by arm_scan_prologue. */
1855 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1856 if (trad_frame_addr_p (cache->saved_regs, reg))
1857 cache->saved_regs[reg].addr += cache->prev_sp;
1858
1859 return cache;
1860 }
1861
1862 /* Implementation of the stop_reason hook for arm_prologue frames. */
1863
1864 static enum unwind_stop_reason
1865 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1866 void **this_cache)
1867 {
1868 struct arm_prologue_cache *cache;
1869 CORE_ADDR pc;
1870
1871 if (*this_cache == NULL)
1872 *this_cache = arm_make_prologue_cache (this_frame);
1873 cache = (struct arm_prologue_cache *) *this_cache;
1874
1875 /* This is meant to halt the backtrace at "_start". */
1876 pc = get_frame_pc (this_frame);
1877 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1878 return UNWIND_OUTERMOST;
1879
1880 /* If we've hit a wall, stop. */
1881 if (cache->prev_sp == 0)
1882 return UNWIND_OUTERMOST;
1883
1884 return UNWIND_NO_REASON;
1885 }
1886
1887 /* Our frame ID for a normal frame is the current function's starting PC
1888 and the caller's SP when we were called. */
1889
1890 static void
1891 arm_prologue_this_id (struct frame_info *this_frame,
1892 void **this_cache,
1893 struct frame_id *this_id)
1894 {
1895 struct arm_prologue_cache *cache;
1896 struct frame_id id;
1897 CORE_ADDR pc, func;
1898
1899 if (*this_cache == NULL)
1900 *this_cache = arm_make_prologue_cache (this_frame);
1901 cache = (struct arm_prologue_cache *) *this_cache;
1902
1903 /* Use function start address as part of the frame ID. If we cannot
1904 identify the start address (due to missing symbol information),
1905 fall back to just using the current PC. */
1906 pc = get_frame_pc (this_frame);
1907 func = get_frame_func (this_frame);
1908 if (!func)
1909 func = pc;
1910
1911 id = frame_id_build (cache->prev_sp, func);
1912 *this_id = id;
1913 }
1914
1915 static struct value *
1916 arm_prologue_prev_register (struct frame_info *this_frame,
1917 void **this_cache,
1918 int prev_regnum)
1919 {
1920 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1921 struct arm_prologue_cache *cache;
1922
1923 if (*this_cache == NULL)
1924 *this_cache = arm_make_prologue_cache (this_frame);
1925 cache = (struct arm_prologue_cache *) *this_cache;
1926
1927 /* If we are asked to unwind the PC, then we need to return the LR
1928 instead. The prologue may save PC, but it will point into this
1929 frame's prologue, not the next frame's resume location. Also
1930 strip the saved T bit. A valid LR may have the low bit set, but
1931 a valid PC never does. */
1932 if (prev_regnum == ARM_PC_REGNUM)
1933 {
1934 CORE_ADDR lr;
1935
1936 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1937 return frame_unwind_got_constant (this_frame, prev_regnum,
1938 arm_addr_bits_remove (gdbarch, lr));
1939 }
1940
1941 /* SP is generally not saved to the stack, but this frame is
1942 identified by the next frame's stack pointer at the time of the call.
1943 The value was already reconstructed into PREV_SP. */
1944 if (prev_regnum == ARM_SP_REGNUM)
1945 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1946
1947 /* The CPSR may have been changed by the call instruction and by the
1948 called function. The only bit we can reconstruct is the T bit,
1949 by checking the low bit of LR as of the call. This is a reliable
1950 indicator of Thumb-ness except for some ARM v4T pre-interworking
1951 Thumb code, which could get away with a clear low bit as long as
1952 the called function did not use bx. Guess that all other
1953 bits are unchanged; the condition flags are presumably lost,
1954 but the processor status is likely valid. */
1955 if (prev_regnum == ARM_PS_REGNUM)
1956 {
1957 CORE_ADDR lr, cpsr;
1958 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1959
1960 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1961 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1962 if (IS_THUMB_ADDR (lr))
1963 cpsr |= t_bit;
1964 else
1965 cpsr &= ~t_bit;
1966 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1967 }
1968
1969 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1970 prev_regnum);
1971 }
1972
/* Unwinder for normal ARM frames, based on the prologue analysis above.  */
struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
1981
1982 /* Maintain a list of ARM exception table entries per objfile, similar to the
1983 list of mapping symbols. We only cache entries for standard ARM-defined
1984 personality routines; the cache will contain only the frame unwinding
1985 instructions associated with the entry (not the descriptors). */
1986
struct arm_exidx_entry
{
  /* Section-relative start address of the region this entry covers.  */
  CORE_ADDR addr;
  /* Normalized unwind instruction list (obstack-allocated), or NULL
     when the entry carries no unwind instructions (CANTUNWIND).  */
  gdb_byte *entry;

  /* Order entries by start address, so a sorted vector of them can be
     searched with std::lower_bound.  */
  bool operator< (const arm_exidx_entry &other) const
  {
    return addr < other.addr;
  }
};
1997
struct arm_exidx_data
{
  /* One vector of exception table entries per BFD section, indexed by
     the section's index; each vector is sorted by increasing ADDR.  */
  std::vector<std::vector<arm_exidx_entry>> section_maps;
};

/* Per-BFD key to store exception handling information.  */
static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2005
2006 static struct obj_section *
2007 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2008 {
2009 struct obj_section *osect;
2010
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2013 {
2014 bfd_vma start, size;
2015 start = bfd_section_vma (osect->the_bfd_section);
2016 size = bfd_section_size (osect->the_bfd_section);
2017
2018 if (start <= vma && vma < start + size)
2019 return osect;
2020 }
2021
2022 return NULL;
2023 }
2024
2025 /* Parse contents of exception table and exception index sections
2026 of OBJFILE, and fill in the exception table entry cache.
2027
2028 For each entry that refers to a standard ARM-defined personality
2029 routine, extract the frame unwinding instructions (from either
2030 the index or the table section). The unwinding instructions
2031 are normalized by:
2032 - extracting them from the rest of the table data
2033 - converting to host endianness
2034 - appending the implicit 0xb0 ("Finish") code
2035
2036 The extracted and normalized instructions are stored for later
2037 retrieval by the arm_find_exidx_entry routine. */
2038
static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
    return;

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  gdb::byte_vector exidx_data;
  if (exidx)
    {
      exidx_vma = bfd_section_vma (exidx);
      exidx_data.resize (bfd_section_size (exidx));

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data.data (), 0,
				     exidx_data.size ()))
	return;
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  gdb::byte_vector extab_data;
  if (extab)
    {
      extab_vma = bfd_section_vma (extab);
      extab_data.resize (bfd_section_size (extab));

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data.data (), 0,
				     extab_data.size ()))
	return;
    }

  /* Allocate exception table data structure.  */
  data = arm_exidx_data_key.emplace (objfile->obfd);
  data->section_maps.resize (objfile->obfd->section_count);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a prel31 offset to the function start, and either an
     inlined unwind description or a prel31 pointer into .ARM.extab.  */
  for (i = 0; i < exidx_data.size () / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd,
				  exidx_data.data () + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The XOR/subtract pair
	 sign-extends the 31-bit prel31 field to the full bfd_vma.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_section_vma (sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  Sign-extend the
	     prel31 offset and make it absolute.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  Word count is in bits 16..23.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_data.size ())
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   (extab_data.data ()
					    + addr - extab_vma));
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address: drop the unwind data entirely if it
	 would run outside the .ARM.extab contents we read.  */
      if (n_words)
	if (addr < extab_vma
	    || addr + 4 * n_words > extab_vma + extab_data.size ())
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the valid bytes of WORD, most significant first.  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  /* Then the extab words, converted to big-endian byte order.  */
	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      data->section_maps[sec->the_bfd_section->index].push_back
	(new_exidx_entry);
    }
}
2235
2236 /* Search for the exception table entry covering MEMADDR. If one is found,
2237 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2238 set *START to the start of the region covered by this entry. */
2239
2240 static gdb_byte *
2241 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2242 {
2243 struct obj_section *sec;
2244
2245 sec = find_pc_section (memaddr);
2246 if (sec != NULL)
2247 {
2248 struct arm_exidx_data *data;
2249 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2250
2251 data = arm_exidx_data_key.get (sec->objfile->obfd);
2252 if (data != NULL)
2253 {
2254 std::vector<arm_exidx_entry> &map
2255 = data->section_maps[sec->the_bfd_section->index];
2256 if (!map.empty ())
2257 {
2258 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2259
2260 /* std::lower_bound finds the earliest ordered insertion
2261 point. If the following symbol starts at this exact
2262 address, we use that; otherwise, the preceding
2263 exception table entry covers this address. */
2264 if (idx < map.end ())
2265 {
2266 if (idx->addr == map_key.addr)
2267 {
2268 if (start)
2269 *start = idx->addr + obj_section_addr (sec);
2270 return idx->entry;
2271 }
2272 }
2273
2274 if (idx > map.begin ())
2275 {
2276 idx = idx - 1;
2277 if (start)
2278 *start = idx->addr + obj_section_addr (sec);
2279 return idx->entry;
2280 }
2281 }
2282 }
2283 }
2284
2285 return NULL;
2286 }
2287
2288 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2289 instruction list from the ARM exception table entry ENTRY, allocate and
2290 return a prologue cache structure describing how to unwind this frame.
2291
2292 Return NULL if the unwinding instruction list contains a "spare",
2293 "reserved" or "refuse to unwind" instruction as defined in section
2294 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2295 for the ARM Architecture" document. */
2296
static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  /* VSP is the "virtual stack pointer" of the EHABI unwinding model;
     VSP_VALID tracks whether it has been loaded from the frame yet.  */
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp = vsp + (x << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp = vsp - (x << 2) - 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000iiii iiiiiiii: pop r4-r15 under a 12-bit mask.  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  /* 1001nnnn: vsp = r[nnnn].  */
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  /* 10100nnn / 10101nnn: pop r4-r[4+nnn], plus LR if bit 3.  */
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* 10110000: Finish.
	     We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  /* 10110001 0000iiii: pop r0-r3 under mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* 10110010 uleb128: vsp = vsp + 0x204 + (uleb128 << 2).  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  /* 10110011 sssscccc: pop VFP regs (FSTMFDX encoding).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  /* 10111nnn: pop D8-D[8+nnn] (FSTMFDX encoding).  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  /* 11000110 sssscccc: pop iWMMXt WR registers.  Note this must
	     be tested before the (insn & 0xf8) == 0xc0 case below.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  /* 11000111 0000iiii: pop iWMMXt WCGR registers under mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  /* 11000nnn: pop iWMMXt WR10-WR[10+nnn].  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  /* 11001000 sssscccc: pop VFP regs D16-D31 range.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  /* 11001001 sssscccc: pop VFP regs (FSTMFDD encoding).  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  /* 11010nnn: pop D8-D[8+nnn] (FSTMFDD encoding).  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2586
2587 /* Unwinding via ARM exception table entries. Note that the sniffer
2588 already computes a filled-in prologue cache, which is then used
2589 with the same arm_prologue_this_id and arm_prologue_prev_register
2590 routines also used for prologue-parsing based unwinding. */
2591
static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  ULONGEST insn;

	  /* Check for a Thumb 'svc' (0xdfxx) just before PC.  */
	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
						 2, byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  ULONGEST insn;

	  /* Check for an ARM 'svc' (condition field 0xf in bits
	     24-27 of the encoding checked here) just before PC.  */
	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
						 4, byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2681
/* Unwinder based on ARM exception table entries.  The sniffer computes a
   filled-in prologue cache, so the this_id and prev_register hooks are
   shared with the prologue-analysis unwinder.  */

struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2690
2691 static struct arm_prologue_cache *
2692 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2693 {
2694 struct arm_prologue_cache *cache;
2695 int reg;
2696
2697 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2698 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2699
2700 /* Still rely on the offset calculated from prologue. */
2701 arm_scan_prologue (this_frame, cache);
2702
2703 /* Since we are in epilogue, the SP has been restored. */
2704 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2705
2706 /* Calculate actual addresses of saved registers using offsets
2707 determined by arm_scan_prologue. */
2708 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2709 if (trad_frame_addr_p (cache->saved_regs, reg))
2710 cache->saved_regs[reg].addr += cache->prev_sp;
2711
2712 return cache;
2713 }
2714
2715 /* Implementation of function hook 'this_id' in
2716 'struct frame_uwnind' for epilogue unwinder. */
2717
2718 static void
2719 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2720 void **this_cache,
2721 struct frame_id *this_id)
2722 {
2723 struct arm_prologue_cache *cache;
2724 CORE_ADDR pc, func;
2725
2726 if (*this_cache == NULL)
2727 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2728 cache = (struct arm_prologue_cache *) *this_cache;
2729
2730 /* Use function start address as part of the frame ID. If we cannot
2731 identify the start address (due to missing symbol information),
2732 fall back to just using the current PC. */
2733 pc = get_frame_pc (this_frame);
2734 func = get_frame_func (this_frame);
2735 if (func == 0)
2736 func = pc;
2737
2738 (*this_id) = frame_id_build (cache->prev_sp, pc);
2739 }
2740
/* Implementation of function hook 'prev_register' in
   'struct frame_unwind' for epilogue unwinder.  Builds the epilogue
   cache lazily, then defers to the common prologue-cache register
   reconstruction.  */

static struct value *
arm_epilogue_frame_prev_register (struct frame_info *this_frame,
				  void **this_cache, int regnum)
{
  if (*this_cache == NULL)
    *this_cache = arm_make_epilogue_frame_cache (this_frame);

  return arm_prologue_prev_register (this_frame, this_cache, regnum);
}
2753
/* Forward declarations; the definitions appear further below.  */
static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
					  CORE_ADDR pc);
static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
					  CORE_ADDR pc);
2758
2759 /* Implementation of function hook 'sniffer' in
2760 'struct frame_uwnind' for epilogue unwinder. */
2761
2762 static int
2763 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2764 struct frame_info *this_frame,
2765 void **this_prologue_cache)
2766 {
2767 if (frame_relative_level (this_frame) == 0)
2768 {
2769 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2770 CORE_ADDR pc = get_frame_pc (this_frame);
2771
2772 if (arm_frame_is_thumb (this_frame))
2773 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2774 else
2775 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2776 }
2777 else
2778 return 0;
2779 }
2780
/* Frame unwinder from epilogue.  Claimed only for the innermost frame;
   see arm_epilogue_frame_sniffer.  */

static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
2792
2793 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2794 trampoline, return the target PC. Otherwise return 0.
2795
2796 void call0a (char c, short s, int i, long l) {}
2797
2798 int main (void)
2799 {
2800 (*pointer_to_call0a) (c, s, i, l);
2801 }
2802
2803 Instead of calling a stub library function _call_via_xx (xx is
2804 the register name), GCC may inline the trampoline in the object
2805 file as below (register r2 has the address of call0a).
2806
2807 .global main
2808 .type main, %function
2809 ...
2810 bl .L1
2811 ...
2812 .size main, .-main
2813
2814 .L1:
2815 bx r2
2816
2817 The trampoline 'bx r2' doesn't belong to main. */
2818
2819 static CORE_ADDR
2820 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2821 {
2822 /* The heuristics of recognizing such trampoline is that FRAME is
2823 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2824 if (arm_frame_is_thumb (frame))
2825 {
2826 gdb_byte buf[2];
2827
2828 if (target_read_memory (pc, buf, 2) == 0)
2829 {
2830 struct gdbarch *gdbarch = get_frame_arch (frame);
2831 enum bfd_endian byte_order_for_code
2832 = gdbarch_byte_order_for_code (gdbarch);
2833 uint16_t insn
2834 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2835
2836 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2837 {
2838 CORE_ADDR dest
2839 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2840
2841 /* Clear the LSB so that gdb core sets step-resume
2842 breakpoint at the right address. */
2843 return UNMAKE_THUMB_ADDR (dest);
2844 }
2845 }
2846 }
2847
2848 return 0;
2849 }
2850
2851 static struct arm_prologue_cache *
2852 arm_make_stub_cache (struct frame_info *this_frame)
2853 {
2854 struct arm_prologue_cache *cache;
2855
2856 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2857 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2858
2859 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2860
2861 return cache;
2862 }
2863
2864 /* Our frame ID for a stub frame is the current SP and LR. */
2865
2866 static void
2867 arm_stub_this_id (struct frame_info *this_frame,
2868 void **this_cache,
2869 struct frame_id *this_id)
2870 {
2871 struct arm_prologue_cache *cache;
2872
2873 if (*this_cache == NULL)
2874 *this_cache = arm_make_stub_cache (this_frame);
2875 cache = (struct arm_prologue_cache *) *this_cache;
2876
2877 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2878 }
2879
2880 static int
2881 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2882 struct frame_info *this_frame,
2883 void **this_prologue_cache)
2884 {
2885 CORE_ADDR addr_in_block;
2886 gdb_byte dummy[4];
2887 CORE_ADDR pc, start_addr;
2888 const char *name;
2889
2890 addr_in_block = get_frame_address_in_block (this_frame);
2891 pc = get_frame_pc (this_frame);
2892 if (in_plt_section (addr_in_block)
2893 /* We also use the stub winder if the target memory is unreadable
2894 to avoid having the prologue unwinder trying to read it. */
2895 || target_read_memory (pc, dummy, 4) != 0)
2896 return 1;
2897
2898 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2899 && arm_skip_bx_reg (this_frame, pc) != 0)
2900 return 1;
2901
2902 return 0;
2903 }
2904
/* Unwinder for stub frames -- PLT entries, unreadable code, and 'bx Rm'
   trampolines, as recognized by arm_stub_unwind_sniffer.  */

struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2913
2914 /* Put here the code to store, into CACHE->saved_regs, the addresses
2915 of the saved registers of frame described by THIS_FRAME. CACHE is
2916 returned. */
2917
2918 static struct arm_prologue_cache *
2919 arm_m_exception_cache (struct frame_info *this_frame)
2920 {
2921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2922 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2923 struct arm_prologue_cache *cache;
2924 CORE_ADDR unwound_sp;
2925 LONGEST xpsr;
2926
2927 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2928 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2929
2930 unwound_sp = get_frame_register_unsigned (this_frame,
2931 ARM_SP_REGNUM);
2932
2933 /* The hardware saves eight 32-bit words, comprising xPSR,
2934 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2935 "B1.5.6 Exception entry behavior" in
2936 "ARMv7-M Architecture Reference Manual". */
2937 cache->saved_regs[0].addr = unwound_sp;
2938 cache->saved_regs[1].addr = unwound_sp + 4;
2939 cache->saved_regs[2].addr = unwound_sp + 8;
2940 cache->saved_regs[3].addr = unwound_sp + 12;
2941 cache->saved_regs[12].addr = unwound_sp + 16;
2942 cache->saved_regs[14].addr = unwound_sp + 20;
2943 cache->saved_regs[15].addr = unwound_sp + 24;
2944 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2945
2946 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2947 aligner between the top of the 32-byte stack frame and the
2948 previous context's stack pointer. */
2949 cache->prev_sp = unwound_sp + 32;
2950 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2951 && (xpsr & (1 << 9)) != 0)
2952 cache->prev_sp += 4;
2953
2954 return cache;
2955 }
2956
2957 /* Implementation of function hook 'this_id' in
2958 'struct frame_uwnind'. */
2959
2960 static void
2961 arm_m_exception_this_id (struct frame_info *this_frame,
2962 void **this_cache,
2963 struct frame_id *this_id)
2964 {
2965 struct arm_prologue_cache *cache;
2966
2967 if (*this_cache == NULL)
2968 *this_cache = arm_m_exception_cache (this_frame);
2969 cache = (struct arm_prologue_cache *) *this_cache;
2970
2971 /* Our frame ID for a stub frame is the current SP and LR. */
2972 *this_id = frame_id_build (cache->prev_sp,
2973 get_frame_pc (this_frame));
2974 }
2975
2976 /* Implementation of function hook 'prev_register' in
2977 'struct frame_uwnind'. */
2978
2979 static struct value *
2980 arm_m_exception_prev_register (struct frame_info *this_frame,
2981 void **this_cache,
2982 int prev_regnum)
2983 {
2984 struct arm_prologue_cache *cache;
2985
2986 if (*this_cache == NULL)
2987 *this_cache = arm_m_exception_cache (this_frame);
2988 cache = (struct arm_prologue_cache *) *this_cache;
2989
2990 /* The value was already reconstructed into PREV_SP. */
2991 if (prev_regnum == ARM_SP_REGNUM)
2992 return frame_unwind_got_constant (this_frame, prev_regnum,
2993 cache->prev_sp);
2994
2995 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2996 prev_regnum);
2997 }
2998
2999 /* Implementation of function hook 'sniffer' in
3000 'struct frame_uwnind'. */
3001
3002 static int
3003 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3004 struct frame_info *this_frame,
3005 void **this_prologue_cache)
3006 {
3007 CORE_ADDR this_pc = get_frame_pc (this_frame);
3008
3009 /* No need to check is_m; this sniffer is only registered for
3010 M-profile architectures. */
3011
3012 /* Check if exception frame returns to a magic PC value. */
3013 return arm_m_addr_is_magic (this_pc);
3014 }
3015
/* Frame unwinder for M-profile exceptions.  Registered as a
   SIGTRAMP_FRAME since the frame was created by exception entry
   rather than an ordinary call.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3027
3028 static CORE_ADDR
3029 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3030 {
3031 struct arm_prologue_cache *cache;
3032
3033 if (*this_cache == NULL)
3034 *this_cache = arm_make_prologue_cache (this_frame);
3035 cache = (struct arm_prologue_cache *) *this_cache;
3036
3037 return cache->prev_sp - cache->framesize;
3038 }
3039
/* Frame base handler tied to the prologue unwinder; the frame base,
   args base and locals base are all supplied by arm_normal_frame_base.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3046
/* Special-case 'prev_register' handler used by the DWARF frame
   unwinder for the registers that need post-processing.  Only
   ARM_PC_REGNUM and ARM_PS_REGNUM reach here (they are routed to this
   function by arm_dwarf2_frame_init_reg below); any other register is
   an internal error.  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.
	 The unwound LR's Thumb bit tells us which instruction set the
	 caller was executing.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
3081
3082 static void
3083 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3084 struct dwarf2_frame_state_reg *reg,
3085 struct frame_info *this_frame)
3086 {
3087 switch (regnum)
3088 {
3089 case ARM_PC_REGNUM:
3090 case ARM_PS_REGNUM:
3091 reg->how = DWARF2_FRAME_REG_FN;
3092 reg->loc.fn = arm_dwarf2_prev_register;
3093 break;
3094 case ARM_SP_REGNUM:
3095 reg->how = DWARF2_FRAME_REG_CFA;
3096 break;
3097 }
3098 }
3099
/* Implement the stack_frame_destroyed_p gdbarch method, Thumb
   variant.  Return non-zero if PC appears to be inside the epilogue
   of its function, i.e. after the frame has been (partially) torn
   down.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot scan; assume not destroyed.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  /* Forward scan: every instruction from PC to the function end must
     be part of a plausible epilogue ending in a return.  */
  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit instruction.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* INSN2 is the halfword immediately before PC (a possible 16-bit
     instruction); INSN..INSN2 together form a possible 32-bit one.  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3209
/* ARM-mode (non-Thumb) helper for arm_stack_frame_destroyed_p:
   return non-zero if PC appears to be inside a function epilogue.  */

static int
arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Skip instructions whose condition field is NV ("never").  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	  && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  /* There must be at least one instruction before PC inside the
     function for the backward check.  */
  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3258
3259 /* Implement the stack_frame_destroyed_p gdbarch method. */
3260
3261 static int
3262 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3263 {
3264 if (arm_pc_is_thumb (gdbarch, pc))
3265 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3266 else
3267 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3268 }
3269
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.
   Each node owns a heap copy of its data.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Next item down the stack, or NULL.  */
  gdb_byte *data;		/* Heap-allocated copy of the contents.  */
};
3279
3280 static struct stack_item *
3281 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3282 {
3283 struct stack_item *si;
3284 si = XNEW (struct stack_item);
3285 si->data = (gdb_byte *) xmalloc (len);
3286 si->len = len;
3287 si->prev = prev;
3288 memcpy (si->data, contents, len);
3289 return si;
3290 }
3291
3292 static struct stack_item *
3293 pop_stack_item (struct stack_item *si)
3294 {
3295 struct stack_item *dead = si;
3296 si = si->prev;
3297 xfree (dead->data);
3298 xfree (dead);
3299 return si;
3300 }
3301
3302 /* Implement the gdbarch type alignment method, overrides the generic
3303 alignment algorithm for anything that is arm specific. */
3304
3305 static ULONGEST
3306 arm_type_align (gdbarch *gdbarch, struct type *t)
3307 {
3308 t = check_typedef (t);
3309 if (t->code () == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3310 {
3311 /* Use the natural alignment for vector types (the same for
3312 scalar type), but the maximum alignment is 64-bit. */
3313 if (TYPE_LENGTH (t) > 8)
3314 return 8;
3315 else
3316 return TYPE_LENGTH (t);
3317 }
3318
3319 /* Allow the common code to calculate the alignment. */
3320 return 0;
3321 }
3322
/* Possible base types for a candidate for passing and returning in
   VFP registers.  The unit sizes are given by
   arm_vfp_cprc_unit_length.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* Not (yet) classified.  */
  VFP_CPRC_SINGLE,	/* 4-byte single-precision float.  */
  VFP_CPRC_DOUBLE,	/* 8-byte double-precision float.  */
  VFP_CPRC_VEC64,	/* 8-byte containerized vector.  */
  VFP_CPRC_VEC128	/* 16-byte containerized vector.  */
};
3334
3335 /* The length of one element of base type B. */
3336
3337 static unsigned
3338 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3339 {
3340 switch (b)
3341 {
3342 case VFP_CPRC_SINGLE:
3343 return 4;
3344 case VFP_CPRC_DOUBLE:
3345 return 8;
3346 case VFP_CPRC_VEC64:
3347 return 8;
3348 case VFP_CPRC_VEC128:
3349 return 16;
3350 default:
3351 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3352 (int) b);
3353 }
3354 }
3355
3356 /* The character ('s', 'd' or 'q') for the type of VFP register used
3357 for passing base type B. */
3358
3359 static int
3360 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3361 {
3362 switch (b)
3363 {
3364 case VFP_CPRC_SINGLE:
3365 return 's';
3366 case VFP_CPRC_DOUBLE:
3367 return 'd';
3368 case VFP_CPRC_VEC64:
3369 return 'd';
3370 case VFP_CPRC_VEC128:
3371 return 'q';
3372 default:
3373 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3374 (int) b);
3375 }
3376 }
3377
3378 /* Determine whether T may be part of a candidate for passing and
3379 returning in VFP registers, ignoring the limit on the total number
3380 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3381 classification of the first valid component found; if it is not
3382 VFP_CPRC_UNKNOWN, all components must have the same classification
3383 as *BASE_TYPE. If it is found that T contains a type not permitted
3384 for passing and returning in VFP registers, a type differently
3385 classified from *BASE_TYPE, or two types differently classified
3386 from each other, return -1, otherwise return the total number of
3387 base-type elements found (possibly 0 in an empty structure or
3388 array). Vector types are not currently supported, matching the
3389 generic AAPCS support. */
3390
static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (t->code ())
    {
    case TYPE_CODE_FLT:
      /* A scalar float classifies as one single- or double-precision
	 element, and must agree with any prior classification.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

	 so they count as two elements of the underlying float type.  */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    /* An ordinary array: classify the element type once, then
	       derive the element count from the total size.  */
	    int count;
	    unsigned unitlen;

	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* A struct's element count is the sum over its non-static
	   fields; all fields must share the same classification.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < t->num_fields (); i++)
	  {
	    int sub_count = 0;

	    if (!field_is_static (&t->field (i)))
	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
						      base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  /* Extra padding makes the struct ineligible.  */
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union's element count is the maximum over its members.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < t->num_fields (); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  /* Any other type code is not a CPRC candidate.  */
  return -1;
}
3556
3557 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3558 if passed to or returned from a non-variadic function with the VFP
3559 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3560 *BASE_TYPE to the base type for T and *COUNT to the number of
3561 elements of that base type before returning. */
3562
3563 static int
3564 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3565 int *count)
3566 {
3567 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3568 int c = arm_vfp_cprc_sub_candidate (t, &b);
3569 if (c <= 0 || c > 4)
3570 return 0;
3571 *base_type = b;
3572 *count = c;
3573 return 1;
3574 }
3575
3576 /* Return 1 if the VFP ABI should be used for passing arguments to and
3577 returning values from a function of type FUNC_TYPE, 0
3578 otherwise. */
3579
3580 static int
3581 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3582 {
3583 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3584 /* Variadic functions always use the base ABI. Assume that functions
3585 without debug info are not variadic. */
3586 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3587 return 0;
3588 /* The VFP ABI is only supported as a variant of AAPCS. */
3589 if (tdep->arm_abi != ARM_ABI_AAPCS)
3590 return 0;
3591 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3592 }
3593
3594 /* We currently only support passing parameters in integer registers, which
3595 conforms with GCC's default model, and VFP argument passing following
3596 the VFP variant of AAPCS. Several other variants exist and
3597 we should probably support some of them based on the selected ABI. */
3598
static CORE_ADDR
arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
		     struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
		     struct value **args, CORE_ADDR sp,
		     function_call_return_method return_method,
		     CORE_ADDR struct_addr)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  int argnum;
  int argreg;
  int nstack;
  struct stack_item *si = NULL;
  int use_vfp_abi;
  struct type *ftype;
  /* Bitmask of free single-precision VFP registers s0..s15; a set bit
     means the register is still available for argument passing.  */
  unsigned vfp_regs_free = (1 << 16) - 1;

  /* Determine the type of this function and whether the VFP ABI
     applies.  */
  ftype = check_typedef (value_type (function));
  if (ftype->code () == TYPE_CODE_PTR)
    ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
  use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);

  /* Set the return address.  For the ARM, the return breakpoint is
     always at BP_ADDR.  */
  if (arm_pc_is_thumb (gdbarch, bp_addr))
    bp_addr |= 1;
  regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);

  /* Walk through the list of args and determine how large a temporary
     stack is required.  Need to take care here as structs may be
     passed on the stack, and we have to push them.  */
  nstack = 0;

  argreg = ARM_A1_REGNUM;
  /* NOTE(review): NSTACK was already zeroed just above; this second
     assignment is redundant but harmless.  */
  nstack = 0;

  /* The struct_return pointer occupies the first parameter
     passing register.  */
  if (return_method == return_method_struct)
    {
      if (arm_debug)
	fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
			    gdbarch_register_name (gdbarch, argreg),
			    paddress (gdbarch, struct_addr));
      regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
      argreg++;
    }

  for (argnum = 0; argnum < nargs; argnum++)
    {
      int len;
      struct type *arg_type;
      struct type *target_type;
      enum type_code typecode;
      const bfd_byte *val;
      int align;
      enum arm_vfp_cprc_base_type vfp_base_type;
      int vfp_base_count;
      int may_use_core_reg = 1;

      arg_type = check_typedef (value_type (args[argnum]));
      len = TYPE_LENGTH (arg_type);
      target_type = TYPE_TARGET_TYPE (arg_type);
      typecode = arg_type->code ();
      val = value_contents (args[argnum]);

      align = type_align (arg_type);
      /* Round alignment up to a whole number of words.  */
      align = (align + ARM_INT_REGISTER_SIZE - 1)
		& ~(ARM_INT_REGISTER_SIZE - 1);
      /* Different ABIs have different maximum alignments.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
	{
	  /* The APCS ABI only requires word alignment.  */
	  align = ARM_INT_REGISTER_SIZE;
	}
      else
	{
	  /* The AAPCS requires at most doubleword alignment.  */
	  if (align > ARM_INT_REGISTER_SIZE * 2)
	    align = ARM_INT_REGISTER_SIZE * 2;
	}

      if (use_vfp_abi
	  && arm_vfp_call_candidate (arg_type, &vfp_base_type,
				     &vfp_base_count))
	{
	  int regno;
	  int unit_length;
	  int shift;
	  unsigned mask;

	  /* Because this is a CPRC it cannot go in a core register or
	     cause a core register to be skipped for alignment.
	     Either it goes in VFP registers and the rest of this loop
	     iteration is skipped for this argument, or it goes on the
	     stack (and the stack alignment code is correct for this
	     case).  */
	  may_use_core_reg = 0;

	  /* Find the lowest-numbered run of free VFP registers big
	     enough to hold all of this argument's elements.  SHIFT is
	     the number of s-register slots per element.  */
	  unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
	  shift = unit_length / 4;
	  mask = (1 << (shift * vfp_base_count)) - 1;
	  for (regno = 0; regno < 16; regno += shift)
	    if (((vfp_regs_free >> regno) & mask) == mask)
	      break;

	  if (regno < 16)
	    {
	      int reg_char;
	      int reg_scaled;
	      int i;

	      /* Mark the registers as used and write the elements one
		 VFP register at a time.  */
	      vfp_regs_free &= ~(mask << regno);
	      reg_scaled = regno / shift;
	      reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
	      for (i = 0; i < vfp_base_count; i++)
		{
		  char name_buf[4];
		  int regnum;
		  if (reg_char == 'q')
		    arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
					 val + i * unit_length);
		  else
		    {
		      xsnprintf (name_buf, sizeof (name_buf), "%c%d",
				 reg_char, reg_scaled + i);
		      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
							    strlen (name_buf));
		      regcache->cooked_write (regnum, val + i * unit_length);
		    }
		}
	      continue;
	    }
	  else
	    {
	      /* This CPRC could not go in VFP registers, so all VFP
		 registers are now marked as used.  */
	      vfp_regs_free = 0;
	    }
	}

      /* Push stack padding for doubleword alignment.  */
      if (nstack & (align - 1))
	{
	  si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
	  nstack += ARM_INT_REGISTER_SIZE;
	}

      /* Doubleword aligned quantities must go in even register pairs.  */
      if (may_use_core_reg
	  && argreg <= ARM_LAST_ARG_REGNUM
	  && align > ARM_INT_REGISTER_SIZE
	  && argreg & 1)
	argreg++;

      /* If the argument is a pointer to a function, and it is a
	 Thumb function, create a LOCAL copy of the value and set
	 the THUMB bit in it.  */
      if (TYPE_CODE_PTR == typecode
	  && target_type != NULL
	  && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
	{
	  CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
	  if (arm_pc_is_thumb (gdbarch, regval))
	    {
	      bfd_byte *copy = (bfd_byte *) alloca (len);
	      store_unsigned_integer (copy, len, byte_order,
				      MAKE_THUMB_ADDR (regval));
	      val = copy;
	    }
	}

      /* Copy the argument to general registers or the stack in
	 register-sized pieces.  Large arguments are split between
	 registers and stack.  */
      while (len > 0)
	{
	  int partial_len = len < ARM_INT_REGISTER_SIZE
			    ? len : ARM_INT_REGISTER_SIZE;
	  CORE_ADDR regval
	    = extract_unsigned_integer (val, partial_len, byte_order);

	  if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
	    {
	      /* The argument is being passed in a general purpose
		 register.  */
	      if (byte_order == BFD_ENDIAN_BIG)
		regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
	      if (arm_debug)
		fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
				    argnum,
				    gdbarch_register_name
				      (gdbarch, argreg),
				    phex (regval, ARM_INT_REGISTER_SIZE));
	      regcache_cooked_write_unsigned (regcache, argreg, regval);
	      argreg++;
	    }
	  else
	    {
	      gdb_byte buf[ARM_INT_REGISTER_SIZE];

	      memset (buf, 0, sizeof (buf));
	      store_unsigned_integer (buf, partial_len, byte_order, regval);

	      /* Push the arguments onto the stack.  */
	      if (arm_debug)
		fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
				    argnum, nstack);
	      si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
	      nstack += ARM_INT_REGISTER_SIZE;
	    }

	  len -= partial_len;
	  val += partial_len;
	}
    }
  /* If we have an odd number of words to push, then decrement the stack
     by one word now, so first stack argument will be dword aligned.  */
  if (nstack & 4)
    sp -= 4;

  /* Write the pending stack items out, last-pushed first, so they end
     up in the original argument order in memory.  */
  while (si)
    {
      sp -= si->len;
      write_memory (sp, si->data, si->len);
      si = pop_stack_item (si);
    }

  /* Finally, update the SP register.  */
  regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);

  return sp;
}
3834
3835
3836 /* Always align the frame to an 8-byte boundary. This is required on
3837 some platforms and harmless on the rest. */
3838
3839 static CORE_ADDR
3840 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3841 {
3842 /* Align the stack to eight bytes. */
3843 return sp & ~ (CORE_ADDR) 7;
3844 }
3845
3846 static void
3847 print_fpu_flags (struct ui_file *file, int flags)
3848 {
3849 if (flags & (1 << 0))
3850 fputs_filtered ("IVO ", file);
3851 if (flags & (1 << 1))
3852 fputs_filtered ("DVZ ", file);
3853 if (flags & (1 << 2))
3854 fputs_filtered ("OFL ", file);
3855 if (flags & (1 << 3))
3856 fputs_filtered ("UFL ", file);
3857 if (flags & (1 << 4))
3858 fputs_filtered ("INX ", file);
3859 fputc_filtered ('\n', file);
3860 }
3861
/* Print interesting information about the floating point processor
   (if present) or emulator.  */
static void
arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
		      struct frame_info *frame, const char *args)
{
  unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
  int type;

  /* Bits 24-30 of the FPS register hold the FPU type; bit 31
     distinguishes hardware from software implementations.  */
  type = (status >> 24) & 127;
  if (status & (1 << 31))
    fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
  else
    fprintf_filtered (file, _("Software FPU type %d\n"), type);
  /* i18n: [floating point unit] mask */
  fputs_filtered (_("mask: "), file);
  print_fpu_flags (file, status >> 16);
  /* i18n: [floating point unit] flags */
  fputs_filtered (_("flags: "), file);
  print_fpu_flags (file, status);
}
3883
3884 /* Construct the ARM extended floating point type. */
3885 static struct type *
3886 arm_ext_type (struct gdbarch *gdbarch)
3887 {
3888 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3889
3890 if (!tdep->arm_ext_type)
3891 tdep->arm_ext_type
3892 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3893 floatformats_arm_ext);
3894
3895 return tdep->arm_ext_type;
3896 }
3897
/* Return the union type used to display a 64-bit NEON D register,
   building it lazily and caching it in the tdep.  The union offers
   integer views of several widths plus float and double views.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      /* Each field is a different view of the same 64 bits.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      /* Mark the whole union as a vector type.  */
      TYPE_VECTOR (t) = 1;
      t->set_name ("neon_d");
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
3929
3930 /* FIXME: The vector types are not correctly ordered on big-endian
3931 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3932 bits of d0 - regardless of what unit size is being held in d0. So
3933 the offset of the first uint8 in d0 is 7, but the offset of the
3934 first float is 4. This code works as-is for little-endian
3935 targets. */
3936
/* Return the union type used to display a 128-bit NEON Q register;
   the 128-bit analogue of arm_neon_double_type, likewise built lazily
   and cached in the tdep.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      /* Each field is a different view of the same 128 bits.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      /* Mark the whole union as a vector type.  */
      TYPE_VECTOR (t) = 1;
      t->set_name ("neon_q");
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
3968
3969 /* Return the GDB type object for the "standard" data type of data in
3970 register N. */
3971
static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* The first 32 pseudo registers are single-precision VFP views when
     VFP pseudos are available.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* The following 16 pseudo registers are the NEON quad views.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && t->code () == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  /* No target description: fall back to fixed built-in types.  */
  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4018
4019 /* Map a DWARF register REGNUM onto the appropriate GDB register
4020 number. */
4021
static int
arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
{
  /* Core integer regs.  */
  if (reg >= 0 && reg <= 15)
    return reg;

  /* Legacy FPA encoding.  These were once used in a way which
     overlapped with VFP register numbering, so their use is
     discouraged, but GDB doesn't support the ARM toolchain
     which used them for VFP.  */
  if (reg >= 16 && reg <= 23)
    return ARM_F0_REGNUM + reg - 16;

  /* New assignments for the FPA registers.  */
  if (reg >= 96 && reg <= 103)
    return ARM_F0_REGNUM + reg - 96;

  /* WMMX register assignments.  */
  if (reg >= 104 && reg <= 111)
    return ARM_WCGR0_REGNUM + reg - 104;

  if (reg >= 112 && reg <= 127)
    return ARM_WR0_REGNUM + reg - 112;

  if (reg >= 192 && reg <= 199)
    return ARM_WC0_REGNUM + reg - 192;

  /* VFP v2 registers.  A double precision value is actually
     in d1 rather than s2, but the ABI only defines numbering
     for the single precision registers.  This will "just work"
     in GDB for little endian targets (we'll read eight bytes,
     starting in s0 and then progressing to s1), but will be
     reversed on big endian targets with VFP.  This won't
     be a problem for the new Neon quad registers; you're supposed
     to use DW_OP_piece for those.  */
  if (reg >= 64 && reg <= 95)
    {
      char name_buf[4];

      /* Resolve "sN" by name so pseudo-register numbering is handled
	 uniformly.  */
      xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
					  strlen (name_buf));
    }

  /* VFP v3 / Neon registers.  This range is also used for VFP v2
     registers, except that it now describes d0 instead of s0.  */
  if (reg >= 256 && reg <= 287)
    {
      char name_buf[4];

      xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
      return user_reg_map_name_to_regnum (gdbarch, name_buf,
					  strlen (name_buf));
    }

  /* Unknown DWARF register number.  */
  return -1;
}
4080
/* Map GDB internal REGNUM onto the Arm simulator register numbers.  */
static int
arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
{
  int reg = regnum;
  gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));

  /* iWMMXt data, control and scalar-control registers map onto
     dedicated simulator coprocessor ranges.  */
  if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
    return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;

  if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
    return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;

  if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
    return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;

  /* Otherwise the layout is core registers, then FPA registers, then
     status registers, in that order.  */
  if (reg < NUM_GREGS)
    return SIM_ARM_R0_REGNUM + reg;
  reg -= NUM_GREGS;

  if (reg < NUM_FREGS)
    return SIM_ARM_FP0_REGNUM + reg;
  reg -= NUM_FREGS;

  if (reg < NUM_SREGS)
    return SIM_ARM_FPS_REGNUM + reg;
  reg -= NUM_SREGS;

  /* The assertion above should make this unreachable.  */
  internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
}
4111
4112 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4113 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4114 NULL if an error occurs. BUF is freed. */
4115
4116 static gdb_byte *
4117 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4118 int old_len, int new_len)
4119 {
4120 gdb_byte *new_buf;
4121 int bytes_to_read = new_len - old_len;
4122
4123 new_buf = (gdb_byte *) xmalloc (new_len);
4124 memcpy (new_buf + bytes_to_read, buf, old_len);
4125 xfree (buf);
4126 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4127 {
4128 xfree (new_buf);
4129 return NULL;
4130 }
4131 return new_buf;
4132 }
4133
4134 /* An IT block is at most the 2-byte IT instruction followed by
4135 four 4-byte instructions. The furthest back we must search to
4136 find an IT block that affects the current instruction is thus
4137 2 + 3 * 4 == 14 bytes. */
4138 #define MAX_IT_BLOCK_PREFIX 14
4139
4140 /* Use a quick scan if there are more than this many bytes of
4141 code. */
4142 #define IT_SCAN_THRESHOLD 32
4143
4144 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4145 A breakpoint in an IT block may not be hit, depending on the
4146 condition flags. */
4147 static CORE_ADDR
4148 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4149 {
4150 gdb_byte *buf;
4151 char map_type;
4152 CORE_ADDR boundary, func_start;
4153 int buf_len;
4154 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4155 int i, any, last_it, last_it_count;
4156
4157 /* If we are using BKPT breakpoints, none of this is necessary. */
4158 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4159 return bpaddr;
4160
4161 /* ARM mode does not have this problem. */
4162 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4163 return bpaddr;
4164
4165 /* We are setting a breakpoint in Thumb code that could potentially
4166 contain an IT block. The first step is to find how much Thumb
4167 code there is; we do not need to read outside of known Thumb
4168 sequences. */
4169 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4170 if (map_type == 0)
4171 /* Thumb-2 code must have mapping symbols to have a chance. */
4172 return bpaddr;
4173
4174 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4175
4176 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4177 && func_start > boundary)
4178 boundary = func_start;
4179
4180 /* Search for a candidate IT instruction. We have to do some fancy
4181 footwork to distinguish a real IT instruction from the second
4182 half of a 32-bit instruction, but there is no need for that if
4183 there's no candidate. */
4184 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4185 if (buf_len == 0)
4186 /* No room for an IT instruction. */
4187 return bpaddr;
4188
4189 buf = (gdb_byte *) xmalloc (buf_len);
4190 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4191 return bpaddr;
4192 any = 0;
4193 for (i = 0; i < buf_len; i += 2)
4194 {
4195 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4196 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4197 {
4198 any = 1;
4199 break;
4200 }
4201 }
4202
4203 if (any == 0)
4204 {
4205 xfree (buf);
4206 return bpaddr;
4207 }
4208
4209 /* OK, the code bytes before this instruction contain at least one
4210 halfword which resembles an IT instruction. We know that it's
4211 Thumb code, but there are still two possibilities. Either the
4212 halfword really is an IT instruction, or it is the second half of
4213 a 32-bit Thumb instruction. The only way we can tell is to
4214 scan forwards from a known instruction boundary. */
4215 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4216 {
4217 int definite;
4218
4219 /* There's a lot of code before this instruction. Start with an
4220 optimistic search; it's easy to recognize halfwords that can
4221 not be the start of a 32-bit instruction, and use that to
4222 lock on to the instruction boundaries. */
4223 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4224 if (buf == NULL)
4225 return bpaddr;
4226 buf_len = IT_SCAN_THRESHOLD;
4227
4228 definite = 0;
4229 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4230 {
4231 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4232 if (thumb_insn_size (inst1) == 2)
4233 {
4234 definite = 1;
4235 break;
4236 }
4237 }
4238
4239 /* At this point, if DEFINITE, BUF[I] is the first place we
4240 are sure that we know the instruction boundaries, and it is far
4241 enough from BPADDR that we could not miss an IT instruction
4242 affecting BPADDR. If ! DEFINITE, give up - start from a
4243 known boundary. */
4244 if (! definite)
4245 {
4246 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4247 bpaddr - boundary);
4248 if (buf == NULL)
4249 return bpaddr;
4250 buf_len = bpaddr - boundary;
4251 i = 0;
4252 }
4253 }
4254 else
4255 {
4256 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4257 if (buf == NULL)
4258 return bpaddr;
4259 buf_len = bpaddr - boundary;
4260 i = 0;
4261 }
4262
4263 /* Scan forwards. Find the last IT instruction before BPADDR. */
4264 last_it = -1;
4265 last_it_count = 0;
4266 while (i < buf_len)
4267 {
4268 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4269 last_it_count--;
4270 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4271 {
4272 last_it = i;
4273 if (inst1 & 0x0001)
4274 last_it_count = 4;
4275 else if (inst1 & 0x0002)
4276 last_it_count = 3;
4277 else if (inst1 & 0x0004)
4278 last_it_count = 2;
4279 else
4280 last_it_count = 1;
4281 }
4282 i += thumb_insn_size (inst1);
4283 }
4284
4285 xfree (buf);
4286
4287 if (last_it == -1)
4288 /* There wasn't really an IT instruction after all. */
4289 return bpaddr;
4290
4291 if (last_it_count < 1)
4292 /* It was too far away. */
4293 return bpaddr;
4294
4295 /* This really is a trouble spot. Move the breakpoint to the IT
4296 instruction. */
4297 return bpaddr - buf_len + last_it;
4298 }
4299
4300 /* ARM displaced stepping support.
4301
4302 Generally ARM displaced stepping works as follows:
4303
4304 1. When an instruction is to be single-stepped, it is first decoded by
4305 arm_process_displaced_insn. Depending on the type of instruction, it is
4306 then copied to a scratch location, possibly in a modified form. The
4307 copy_* set of functions performs such modification, as necessary. A
4308 breakpoint is placed after the modified instruction in the scratch space
4309 to return control to GDB. Note in particular that instructions which
4310 modify the PC will no longer do so after modification.
4311
4312 2. The instruction is single-stepped, by setting the PC to the scratch
4313 location address, and resuming. Control returns to GDB when the
4314 breakpoint is hit.
4315
4316 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4317 function used for the current instruction. This function's job is to
4318 put the CPU/memory state back to what it would have been if the
4319 instruction had been executed unmodified in its original location. */
4320
4321 /* NOP instruction (mov r0, r0). */
4322 #define ARM_NOP 0xe1a00000
4323 #define THUMB_NOP 0x4600
4324
4325 /* Helper for register reads for displaced stepping. In particular, this
4326 returns the PC as it would be seen by the instruction at its original
4327 location. */
4328
4329 ULONGEST
4330 displaced_read_reg (struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4331 int regno)
4332 {
4333 ULONGEST ret;
4334 CORE_ADDR from = dsc->insn_addr;
4335
4336 if (regno == ARM_PC_REGNUM)
4337 {
4338 /* Compute pipeline offset:
4339 - When executing an ARM instruction, PC reads as the address of the
4340 current instruction plus 8.
4341 - When executing a Thumb instruction, PC reads as the address of the
4342 current instruction plus 4. */
4343
4344 if (!dsc->is_thumb)
4345 from += 8;
4346 else
4347 from += 4;
4348
4349 if (debug_displaced)
4350 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4351 (unsigned long) from);
4352 return (ULONGEST) from;
4353 }
4354 else
4355 {
4356 regcache_cooked_read_unsigned (regs, regno, &ret);
4357 if (debug_displaced)
4358 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4359 regno, (unsigned long) ret);
4360 return ret;
4361 }
4362 }
4363
4364 static int
4365 displaced_in_arm_mode (struct regcache *regs)
4366 {
4367 ULONGEST ps;
4368 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4369
4370 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4371
4372 return (ps & t_bit) == 0;
4373 }
4374
4375 /* Write to the PC as from a branch instruction. */
4376
4377 static void
4378 branch_write_pc (struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4379 ULONGEST val)
4380 {
4381 if (!dsc->is_thumb)
4382 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4383 architecture versions < 6. */
4384 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4385 val & ~(ULONGEST) 0x3);
4386 else
4387 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4388 val & ~(ULONGEST) 0x1);
4389 }
4390
4391 /* Write to the PC as from a branch-exchange instruction. */
4392
4393 static void
4394 bx_write_pc (struct regcache *regs, ULONGEST val)
4395 {
4396 ULONGEST ps;
4397 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4398
4399 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4400
4401 if ((val & 1) == 1)
4402 {
4403 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4404 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4405 }
4406 else if ((val & 2) == 0)
4407 {
4408 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4409 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4410 }
4411 else
4412 {
4413 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4414 mode, align dest to 4 bytes). */
4415 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4416 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4417 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4418 }
4419 }
4420
4421 /* Write to the PC as if from a load instruction. */
4422
4423 static void
4424 load_write_pc (struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4425 ULONGEST val)
4426 {
4427 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4428 bx_write_pc (regs, val);
4429 else
4430 branch_write_pc (regs, dsc, val);
4431 }
4432
4433 /* Write to the PC as if from an ALU instruction. */
4434
4435 static void
4436 alu_write_pc (struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4437 ULONGEST val)
4438 {
4439 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4440 bx_write_pc (regs, val);
4441 else
4442 branch_write_pc (regs, dsc, val);
4443 }
4444
4445 /* Helper for writing to registers for displaced stepping. Writing to the PC
4446 has a varying effects depending on the instruction which does the write:
4447 this is controlled by the WRITE_PC argument. */
4448
4449 void
4450 displaced_write_reg (struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4451 int regno, ULONGEST val, enum pc_write_style write_pc)
4452 {
4453 if (regno == ARM_PC_REGNUM)
4454 {
4455 if (debug_displaced)
4456 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4457 (unsigned long) val);
4458 switch (write_pc)
4459 {
4460 case BRANCH_WRITE_PC:
4461 branch_write_pc (regs, dsc, val);
4462 break;
4463
4464 case BX_WRITE_PC:
4465 bx_write_pc (regs, val);
4466 break;
4467
4468 case LOAD_WRITE_PC:
4469 load_write_pc (regs, dsc, val);
4470 break;
4471
4472 case ALU_WRITE_PC:
4473 alu_write_pc (regs, dsc, val);
4474 break;
4475
4476 case CANNOT_WRITE_PC:
4477 warning (_("Instruction wrote to PC in an unexpected way when "
4478 "single-stepping"));
4479 break;
4480
4481 default:
4482 internal_error (__FILE__, __LINE__,
4483 _("Invalid argument to displaced_write_reg"));
4484 }
4485
4486 dsc->wrote_to_pc = 1;
4487 }
4488 else
4489 {
4490 if (debug_displaced)
4491 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4492 regno, (unsigned long) val);
4493 regcache_cooked_write_unsigned (regs, regno, val);
4494 }
4495 }
4496
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns return 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      /* Advance LOWBIT to the least significant set bit of BITMASK.  */
      while (lowbit != 0 && (bitmask & lowbit) == 0)
	lowbit <<= 1;

      if (lowbit == 0)
	break;

      /* A 4-bit register field anchored at LOWBIT.  */
      uint32_t field_mask = lowbit * 0xf;

      /* All four bits set means the field names r15 (the PC).  */
      if ((insn & field_mask) == field_mask)
	return 1;

      bitmask &= ~field_mask;
    }

  return 0;
}
4528
4529 /* The simplest copy function. Many instructions have the same effect no
4530 matter what address they are executed at: in those cases, use this. */
4531
4532 static int
4533 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4534 const char *iname, arm_displaced_step_copy_insn_closure *dsc)
4535 {
4536 if (debug_displaced)
4537 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4538 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4539 iname);
4540
4541 dsc->modinsn[0] = insn;
4542
4543 return 0;
4544 }
4545
4546 static int
4547 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4548 uint16_t insn2, const char *iname,
4549 arm_displaced_step_copy_insn_closure *dsc)
4550 {
4551 if (debug_displaced)
4552 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4553 "opcode/class '%s' unmodified\n", insn1, insn2,
4554 iname);
4555
4556 dsc->modinsn[0] = insn1;
4557 dsc->modinsn[1] = insn2;
4558 dsc->numinsns = 2;
4559
4560 return 0;
4561 }
4562
4563 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4564 modification. */
4565 static int
4566 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4567 const char *iname,
4568 arm_displaced_step_copy_insn_closure *dsc)
4569 {
4570 if (debug_displaced)
4571 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4572 "opcode/class '%s' unmodified\n", insn,
4573 iname);
4574
4575 dsc->modinsn[0] = insn;
4576
4577 return 0;
4578 }
4579
4580 /* Preload instructions with immediate offset. */
4581
4582 static void
4583 cleanup_preload (struct gdbarch *gdbarch,
4584 struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
4585 {
4586 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4587 if (!dsc->u.preload.immed)
4588 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4589 }
4590
4591 static void
4592 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4593 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
4594 {
4595 ULONGEST rn_val;
4596 /* Preload instructions:
4597
4598 {pli/pld} [rn, #+/-imm]
4599 ->
4600 {pli/pld} [r0, #+/-imm]. */
4601
4602 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4603 rn_val = displaced_read_reg (regs, dsc, rn);
4604 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4605 dsc->u.preload.immed = 1;
4606
4607 dsc->cleanup = &cleanup_preload;
4608 }
4609
4610 static int
4611 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4612 arm_displaced_step_copy_insn_closure *dsc)
4613 {
4614 unsigned int rn = bits (insn, 16, 19);
4615
4616 if (!insn_references_pc (insn, 0x000f0000ul))
4617 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4618
4619 if (debug_displaced)
4620 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4621 (unsigned long) insn);
4622
4623 dsc->modinsn[0] = insn & 0xfff0ffff;
4624
4625 install_preload (gdbarch, regs, dsc, rn);
4626
4627 return 0;
4628 }
4629
/* Copy a 32-bit Thumb-2 {pli,pld} (preload) instruction.  INSN1/INSN2
   are the two instruction halfwords; a PC-relative preload is rewritten
   to use scratch registers r0/r1.  Always returns 0.  */
static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  /* U bit: add (1) or subtract (0) the immediate offset.  */
  unsigned int u_bit = bit (insn1, 7);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  /* Only the PC-relative forms need any rewriting.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Fold the U bit into the sign of the offset.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  /* Reads as the pipeline PC (instruction address + 4 in Thumb state).  */
  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
4677
4678 /* Preload instructions with register offset. */
4679
4680 static void
4681 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4682 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
4683 unsigned int rm)
4684 {
4685 ULONGEST rn_val, rm_val;
4686
4687 /* Preload register-offset instructions:
4688
4689 {pli/pld} [rn, rm {, shift}]
4690 ->
4691 {pli/pld} [r0, r1 {, shift}]. */
4692
4693 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4694 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4695 rn_val = displaced_read_reg (regs, dsc, rn);
4696 rm_val = displaced_read_reg (regs, dsc, rm);
4697 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4698 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4699 dsc->u.preload.immed = 0;
4700
4701 dsc->cleanup = &cleanup_preload;
4702 }
4703
4704 static int
4705 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4706 struct regcache *regs,
4707 arm_displaced_step_copy_insn_closure *dsc)
4708 {
4709 unsigned int rn = bits (insn, 16, 19);
4710 unsigned int rm = bits (insn, 0, 3);
4711
4712
4713 if (!insn_references_pc (insn, 0x000f000ful))
4714 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4715
4716 if (debug_displaced)
4717 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4718 (unsigned long) insn);
4719
4720 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4721
4722 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4723 return 0;
4724 }
4725
4726 /* Copy/cleanup coprocessor load and store instructions. */
4727
4728 static void
4729 cleanup_copro_load_store (struct gdbarch *gdbarch,
4730 struct regcache *regs,
4731 arm_displaced_step_copy_insn_closure *dsc)
4732 {
4733 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4734
4735 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4736
4737 if (dsc->u.ldst.writeback)
4738 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4739 }
4740
4741 static void
4742 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4743 arm_displaced_step_copy_insn_closure *dsc,
4744 int writeback, unsigned int rn)
4745 {
4746 ULONGEST rn_val;
4747
4748 /* Coprocessor load/store instructions:
4749
4750 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4751 ->
4752 {stc/stc2} [r0, #+/-imm].
4753
4754 ldc/ldc2 are handled identically. */
4755
4756 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4757 rn_val = displaced_read_reg (regs, dsc, rn);
4758 /* PC should be 4-byte aligned. */
4759 rn_val = rn_val & 0xfffffffc;
4760 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4761
4762 dsc->u.ldst.writeback = writeback;
4763 dsc->u.ldst.rn = rn;
4764
4765 dsc->cleanup = &cleanup_copro_load_store;
4766 }
4767
4768 static int
4769 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4770 struct regcache *regs,
4771 arm_displaced_step_copy_insn_closure *dsc)
4772 {
4773 unsigned int rn = bits (insn, 16, 19);
4774
4775 if (!insn_references_pc (insn, 0x000f0000ul))
4776 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4777
4778 if (debug_displaced)
4779 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4780 "load/store insn %.8lx\n", (unsigned long) insn);
4781
4782 dsc->modinsn[0] = insn & 0xfff0ffff;
4783
4784 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4785
4786 return 0;
4787 }
4788
4789 static int
4790 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4791 uint16_t insn2, struct regcache *regs,
4792 arm_displaced_step_copy_insn_closure *dsc)
4793 {
4794 unsigned int rn = bits (insn1, 0, 3);
4795
4796 if (rn != ARM_PC_REGNUM)
4797 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4798 "copro load/store", dsc);
4799
4800 if (debug_displaced)
4801 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4802 "load/store insn %.4x%.4x\n", insn1, insn2);
4803
4804 dsc->modinsn[0] = insn1 & 0xfff0;
4805 dsc->modinsn[1] = insn2;
4806 dsc->numinsns = 2;
4807
4808 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4809 doesn't support writeback, so pass 0. */
4810 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4811
4812 return 0;
4813 }
4814
4815 /* Clean up branch instructions (actually perform the branch, by setting
4816 PC). */
4817
4818 static void
4819 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4820 arm_displaced_step_copy_insn_closure *dsc)
4821 {
4822 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4823 int branch_taken = condition_true (dsc->u.branch.cond, status);
4824 enum pc_write_style write_pc = dsc->u.branch.exchange
4825 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4826
4827 if (!branch_taken)
4828 return;
4829
4830 if (dsc->u.branch.link)
4831 {
4832 /* The value of LR should be the next insn of current one. In order
4833 not to confuse logic handling later insn `bx lr', if current insn mode
4834 is Thumb, the bit 0 of LR value should be set to 1. */
4835 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4836
4837 if (dsc->is_thumb)
4838 next_insn_addr |= 0x1;
4839
4840 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4841 CANNOT_WRITE_PC);
4842 }
4843
4844 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4845 }
4846
4847 /* Copy B/BL/BLX instructions with immediate destinations. */
4848
4849 static void
4850 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4851 arm_displaced_step_copy_insn_closure *dsc,
4852 unsigned int cond, int exchange, int link, long offset)
4853 {
4854 /* Implement "BL<cond> <label>" as:
4855
4856 Preparation: cond <- instruction condition
4857 Insn: mov r0, r0 (nop)
4858 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4859
4860 B<cond> similar, but don't set r14 in cleanup. */
4861
4862 dsc->u.branch.cond = cond;
4863 dsc->u.branch.link = link;
4864 dsc->u.branch.exchange = exchange;
4865
4866 dsc->u.branch.dest = dsc->insn_addr;
4867 if (link && exchange)
4868 /* For BLX, offset is computed from the Align (PC, 4). */
4869 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4870
4871 if (dsc->is_thumb)
4872 dsc->u.branch.dest += 4 + offset;
4873 else
4874 dsc->u.branch.dest += 8 + offset;
4875
4876 dsc->cleanup = &cleanup_branch;
4877 }
/* Decode and copy an ARM B, BL or BLX (immediate) instruction.  The
   scratch copy is a NOP; cleanup_branch performs the actual branch.  */
static int
arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
		   struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* BLX (immediate) is encoded with the 0xf "condition" field and always
     links.  */
  int exchange = (cond == 0xf);
  int link = exchange || bit (insn, 24);
  long offset;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
			(unsigned long) insn);
  if (exchange)
    /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
       then arrange the switch into Thumb mode.  */
    offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
  else
    offset = bits (insn, 0, 23) << 2;

  /* Sign-extend the 26-bit offset (bit 25 is the sign bit).  */
  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;

  dsc->modinsn[0] = ARM_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4906
/* Decode and copy a 32-bit Thumb-2 B, BL or BLX instruction.  The scratch
   copy is a Thumb NOP; cleanup_branch performs the actual branch.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      arm_displaced_step_copy_insn_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  /* J1/J2 from the second halfword and the sign bit S from the first;
     for the long encodings the high offset bits are I1 = NOT (J1 EOR S)
     and I2 = NOT (J2 EOR S).  */
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  /* Unconditional branch with the long (25-bit) immediate.  */
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  /* Conditional branch: shorter immediate, condition in
	     bits 6-9 of the first halfword.  */
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL/BLX: 25-bit immediate; BLX destinations are word-aligned, so
	 its low offset bits are shifted by 2 instead of 1.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4961
/* Copy B Thumb instructions.  Handles the 16-bit conditional (T1) and
   unconditional (T2) encodings; the scratch copy is a NOP and
   cleanup_branch performs the actual branch.  Returns 0.  */
static int
thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
	      arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd)		/* Encoding T1: conditional branch.  */
    {
      /* offset = SignExtend (imm8:0, 32); shifting INSN left by one
	 appends the implicit zero bit before sign-extracting.  */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* Unconditional branch; offset = SignExtend (imm11:0, 32).  */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  /* Destination is relative to the pipeline PC (insn address + 4).  */
  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = from + 4 + offset;

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
5000
5001 /* Copy BX/BLX with register-specified destinations. */
5002
5003 static void
5004 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5005 arm_displaced_step_copy_insn_closure *dsc, int link,
5006 unsigned int cond, unsigned int rm)
5007 {
5008 /* Implement {BX,BLX}<cond> <reg>" as:
5009
5010 Preparation: cond <- instruction condition
5011 Insn: mov r0, r0 (nop)
5012 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5013
5014 Don't set r14 in cleanup for BX. */
5015
5016 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5017
5018 dsc->u.branch.cond = cond;
5019 dsc->u.branch.link = link;
5020
5021 dsc->u.branch.exchange = 1;
5022
5023 dsc->cleanup = &cleanup_branch;
5024 }
5025
5026 static int
5027 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5028 struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5029 {
5030 unsigned int cond = bits (insn, 28, 31);
5031 /* BX: x12xxx1x
5032 BLX: x12xxx3x. */
5033 int link = bit (insn, 5);
5034 unsigned int rm = bits (insn, 0, 3);
5035
5036 if (debug_displaced)
5037 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5038 (unsigned long) insn);
5039
5040 dsc->modinsn[0] = ARM_NOP;
5041
5042 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5043 return 0;
5044 }
5045
5046 static int
5047 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5048 struct regcache *regs,
5049 arm_displaced_step_copy_insn_closure *dsc)
5050 {
5051 int link = bit (insn, 7);
5052 unsigned int rm = bits (insn, 3, 6);
5053
5054 if (debug_displaced)
5055 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5056 (unsigned short) insn);
5057
5058 dsc->modinsn[0] = THUMB_NOP;
5059
5060 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5061
5062 return 0;
5063 }
5064
5065
5066 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5067
5068 static void
5069 cleanup_alu_imm (struct gdbarch *gdbarch,
5070 struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5071 {
5072 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5073 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5074 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5075 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5076 }
5077
5078 static int
5079 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5080 arm_displaced_step_copy_insn_closure *dsc)
5081 {
5082 unsigned int rn = bits (insn, 16, 19);
5083 unsigned int rd = bits (insn, 12, 15);
5084 unsigned int op = bits (insn, 21, 24);
5085 int is_mov = (op == 0xd);
5086 ULONGEST rd_val, rn_val;
5087
5088 if (!insn_references_pc (insn, 0x000ff000ul))
5089 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5090
5091 if (debug_displaced)
5092 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5093 "%.8lx\n", is_mov ? "move" : "ALU",
5094 (unsigned long) insn);
5095
5096 /* Instruction is of form:
5097
5098 <op><cond> rd, [rn,] #imm
5099
5100 Rewrite as:
5101
5102 Preparation: tmp1, tmp2 <- r0, r1;
5103 r0, r1 <- rd, rn
5104 Insn: <op><cond> r0, r1, #imm
5105 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5106 */
5107
5108 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5109 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5110 rn_val = displaced_read_reg (regs, dsc, rn);
5111 rd_val = displaced_read_reg (regs, dsc, rd);
5112 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5113 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5114 dsc->rd = rd;
5115
5116 if (is_mov)
5117 dsc->modinsn[0] = insn & 0xfff00fff;
5118 else
5119 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5120
5121 dsc->cleanup = &cleanup_alu_imm;
5122
5123 return 0;
5124 }
5125
/* Copy a 32-bit Thumb-2 ALU-immediate instruction.  Only MOV reaches
   this routine (asserted below); PC references in Rm/Rd are rewritten to
   use scratch registers r0/r1.  Returns 0.  */
static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3);	/* Rn */
  rm = bits (insn2, 0, 3);	/* Rm */
  rd = bits (insn2, 8, 11);	/* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  /* Only PC references need rewriting.  */
  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Keep the first halfword; in the second, redirect Rm and Rd to
     scratch registers.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5177
5178 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5179
5180 static void
5181 cleanup_alu_reg (struct gdbarch *gdbarch,
5182 struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5183 {
5184 ULONGEST rd_val;
5185 int i;
5186
5187 rd_val = displaced_read_reg (regs, dsc, 0);
5188
5189 for (i = 0; i < 3; i++)
5190 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5191
5192 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5193 }
5194
5195 static void
5196 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5197 arm_displaced_step_copy_insn_closure *dsc,
5198 unsigned int rd, unsigned int rn, unsigned int rm)
5199 {
5200 ULONGEST rd_val, rn_val, rm_val;
5201
5202 /* Instruction is of form:
5203
5204 <op><cond> rd, [rn,] rm [, <shift>]
5205
5206 Rewrite as:
5207
5208 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5209 r0, r1, r2 <- rd, rn, rm
5210 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5211 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5212 */
5213
5214 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5215 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5216 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5217 rd_val = displaced_read_reg (regs, dsc, rd);
5218 rn_val = displaced_read_reg (regs, dsc, rn);
5219 rm_val = displaced_read_reg (regs, dsc, rm);
5220 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5221 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5222 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5223 dsc->rd = rd;
5224
5225 dsc->cleanup = &cleanup_alu_reg;
5226 }
5227
5228 static int
5229 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5230 arm_displaced_step_copy_insn_closure *dsc)
5231 {
5232 unsigned int op = bits (insn, 21, 24);
5233 int is_mov = (op == 0xd);
5234
5235 if (!insn_references_pc (insn, 0x000ff00ful))
5236 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5237
5238 if (debug_displaced)
5239 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5240 is_mov ? "move" : "ALU", (unsigned long) insn);
5241
5242 if (is_mov)
5243 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5244 else
5245 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5246
5247 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5248 bits (insn, 0, 3));
5249 return 0;
5250 }
5251
5252 static int
5253 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5254 struct regcache *regs,
5255 arm_displaced_step_copy_insn_closure *dsc)
5256 {
5257 unsigned rm, rd;
5258
5259 rm = bits (insn, 3, 6);
5260 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5261
5262 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5263 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5264
5265 if (debug_displaced)
5266 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5267 (unsigned short) insn);
5268
5269 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5270
5271 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5272
5273 return 0;
5274 }
5275
5276 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5277
5278 static void
5279 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5280 struct regcache *regs,
5281 arm_displaced_step_copy_insn_closure *dsc)
5282 {
5283 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5284 int i;
5285
5286 for (i = 0; i < 4; i++)
5287 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5288
5289 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5290 }
5291
5292 static void
5293 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5294 arm_displaced_step_copy_insn_closure *dsc,
5295 unsigned int rd, unsigned int rn, unsigned int rm,
5296 unsigned rs)
5297 {
5298 int i;
5299 ULONGEST rd_val, rn_val, rm_val, rs_val;
5300
5301 /* Instruction is of form:
5302
5303 <op><cond> rd, [rn,] rm, <shift> rs
5304
5305 Rewrite as:
5306
5307 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5308 r0, r1, r2, r3 <- rd, rn, rm, rs
5309 Insn: <op><cond> r0, r1, r2, <shift> r3
5310 Cleanup: tmp5 <- r0
5311 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5312 rd <- tmp5
5313 */
5314
5315 for (i = 0; i < 4; i++)
5316 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5317
5318 rd_val = displaced_read_reg (regs, dsc, rd);
5319 rn_val = displaced_read_reg (regs, dsc, rn);
5320 rm_val = displaced_read_reg (regs, dsc, rm);
5321 rs_val = displaced_read_reg (regs, dsc, rs);
5322 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5323 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5326 dsc->rd = rd;
5327 dsc->cleanup = &cleanup_alu_shifted_reg;
5328 }
5329
5330 static int
5331 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5332 struct regcache *regs,
5333 arm_displaced_step_copy_insn_closure *dsc)
5334 {
5335 unsigned int op = bits (insn, 21, 24);
5336 int is_mov = (op == 0xd);
5337 unsigned int rd, rn, rm, rs;
5338
5339 if (!insn_references_pc (insn, 0x000fff0ful))
5340 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5341
5342 if (debug_displaced)
5343 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5344 "%.8lx\n", is_mov ? "move" : "ALU",
5345 (unsigned long) insn);
5346
5347 rn = bits (insn, 16, 19);
5348 rm = bits (insn, 0, 3);
5349 rs = bits (insn, 8, 11);
5350 rd = bits (insn, 12, 15);
5351
5352 if (is_mov)
5353 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5354 else
5355 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5356
5357 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5358
5359 return 0;
5360 }
5361
5362 /* Clean up load instructions. */
5363
static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      arm_displaced_step_copy_insn_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* The displaced copy loaded into scratch r0 (and r1 for a doubleword)
     and left the possibly updated base address in r2; pick those up
     before the scratch registers are restored below.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the original values of the scratch registers (r3 was used
     only by the register-offset form).  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
5390
5391 /* Clean up store instructions. */
5392
5393 static void
5394 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5395 arm_displaced_step_copy_insn_closure *dsc)
5396 {
5397 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5398
5399 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5400 if (dsc->u.ldst.xfersize > 4)
5401 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5402 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5403 if (!dsc->u.ldst.immed)
5404 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5405 if (!dsc->u.ldst.restore_r4)
5406 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5407
5408 /* Writeback. */
5409 if (dsc->u.ldst.writeback)
5410 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5411 }
5412
5413 /* Copy "extra" load/store instructions. These are halfword/doubleword
5414 transfers, which have a different encoding to byte/word transfers. */
5415
static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
		      struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Tables indexed by OPCODE (computed below): whether the operation is a
     load, and its transfer size in bytes (8 means a doubleword transfer
     using rt and rt+1).  */
  char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  /* If the insn references the PC in neither Rt/Rn nor Rm, run it
     unmodified out of line.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
			"insn %.8lx\n", unprivileged ? "unprivileged " : "",
			(unsigned long) insn);

  /* Fold op1/op2 into a single index for the tables above.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
		    _("copy_extra_ld_st: instruction decode error"));

  /* Save the scratch registers; r3 is needed only for the register-offset
     form.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  /* Read every operand register before any scratch register is
     overwritten.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Move the operand values into the scratch registers the rewritten
     instruction will use.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  /* Record everything the cleanup routine needs to finish the transfer.  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
5487
5488 /* Copy byte/half word/word loads and stores. */
5489
static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    arm_displaced_step_copy_insn_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  /* Note: USERMODE is not consulted in this routine.  */
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save the scratch registers the rewritten instruction may use: r0
     (data), r2 (base), r3 (index, register form only), and r4 (only
     needed by the PC-store sequence built in arm_copy_ldr_str_ldrb_strb).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  /* Read the operands before any of the scratch registers are written.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  /* Record what the cleanup routine needs to undo the register swap and
     finish writeback / result delivery.  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from displaced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
     = addr(Insn1) + offset - addr(Insn3) - 8
     = offset - 16
     Insn4: add r4, r4, #8 r4 = offset - 8
     Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
     = from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
5543
5544
static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_copy_insn_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  /* A clear U bit means the offset is subtracted from the base.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2].  */


  /* Save the scratch registers used by the rewritten, register-offset
     form of the load.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* LDR (literal) uses the word-aligned PC as its base address.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  /* Marked as the register form so cleanup_load also restores r3.  */
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
5600
5601 static int
5602 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5603 uint16_t insn2, struct regcache *regs,
5604 arm_displaced_step_copy_insn_closure *dsc,
5605 int writeback, int immed)
5606 {
5607 unsigned int rt = bits (insn2, 12, 15);
5608 unsigned int rn = bits (insn1, 0, 3);
5609 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5610 /* In LDR (register), there is also a register Rm, which is not allowed to
5611 be PC, so we don't have to check it. */
5612
5613 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5614 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5615 dsc);
5616
5617 if (debug_displaced)
5618 fprintf_unfiltered (gdb_stdlog,
5619 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5620 rt, rn, insn1, insn2);
5621
5622 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5623 0, rt, rm, rn);
5624
5625 dsc->u.ldst.restore_r4 = 0;
5626
5627 if (immed)
5628 /* ldr[b]<cond> rt, [rn, #imm], etc.
5629 ->
5630 ldr[b]<cond> r0, [r2, #imm]. */
5631 {
5632 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5633 dsc->modinsn[1] = insn2 & 0x0fff;
5634 }
5635 else
5636 /* ldr[b]<cond> rt, [rn, rm], etc.
5637 ->
5638 ldr[b]<cond> r0, [r2, r3]. */
5639 {
5640 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5641 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5642 }
5643
5644 dsc->numinsns = 2;
5645
5646 return 0;
5647 }
5648
5649
static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    arm_displaced_step_copy_insn_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* If the PC appears in neither Rt/Rn nor Rm, run the insn as-is.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			: (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  /* Swap operands into the scratch registers and register the cleanup.  */
  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* This sequence leaves "from + offset" in r0 before the final store;
	 see the "To write PC" comment in install_load_store for the full
	 derivation.  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
5714
5715 /* Cleanup LDM instructions with fully-populated register list. This is an
5716 unfortunate corner case: it's impossible to implement correctly by modifying
5717 the instruction. The issue is as follows: we have an instruction,
5718
5719 ldm rN, {r0-r15}
5720
5721 which we must rewrite to avoid loading PC. A possible solution would be to
5722 do the load in two halves, something like (with suitable cleanup
5723 afterwards):
5724
5725 mov r8, rN
5726 ldm[id][ab] r8!, {r0-r7}
5727 str r7, <temp>
5728 ldm[id][ab] r8, {r7-r14}
5729 <bkpt>
5730
5731 but at present there's no suitable place for <temp>, since the scratch space
5732 is overwritten before the cleanup routine is called. For now, we simply
5733 emulate the instruction. */
5734
static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_copy_insn_closure *dsc)
{
  int inc = dsc->u.block.increment;
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Walk registers upwards for increment mode and downwards otherwise,
     matching the order LDM transfers them in.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* The copied instruction was replaced with a NOP; if the original's
     condition fails, the emulated transfer must not happen either.  */
  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Emulate the load one register at a time.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next set bit in transfer order.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback; the NOP that replaced the insn did
     not perform it.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
5793
5794 /* Clean up an STM which included the PC in the register list. */
5795
static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_copy_insn_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs
    = count_one_bits (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* Compute the address at which the out-of-line STM stored the (wrong)
     PC value, from the transfer direction and the before/after flag.
     The PC is the highest-numbered register, so it occupies the highest
     address of the block.  */
  if (dsc->u.block.increment)
    {
      /* NOTE(review): for increment-after (STMIA) the ARM ARM places the
	 stored PC at xfer_addr + 4 * (transferred_regs - 1); this computes
	 one word higher.  The decrement branch below is consistent with
	 the "- 1" form -- verify this branch against the architecture
	 manual (it is only exercised by STMIA/STMIB with PC in the list).  */
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at -= 4;
    }

  /* The stored value equals the scratch-copy address plus an
     architecture-dependent offset; recover that offset so it can be
     applied to the original instruction's address.  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
5841
5842 /* Clean up an LDM which includes the PC in the register list. We clumped all
5843 the registers in the transferred list into a contiguous range r0...rX (to
5844 avoid loading PC directly and losing control of the debugged program), so we
5845 must undo that here. */
5846
static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       arm_displaced_step_copy_insn_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = count_one_bits (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* Mask of the low registers r0..r(N-1) that the rewritten LDM loaded
     into; bits are cleared below as values reach their destination.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the original register list from r15 downwards, moving each loaded
     value from its temporary low register to its real destination.  Going
     top-down guarantees a value is never overwritten before it is moved.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
5923
5924 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5925 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5926
static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     arm_displaced_step_copy_insn_closure *dsc)
{
  /* Decode the addressing-mode and operation bits of the LDM/STM.  */
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  /* Record the decoded fields for use by the cleanup routines.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = count_one_bits (regmask), new_regmask;
	  unsigned int i;

	  /* Save the low registers the rewritten list will clobber, so the
	     cleanup routine can restore those not in the original list.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	     ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	     ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
6040
static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			arm_displaced_step_copy_insn_closure *dsc)
{
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  /* Record the decoded fields for the cleanup routines; Thumb-2 LDM/STM
     outside an IT block is unconditional, hence INST_AL.  */
  dsc->u.block.load = load;
  dsc->u.block.user = 0;
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = count_one_bits (regmask), new_regmask;
	  unsigned int i;

	  /* Save the low registers the rewritten list will clobber so the
	     cleanup routine can restore those not in the original list.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback is emulated in cleanup_block_load_pc; clear it here
	     so the copied insn doesn't also update the base register.  */
	  if (writeback)
	    insn1 &= ~(1 << 5);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including PC: run it as-is out of line; cleanup_block_store_pc
	 patches the stored PC value in memory afterwards.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
6122
6123 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6124 This is used to avoid a dependency on BFD's bfd_endian enum. */
6125
6126 ULONGEST
6127 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6128 int byte_order)
6129 {
6130 return read_memory_unsigned_integer (memaddr, len,
6131 (enum bfd_endian) byte_order);
6132 }
6133
6134 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6135
6136 CORE_ADDR
6137 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6138 CORE_ADDR val)
6139 {
6140 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6141 }
6142
6143 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6144
6145 static CORE_ADDR
6146 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6147 {
6148 return 0;
6149 }
6150
6151 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6152
6153 int
6154 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6155 {
6156 return arm_is_thumb (self->regcache);
6157 }
6158
6159 /* single_step() is called just before we want to resume the inferior,
6160 if we want to single-step it but there is no hardware or kernel
6161 single-step support. We find the target of the coming instructions
6162 and breakpoint them. */
6163
6164 std::vector<CORE_ADDR>
6165 arm_software_single_step (struct regcache *regcache)
6166 {
6167 struct gdbarch *gdbarch = regcache->arch ();
6168 struct arm_get_next_pcs next_pcs_ctx;
6169
6170 arm_get_next_pcs_ctor (&next_pcs_ctx,
6171 &arm_get_next_pcs_ops,
6172 gdbarch_byte_order (gdbarch),
6173 gdbarch_byte_order_for_code (gdbarch),
6174 0,
6175 regcache);
6176
6177 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6178
6179 for (CORE_ADDR &pc_ref : next_pcs)
6180 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6181
6182 return next_pcs;
6183 }
6184
6185 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6186 for Linux, where some SVC instructions must be treated specially. */
6187
6188 static void
6189 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6190 arm_displaced_step_copy_insn_closure *dsc)
6191 {
6192 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6193
6194 if (debug_displaced)
6195 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6196 "%.8lx\n", (unsigned long) resume_addr);
6197
6198 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6199 }
6200
6201
6202 /* Common copy routine for svc instruction. */
6203
6204 static int
6205 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6206 arm_displaced_step_copy_insn_closure *dsc)
6207 {
6208 /* Preparation: none.
6209 Insn: unmodified svc.
6210 Cleanup: pc <- insn_addr + insn_size. */
6211
6212 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6213 instruction. */
6214 dsc->wrote_to_pc = 1;
6215
6216 /* Allow OS-specific code to override SVC handling. */
6217 if (dsc->u.svc.copy_svc_os)
6218 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6219 else
6220 {
6221 dsc->cleanup = &cleanup_svc;
6222 return 0;
6223 }
6224 }
6225
6226 static int
6227 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6228 struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6229 {
6230
6231 if (debug_displaced)
6232 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6233 (unsigned long) insn);
6234
6235 dsc->modinsn[0] = insn;
6236
6237 return install_svc (gdbarch, regs, dsc);
6238 }
6239
6240 static int
6241 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6242 struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6243 {
6244
6245 if (debug_displaced)
6246 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6247 insn);
6248
6249 dsc->modinsn[0] = insn;
6250
6251 return install_svc (gdbarch, regs, dsc);
6252 }
6253
6254 /* Copy undefined instructions. */
6255
6256 static int
6257 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6258 arm_displaced_step_copy_insn_closure *dsc)
6259 {
6260 if (debug_displaced)
6261 fprintf_unfiltered (gdb_stdlog,
6262 "displaced: copying undefined insn %.8lx\n",
6263 (unsigned long) insn);
6264
6265 dsc->modinsn[0] = insn;
6266
6267 return 0;
6268 }
6269
6270 static int
6271 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6272 arm_displaced_step_copy_insn_closure *dsc)
6273 {
6274
6275 if (debug_displaced)
6276 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6277 "%.4x %.4x\n", (unsigned short) insn1,
6278 (unsigned short) insn2);
6279
6280 dsc->modinsn[0] = insn1;
6281 dsc->modinsn[1] = insn2;
6282 dsc->numinsns = 2;
6283
6284 return 0;
6285 }
6286
6287 /* Copy unpredictable instructions. */
6288
6289 static int
6290 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6291 arm_displaced_step_copy_insn_closure *dsc)
6292 {
6293 if (debug_displaced)
6294 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6295 "%.8lx\n", (unsigned long) insn);
6296
6297 dsc->modinsn[0] = insn;
6298
6299 return 0;
6300 }
6301
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode miscellaneous, memory-hint and Advanced SIMD instructions
   (the cond == 0xf space with bit 27 clear) and dispatch to the
   matching copy routine.  OP1 is bits 20-26, OP2 bits 4-7, RN bits
   16-19 of INSN.  Returns 0 on success.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    /* Register-form hints: bit 7 of op1 does not affect decoding here.  */
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
6364
/* Decode an ARM instruction from the "unconditional" (condition field
   0xf) encoding space and dispatch to the appropriate copy routine.
   Returns 0 on success.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_copy_insn_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* RN_F: whether the base register field is the PC.  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6447
/* Decode miscellaneous instructions in dp/misc encoding space.
   OP2 is bits 4-6 and OP is bits 21-22 of INSN; dispatch to the
   matching copy routine.  Returns 0 on success.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6500
/* Decode the data-processing / miscellaneous ARM encoding space
   (immediate forms when bit 25 is set, register forms otherwise) and
   dispatch to the matching copy routine.  Returns 0 on success, 1 if
   nothing matched (should be unreachable).  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_copy_insn_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6546
/* Decode ARM load/store word and unsigned byte instructions (LDR/STR,
   LDRB/STRB and the unprivileged T variants) and dispatch to
   arm_copy_ldr_str_ldrb_strb.  A is bit 25 (register offset form), B
   is bit 4.  The three trailing int arguments appear to be
   (load, size-in-bytes, user/unprivileged) — TODO confirm against the
   arm_copy_ldr_str_ldrb_strb definition.  Returns 0 on success, 1 if
   nothing matched (should be unreachable).  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_copy_insn_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
6583
/* Decode ARM media instructions (parallel add/sub, pack/unpack,
   saturate/reverse, usad8/usada8, bit-field ops) by switching on bits
   20-24 of INSN.  All of these are copied unmodified; only the decode
   differs.  Returns 0 on success, 1 if nothing matched (should be
   unreachable).  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_copy_insn_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6639
6640 static int
6641 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6642 struct regcache *regs,
6643 arm_displaced_step_copy_insn_closure *dsc)
6644 {
6645 if (bit (insn, 25))
6646 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6647 else
6648 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6649 }
6650
/* Decode ARM extension register (VFP/Neon) load/store instructions by
   switching on bits 20-24 of INSN.  vstr/vldr may reference the PC
   and go through arm_copy_copro_load_store; everything else is copied
   unmodified.  Returns 0 on success, 1 if nothing matched (should be
   unreachable).  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6681
6682 /* Decode shifted register instructions. */
6683
6684 static int
6685 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6686 uint16_t insn2, struct regcache *regs,
6687 arm_displaced_step_copy_insn_closure *dsc)
6688 {
6689 /* PC is only allowed to be used in instruction MOV. */
6690
6691 unsigned int op = bits (insn1, 5, 8);
6692 unsigned int rn = bits (insn1, 0, 3);
6693
6694 if (op == 0x2 && rn == 0xf) /* MOV */
6695 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6696 else
6697 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6698 "dp (shift reg)", dsc);
6699 }
6700
6701
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  Switches on bits 4-8 of INSN1; only vldr
   (which may be PC-relative) needs the copro load/store treatment,
   everything else is copied unmodified.  Returns 0 on success, 1 if
   nothing matched (should be unreachable).  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6740
/* Decode the ARM SVC / coprocessor encoding space (SVC, coprocessor
   load/store, register transfers, VFP/Neon).  OP1 is bits 20-25, OP
   bit 4 and COPROC bits 8-11 of INSN; coproc values 101x select the
   VFP/Neon sub-spaces.  Returns 0 on success.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
6785
/* Decode the Thumb-2 coprocessor / SIMD encoding space and dispatch
   to the appropriate copy routine.  COPROC is bits 8-11 of INSN2;
   values 101x select VFP/SIMD.  Returns 0 on success.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	   /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
6826
6827 static void
6828 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6829 arm_displaced_step_copy_insn_closure *dsc, int rd)
6830 {
6831 /* ADR Rd, #imm
6832
6833 Rewrite as:
6834
6835 Preparation: Rd <- PC
6836 Insn: ADD Rd, #imm
6837 Cleanup: Null.
6838 */
6839
6840 /* Rd <- PC */
6841 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6842 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6843 }
6844
6845 static int
6846 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6847 arm_displaced_step_copy_insn_closure *dsc,
6848 int rd, unsigned int imm)
6849 {
6850
6851 /* Encoding T2: ADDS Rd, #imm */
6852 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6853
6854 install_pc_relative (gdbarch, regs, dsc, rd);
6855
6856 return 0;
6857 }
6858
6859 static int
6860 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6861 struct regcache *regs,
6862 arm_displaced_step_copy_insn_closure *dsc)
6863 {
6864 unsigned int rd = bits (insn, 8, 10);
6865 unsigned int imm8 = bits (insn, 0, 7);
6866
6867 if (debug_displaced)
6868 fprintf_unfiltered (gdb_stdlog,
6869 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6870 rd, imm8, insn);
6871
6872 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6873 }
6874
/* Copy a 32-bit Thumb ADR (PC-relative address calculation): Rd is
   primed with the original PC and the executed instruction becomes an
   ADD/SUB Rd, Rd, #imm carrying the original raw immediate fields.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7)) /* Encoding T2 */
    {
      /* Encoding T2: SUB Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else /* Encoding T3 */
    {
      /* Encoding T3: ADD Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
6911
/* Copy a 16-bit Thumb PC-relative (literal) load for displaced
   stepping by materializing the aligned PC and offset in scratch
   registers.  */

static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  int imm8 = (bits (insn1, 0, 7) << 2);	/* Word-scaled offset.  */

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;  /* ldr r0, [r2, r3]*/

  dsc->cleanup = &cleanup_load;

  return 0;
}
6959
/* Copy Thumb cbnz/cbz instruction.  The register test is evaluated
   here at copy time; the out-of-line instruction becomes a NOP and
   cleanup_branch writes the correct destination PC.  */

static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     arm_displaced_step_copy_insn_closure *dsc)
{
  int non_zero = bit (insn1, 11);	/* 1: CBNZ, 0: CBZ.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
      dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
6998
/* Copy Table Branch Byte/Halfword.  The table entry is read here at
   copy time and the branch destination computed directly; cleanup
   writes the destination PC.  */
static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_copy_insn_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);		/* 1: TBH, 0: TBB.  */
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  /* NOTE(review): target_read_memory's return value is ignored here;
     on a failed read BUF is used uninitialized — consider checking.  */
  if (is_tbh)
    {
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  /* Branch destination: next instruction plus twice the table entry.  */
  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
7043
7044 static void
7045 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7046 arm_displaced_step_copy_insn_closure *dsc)
7047 {
7048 /* PC <- r7 */
7049 int val = displaced_read_reg (regs, dsc, 7);
7050 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7051
7052 /* r7 <- r8 */
7053 val = displaced_read_reg (regs, dsc, 8);
7054 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7055
7056 /* r8 <- tmp[0] */
7057 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7058
7059 }
7060
/* Copy a 16-bit Thumb "POP {..., PC}" for displaced stepping.  The PC
   cannot be popped directly out of line, so the register list is
   rewritten and the final PC/register values are fixed up by a
   cleanup routine.  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_copy_insn_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): full register list.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff);  /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;		   /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;		   /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): partial register list; pop into r0..rN (with the PC
	 going into rN) and let cleanup_block_load_pc distribute the
	 values to the original registers.  */
      unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      /* Save r0..rN, which the rewritten POP will clobber.  */
      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Record that PC was in the original list for the cleanup.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
7133
/* Decode the 16-bit Thumb instruction INSN1 and fill in DSC so that it can
   be single-stepped out of line.  Instructions that cannot depend on or
   modify the PC are copied unmodified; PC-sensitive ones are rewritten by
   the thumb_copy_* / thumb_decode_* helpers above.  Calls internal_error
   if the instruction cannot be decoded.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_copy_insn_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions. */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare. */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange. */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list. */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction. */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
7236
/* Decode the 32-bit Thumb-2 "load byte/halfword/word and memory hints"
   encoding group for displaced stepping.  INSN1 and INSN2 are the two
   halfwords of the instruction.  Returns the result of the selected copy
   routine (0 on success).  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_copy_insn_closure *dsc)
{
  int rt = bits (insn2, 12, 15);  /* Target register.  */
  int rn = bits (insn1, 0, 3);    /* Base register; 0xf means PC-relative.  */
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal). */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints. */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
7316
/* Decode the 32-bit Thumb-2 instruction formed by halfwords INSN1/INSN2
   and fill in DSC for displaced stepping.  Dispatches on bits 11-12 of
   the first halfword (op1) and further sub-fields, mirroring the Thumb-2
   encoding tables.  Calls internal_error on a decode failure.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_copy_insn_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch. */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions. */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register). */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions. */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control. */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm). */
	    {
	      int dp_op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR and ADDW/SUBW with Rn == PC read the PC, so they need
		 the PC-relative rewrite; everything else is PC-safe.  */
	      if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
7444
7445 static void
7446 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7447 struct regcache *regs,
7448 arm_displaced_step_copy_insn_closure *dsc)
7449 {
7450 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7451 uint16_t insn1
7452 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7453
7454 if (debug_displaced)
7455 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7456 "at %.8lx\n", insn1, (unsigned long) from);
7457
7458 dsc->is_thumb = 1;
7459 dsc->insn_size = thumb_insn_size (insn1);
7460 if (thumb_insn_size (insn1) == 4)
7461 {
7462 uint16_t insn2
7463 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7464 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7465 }
7466 else
7467 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7468 }
7469
/* Prepare DSC for displaced stepping of the instruction at FROM, which
   will be copied to the scratch area at TO.  If the inferior is in Thumb
   mode, defer to the Thumb decoder; otherwise read the 32-bit ARM
   instruction and dispatch on a key built from bits 25-27 and bit 4 of
   the encoding.  Raises an internal error if decoding fails.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    arm_displaced_step_copy_insn_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary. */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  /* A condition field of 0xf selects the "unconditional" instruction
     space.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
7527
7528 /* Actually set up the scratch space for a displaced instruction. */
7529
void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, arm_displaced_step_copy_insn_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb modified instructions are emitted as 2-byte halfwords, ARM
     ones as 4-byte words.  */
  int size = dsc->is_thumb? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s). */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short)dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);

	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction. */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  Hitting it signals that the displaced
     instruction has finished executing.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
7583
7584 /* Entry point for cleaning things up after a displaced instruction has been
7585 single-stepped. */
7586
7587 void
7588 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7589 struct displaced_step_copy_insn_closure *dsc_,
7590 CORE_ADDR from, CORE_ADDR to,
7591 struct regcache *regs)
7592 {
7593 arm_displaced_step_copy_insn_closure *dsc = (arm_displaced_step_copy_insn_closure *) dsc_;
7594
7595 if (dsc->cleanup)
7596 dsc->cleanup (gdbarch, regs, dsc);
7597
7598 if (!dsc->wrote_to_pc)
7599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7600 dsc->insn_addr + dsc->insn_size);
7601
7602 }
7603
7604 #include "bfd-in2.h"
7605 #include "libcoff.h"
7606
/* Disassembler entry point used by GDB for ARM targets.  If MEMADDR is a
   Thumb address (low bit set per arm_pc_is_thumb), install a fake COFF
   Thumb symbol in INFO so the opcodes disassembler decodes Thumb rather
   than ARM instructions.  Returns the instruction length as reported by
   default_print_insn.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* These are static so the fake symbol vector survives after this
	 function returns; it is built only once (first Thumb use).  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions. */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
     accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
     opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
     the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
     in default_print_insn. */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
7655
7656 /* The following define instruction sequences that will cause ARM
7657 cpu's to take an undefined instruction trap. These are used to
7658 signal a breakpoint to GDB.
7659
7660 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7661 modes. A different instruction is required for each mode. The ARM
7662 cpu's can also be big or little endian. Thus four different
7663 instructions are needed to support all cases.
7664
7665 Note: ARMv4 defines several new instructions that will take the
7666 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7667 not in fact add the new instructions. The new undefined
7668 instructions in ARMv4 are all instructions that had no defined
7669 behaviour in earlier chips. There is no guarantee that they will
7670 raise an exception, but may be treated as NOP's. In practice, it
   may only be safe to rely on instructions matching:
7672
7673 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7674 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7675 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7676
   Even this may only be true if the condition predicate is true. The
7678 following use a condition predicate of ALWAYS so it is always TRUE.
7679
7680 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7681 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap. This can be handled by the
7683 abi-specific code during establishment of the gdbarch vector. */
7684
7685 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7686 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7687 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7688 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7689
7690 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7691 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7692 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7693 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7694
7695 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7696
7697 static int
7698 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7699 {
7700 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7701 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7702
7703 if (arm_pc_is_thumb (gdbarch, *pcptr))
7704 {
7705 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7706
7707 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7708 check whether we are replacing a 32-bit instruction. */
7709 if (tdep->thumb2_breakpoint != NULL)
7710 {
7711 gdb_byte buf[2];
7712
7713 if (target_read_memory (*pcptr, buf, 2) == 0)
7714 {
7715 unsigned short inst1;
7716
7717 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7718 if (thumb_insn_size (inst1) == 4)
7719 return ARM_BP_KIND_THUMB2;
7720 }
7721 }
7722
7723 return ARM_BP_KIND_THUMB;
7724 }
7725 else
7726 return ARM_BP_KIND_ARM;
7727
7728 }
7729
7730 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7731
7732 static const gdb_byte *
7733 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7734 {
7735 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7736
7737 switch (kind)
7738 {
7739 case ARM_BP_KIND_ARM:
7740 *size = tdep->arm_breakpoint_size;
7741 return tdep->arm_breakpoint;
7742 case ARM_BP_KIND_THUMB:
7743 *size = tdep->thumb_breakpoint_size;
7744 return tdep->thumb_breakpoint;
7745 case ARM_BP_KIND_THUMB2:
7746 *size = tdep->thumb2_breakpoint_size;
7747 return tdep->thumb2_breakpoint;
7748 default:
7749 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7750 }
7751 }
7752
7753 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7754
static int
arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
					struct regcache *regcache,
					CORE_ADDR *pcptr)
{
  gdb_byte buf[4];

  /* Check the memory pointed by PC is readable. */
  if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
    {
      struct arm_get_next_pcs next_pcs_ctx;

      arm_get_next_pcs_ctor (&next_pcs_ctx,
			     &arm_get_next_pcs_ops,
			     gdbarch_byte_order (gdbarch),
			     gdbarch_byte_order_for_code (gdbarch),
			     0,
			     regcache);

      std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

      /* If MEMADDR is the next instruction of current pc, do the
	 software single step computation, and get the thumb mode by
	 the destination address. */
      for (CORE_ADDR pc : next_pcs)
	{
	  if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
	    {
	      if (IS_THUMB_ADDR (pc))
		{
		  /* Destination is Thumb: re-tag the address and let
		     arm_breakpoint_kind_from_pc choose THUMB/THUMB2.  */
		  *pcptr = MAKE_THUMB_ADDR (*pcptr);
		  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
		}
	      else
		return ARM_BP_KIND_ARM;
	    }
	}
    }

  /* Fall back to deciding purely from the address.  */
  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
}
7796
7797 /* Extract from an array REGBUF containing the (raw) register state a
7798 function return value of type TYPE, and copy that, in virtual
7799 format, into VALBUF. */
7800
static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == type->code ())
    {
      /* Where a float lands depends on the floating-point model in use.  */
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type. */
	    bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];

	    regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code. */
	case ARM_FLOAT_VFP:
	  /* Value is in r0 (and r1 for doubles/8-byte values).  */
	  regs->cooked_read (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_read (ARM_A1_REGNUM + 1,
			       valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (type->code () == TYPE_CODE_INT
	   || type->code () == TYPE_CODE_CHAR
	   || type->code () == TYPE_CODE_BOOL
	   || type->code () == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || type->code () == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more. */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values. */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > ARM_INT_REGISTER_SIZE
				   ? ARM_INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s). */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regs->cooked_read (regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
7889
7890
7891 /* Will a function return an aggregate type in memory or in a
7892 register? Return 0 if an aggregate type can be returned in a
7893 register, 1 if it must be returned in memory. */
7894
static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  enum type_code code;

  type = check_typedef (type);

  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers). */
  code = type->code ();
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
    return 0;

  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
    {
      /* Vector values should be returned using ARM registers if they
	 are not over 16 bytes. */
      return (TYPE_LENGTH (type) > 16);
    }

  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    {
      /* The AAPCS says all aggregates not larger than a word are returned
	 in a register. */
      if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
	return 0;

      return 1;
    }
  else
    {
      /* Result flag: non-zero means "must be returned in memory".  */
      int nRc;

      /* All aggregate types that won't fit in a register must be returned
	 in memory. */
      if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
	return 1;

      /* In the ARM ABI, "integer" like aggregate types are returned in
	 registers.  For an aggregate type to be integer like, its size
	 must be less than or equal to ARM_INT_REGISTER_SIZE and the
	 offset of each addressable subfield must be zero.  Note that bit
	 fields are not addressable, and all addressable subfields of
	 unions always start at offset zero.

	 This function is based on the behaviour of GCC 2.95.1.
	 See: gcc/arm.c: arm_return_in_memory() for details.

	 Note: All versions of GCC before GCC 2.95.2 do not set up the
	 parameters correctly for a function returning the following
	 structure: struct { float f;}; This should be returned in memory,
	 not a register.  Richard Earnshaw sent me a patch, but I do not
	 know of any way to detect if a function like the above has been
	 compiled with the correct calling convention. */

      /* Assume all other aggregate types can be returned in a register.
	 Run a check for structures, unions and arrays. */
      nRc = 0;

      if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
	{
	  int i;
	  /* Need to check if this struct/union is "integer" like.  For
	     this to be true, its size must be less than or equal to
	     ARM_INT_REGISTER_SIZE and the offset of each addressable
	     subfield must be zero.  Note that bit fields are not
	     addressable, and unions always start at offset zero.  If any
	     of the subfields is a floating point type, the struct/union
	     cannot be an integer type. */

	  /* For each field in the object, check:
	     1) Is it FP? --> yes, nRc = 1;
	     2) Is it addressable (bitpos != 0) and
	     not packed (bitsize == 0)?
	     --> yes, nRc = 1
	  */

	  for (i = 0; i < type->num_fields (); i++)
	    {
	      enum type_code field_type_code;

	      field_type_code
		= check_typedef (TYPE_FIELD_TYPE (type, i))->code ();

	      /* Is it a floating point type field? */
	      if (field_type_code == TYPE_CODE_FLT)
		{
		  nRc = 1;
		  break;
		}

	      /* If bitpos != 0, then we have to care about it. */
	      if (TYPE_FIELD_BITPOS (type, i) != 0)
		{
		  /* Bitfields are not addressable.  If the field bitsize is
		     zero, then the field is not packed.  Hence it cannot be
		     a bitfield or any other packed type. */
		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
		    {
		      nRc = 1;
		      break;
		    }
		}
	    }
	}

      return nRc;
    }
}
8005
8006 /* Write into appropriate registers a function return value of type
8007 TYPE, given in virtual format. */
8008
static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (type->code () == TYPE_CODE_FLT)
    {
      gdb_byte buf[ARM_FP_REGISTER_SIZE];

      /* Destination register(s) depend on the floating-point model.  */
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:

	  /* Convert to the FPA internal format and place in F0.  */
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regs->cooked_write (ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code. */
	case ARM_FLOAT_VFP:
	  /* Value goes in r0 (and r1 for 8-byte values).  */
	  regs->cooked_write (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_write (ARM_A1_REGNUM + 1,
				valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (type->code () == TYPE_CODE_INT
	   || type->code () == TYPE_CODE_CHAR
	   || type->code () == TYPE_CODE_BOOL
	   || type->code () == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || type->code () == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0. */
	  bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
	  regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size. */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regs->cooked_write (regno++, valbuf);
	      len -= ARM_INT_REGISTER_SIZE;
	      valbuf += ARM_INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s). */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  regs->cooked_write (regno++, tmpbuf);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
8098
8099
/* Handle function return values.

   Implement the gdbarch "return_value" method.  Decide how a value of
   type VALTYPE is returned from FUNCTION, and optionally transfer the
   value: when WRITEBUF is non-NULL, store the value it holds into the
   return registers of REGCACHE; when READBUF is non-NULL, extract the
   returned value from the registers into it.

   Returns RETURN_VALUE_STRUCT_CONVENTION when the value is returned in
   memory (the caller passes a hidden result pointer), otherwise
   RETURN_VALUE_REGISTER_CONVENTION.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  /* FUNCTION may be NULL here; guard before taking its type.  */
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      /* VFP co-processor register candidates are returned in
	 VFP_BASE_COUNT consecutive VFP registers; the register kind
	 ('s', 'd' or 'q') and per-register size depend on the base
	 type.  */
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers have no raw register; access them via
		 the helpers that split them into double registers.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      /* Build the register name (e.g. "d2" or "s3") and map
		 it to a register number.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache->cooked_write (regnum, writebuf + i * unit_length);
	      if (readbuf)
		regcache->cooked_read (regnum, readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (valtype->code () == TYPE_CODE_STRUCT
      || valtype->code () == TYPE_CODE_UNION
      || valtype->code () == TYPE_CODE_ARRAY)
    {
      /* Aggregates may be returned in memory, depending on the
	 selected struct-return convention and the type's layout.  */
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (valtype->code () == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* Everything else is passed through the core/FPA return-value
     helpers, which pick registers based on the float model.  */
  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8169
8170
8171 static int
8172 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8173 {
8174 struct gdbarch *gdbarch = get_frame_arch (frame);
8175 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8176 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8177 CORE_ADDR jb_addr;
8178 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8179
8180 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8181
8182 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8183 ARM_INT_REGISTER_SIZE))
8184 return 0;
8185
8186 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8187 return 1;
8188 }
8189 /* A call to cmse secure entry function "foo" at "a" is modified by
8190 GNU ld as "b".
8191 a) bl xxxx <foo>
8192
8193 <foo>
8194 xxxx:
8195
8196 b) bl yyyy <__acle_se_foo>
8197
8198 section .gnu.sgstubs:
8199 <foo>
8200 yyyy: sg // secure gateway
8201 b.w xxxx <__acle_se_foo> // original_branch_dest
8202
8203 <__acle_se_foo>
8204 xxxx:
8205
8206 When the control at "b", the pc contains "yyyy" (sg address) which is a
8207 trampoline and does not exist in source code. This function returns the
8208 target pc "xxxx". For more details please refer to section 5.4
8209 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8210 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8211 document on www.developer.arm.com. */
8212
8213 static CORE_ADDR
8214 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8215 {
8216 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8217 char *target_name = (char *) alloca (target_len);
8218 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8219
8220 struct bound_minimal_symbol minsym
8221 = lookup_minimal_symbol (target_name, NULL, objfile);
8222
8223 if (minsym.minsym != nullptr)
8224 return BMSYMBOL_VALUE_ADDRESS (minsym);
8225
8226 return 0;
8227 }
8228
8229 /* Return true when SEC points to ".gnu.sgstubs" section. */
8230
8231 static bool
8232 arm_is_sgstubs_section (struct obj_section *sec)
8233 {
8234 return (sec != nullptr
8235 && sec->the_bfd_section != nullptr
8236 && sec->the_bfd_section->name != nullptr
8237 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8238 }
8239
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.

   Handles three kinds of stub: Thumb interworking thunks
   (_call_via_rN / __ARM_call_via_rN), GNU ld non-interworking stubs
   (__foo_from_arm / __foo_from_thumb), and CMSE secure gateway stubs
   in the ".gnu.sgstubs" section.  */

CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
	 check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
	return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
	 target PC.  */
      static const char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      /* Every name in the table is exactly two characters, so the
	 register name starts two characters from the end.  */
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
	if (strcmp (&name[offset], table[regno]) == 0)
	  return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
	  || (namelen > 2 + strlen ("_from_arm")
	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
    {
      char *target_name;
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      /* Strip the leading "__" and the trailing "_from_*" suffix to
	 recover the real target name; the final character ('b' vs
	 'm') distinguishes the two suffix forms.  */
      if (name[namelen - 1] == 'b')
	target_len -= strlen ("_from_thumb");
      else
	target_len -= strlen ("_from_arm");

      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      /* Prefer a symbol from the same objfile as the stub, if any.  */
      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
	return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
	return 0;
    }

  struct obj_section *section = find_pc_section (pc);

  /* Check whether SECTION points to the ".gnu.sgstubs" section.  */
  if (arm_is_sgstubs_section (section))
    return arm_skip_cmse_entry (pc, name, section->objfile);

  return 0;			/* not a stub */
}
8326
8327 static void
8328 arm_update_current_architecture (void)
8329 {
8330 struct gdbarch_info info;
8331
8332 /* If the current architecture is not ARM, we have nothing to do. */
8333 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8334 return;
8335
8336 /* Update the architecture. */
8337 gdbarch_info_init (&info);
8338
8339 if (!gdbarch_update_p (info))
8340 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8341 }
8342
8343 static void
8344 set_fp_model_sfunc (const char *args, int from_tty,
8345 struct cmd_list_element *c)
8346 {
8347 int fp_model;
8348
8349 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8350 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8351 {
8352 arm_fp_model = (enum arm_float_model) fp_model;
8353 break;
8354 }
8355
8356 if (fp_model == ARM_FLOAT_LAST)
8357 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8358 current_fp_model);
8359
8360 arm_update_current_architecture ();
8361 }
8362
8363 static void
8364 show_fp_model (struct ui_file *file, int from_tty,
8365 struct cmd_list_element *c, const char *value)
8366 {
8367 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8368
8369 if (arm_fp_model == ARM_FLOAT_AUTO
8370 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8371 fprintf_filtered (file, _("\
8372 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8373 fp_model_strings[tdep->fp_model]);
8374 else
8375 fprintf_filtered (file, _("\
8376 The current ARM floating point model is \"%s\".\n"),
8377 fp_model_strings[arm_fp_model]);
8378 }
8379
8380 static void
8381 arm_set_abi (const char *args, int from_tty,
8382 struct cmd_list_element *c)
8383 {
8384 int arm_abi;
8385
8386 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8387 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8388 {
8389 arm_abi_global = (enum arm_abi_kind) arm_abi;
8390 break;
8391 }
8392
8393 if (arm_abi == ARM_ABI_LAST)
8394 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8395 arm_abi_string);
8396
8397 arm_update_current_architecture ();
8398 }
8399
8400 static void
8401 arm_show_abi (struct ui_file *file, int from_tty,
8402 struct cmd_list_element *c, const char *value)
8403 {
8404 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8405
8406 if (arm_abi_global == ARM_ABI_AUTO
8407 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8408 fprintf_filtered (file, _("\
8409 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8410 arm_abi_strings[tdep->arm_abi]);
8411 else
8412 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8413 arm_abi_string);
8414 }
8415
8416 static void
8417 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8418 struct cmd_list_element *c, const char *value)
8419 {
8420 fprintf_filtered (file,
8421 _("The current execution mode assumed "
8422 "(when symbols are unavailable) is \"%s\".\n"),
8423 arm_fallback_mode_string);
8424 }
8425
8426 static void
8427 arm_show_force_mode (struct ui_file *file, int from_tty,
8428 struct cmd_list_element *c, const char *value)
8429 {
8430 fprintf_filtered (file,
8431 _("The current execution mode assumed "
8432 "(even when symbols are available) is \"%s\".\n"),
8433 arm_force_mode_string);
8434 }
8435
8436 /* If the user changes the register disassembly style used for info
8437 register and other commands, we have to also switch the style used
8438 in opcodes for disassembly output. This function is run in the "set
8439 arm disassembly" command, and does that. */
8440
8441 static void
8442 set_disassembly_style_sfunc (const char *args, int from_tty,
8443 struct cmd_list_element *c)
8444 {
8445 /* Convert the short style name into the long style name (eg, reg-names-*)
8446 before calling the generic set_disassembler_options() function. */
8447 std::string long_name = std::string ("reg-names-") + disassembly_style;
8448 set_disassembler_options (&long_name[0]);
8449 }
8450
8451 static void
8452 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8453 struct cmd_list_element *c, const char *value)
8454 {
8455 struct gdbarch *gdbarch = get_current_arch ();
8456 char *options = get_disassembler_options (gdbarch);
8457 const char *style = "";
8458 int len = 0;
8459 const char *opt;
8460
8461 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8462 if (CONST_STRNEQ (opt, "reg-names-"))
8463 {
8464 style = &opt[strlen ("reg-names-")];
8465 len = strcspn (style, ",");
8466 }
8467
8468 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8469 }
8470 \f
8471 /* Return the ARM register name corresponding to register I. */
8472 static const char *
8473 arm_register_name (struct gdbarch *gdbarch, int i)
8474 {
8475 const int num_regs = gdbarch_num_regs (gdbarch);
8476
8477 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8478 && i >= num_regs && i < num_regs + 32)
8479 {
8480 static const char *const vfp_pseudo_names[] = {
8481 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8482 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8483 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8484 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8485 };
8486
8487 return vfp_pseudo_names[i - num_regs];
8488 }
8489
8490 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8491 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8492 {
8493 static const char *const neon_pseudo_names[] = {
8494 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8495 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8496 };
8497
8498 return neon_pseudo_names[i - num_regs - 32];
8499 }
8500
8501 if (i >= ARRAY_SIZE (arm_register_names))
8502 /* These registers are only supported on targets which supply
8503 an XML description. */
8504 return "";
8505
8506 return arm_register_names[i];
8507 }
8508
8509 /* Test whether the coff symbol specific value corresponds to a Thumb
8510 function. */
8511
8512 static int
8513 coff_sym_is_thumb (int val)
8514 {
8515 return (val == C_THUMBEXT
8516 || val == C_THUMBSTAT
8517 || val == C_THUMBEXTFUNC
8518 || val == C_THUMBSTATFUNC
8519 || val == C_THUMBLABEL);
8520 }
8521
8522 /* arm_coff_make_msymbol_special()
8523 arm_elf_make_msymbol_special()
8524
8525 These functions test whether the COFF or ELF symbol corresponds to
8526 an address in thumb code, and set a "special" bit in a minimal
8527 symbol to indicate that it does. */
8528
8529 static void
8530 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8531 {
8532 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8533
8534 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8535 == ST_BRANCH_TO_THUMB)
8536 MSYMBOL_SET_SPECIAL (msym);
8537 }
8538
8539 static void
8540 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8541 {
8542 if (coff_sym_is_thumb (val))
8543 MSYMBOL_SET_SPECIAL (msym);
8544 }
8545
/* Record an ARM mapping symbol ($a, $t or $d) for OBJFILE in the
   per-BFD mapping-symbol store, so later consumers can tell what kind
   of contents (ARM code, Thumb code, or data per the ARM ELF mapping
   symbol convention) a given address holds.  SYM is a symbol whose
   name starts with '$' (asserted below).  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_bfd *data;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  /* Only $a, $t and $d are of interest; ignore anything else.  */
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily create the per-BFD store, with one map per section.  */
  data = arm_bfd_data_key.get (objfile->obfd);
  if (data == NULL)
    data = arm_bfd_data_key.emplace (objfile->obfd,
				     objfile->obfd->section_count);
  arm_mapping_symbol_vec &map
    = data->section_maps[bfd_asymbol_section (sym)->index];

  /* SYM's value is its offset within its section.  */
  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Insert at the end, the vector will be sorted on first use.  */
  map.push_back (new_map_sym);
}
8571
8572 static void
8573 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8574 {
8575 struct gdbarch *gdbarch = regcache->arch ();
8576 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8577
8578 /* If necessary, set the T bit. */
8579 if (arm_apcs_32)
8580 {
8581 ULONGEST val, t_bit;
8582 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8583 t_bit = arm_psr_thumb_bit (gdbarch);
8584 if (arm_pc_is_thumb (gdbarch, pc))
8585 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8586 val | t_bit);
8587 else
8588 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8589 val & ~t_bit);
8590 }
8591 }
8592
8593 /* Read the contents of a NEON quad register, by reading from two
8594 double registers. This is used to implement the quad pseudo
8595 registers, and for argument passing in case the quad registers are
8596 missing; vectors are passed in quad registers when using the VFP
8597 ABI, even if a NEON unit is not present. REGNUM is the index of
8598 the quad register, in [0, 15]. */
8599
8600 static enum register_status
8601 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8602 int regnum, gdb_byte *buf)
8603 {
8604 char name_buf[4];
8605 gdb_byte reg_buf[8];
8606 int offset, double_regnum;
8607 enum register_status status;
8608
8609 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8610 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8611 strlen (name_buf));
8612
8613 /* d0 is always the least significant half of q0. */
8614 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8615 offset = 8;
8616 else
8617 offset = 0;
8618
8619 status = regcache->raw_read (double_regnum, reg_buf);
8620 if (status != REG_VALID)
8621 return status;
8622 memcpy (buf + offset, reg_buf, 8);
8623
8624 offset = 8 - offset;
8625 status = regcache->raw_read (double_regnum + 1, reg_buf);
8626 if (status != REG_VALID)
8627 return status;
8628 memcpy (buf + offset, reg_buf, 8);
8629
8630 return REG_VALID;
8631 }
8632
8633 static enum register_status
8634 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8635 int regnum, gdb_byte *buf)
8636 {
8637 const int num_regs = gdbarch_num_regs (gdbarch);
8638 char name_buf[4];
8639 gdb_byte reg_buf[8];
8640 int offset, double_regnum;
8641
8642 gdb_assert (regnum >= num_regs);
8643 regnum -= num_regs;
8644
8645 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8646 /* Quad-precision register. */
8647 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8648 else
8649 {
8650 enum register_status status;
8651
8652 /* Single-precision register. */
8653 gdb_assert (regnum < 32);
8654
8655 /* s0 is always the least significant half of d0. */
8656 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8657 offset = (regnum & 1) ? 0 : 4;
8658 else
8659 offset = (regnum & 1) ? 4 : 0;
8660
8661 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8662 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8663 strlen (name_buf));
8664
8665 status = regcache->raw_read (double_regnum, reg_buf);
8666 if (status == REG_VALID)
8667 memcpy (buf, reg_buf + offset, 4);
8668 return status;
8669 }
8670 }
8671
8672 /* Store the contents of BUF to a NEON quad register, by writing to
8673 two double registers. This is used to implement the quad pseudo
8674 registers, and for argument passing in case the quad registers are
8675 missing; vectors are passed in quad registers when using the VFP
8676 ABI, even if a NEON unit is not present. REGNUM is the index
8677 of the quad register, in [0, 15]. */
8678
8679 static void
8680 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8681 int regnum, const gdb_byte *buf)
8682 {
8683 char name_buf[4];
8684 int offset, double_regnum;
8685
8686 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8687 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8688 strlen (name_buf));
8689
8690 /* d0 is always the least significant half of q0. */
8691 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8692 offset = 8;
8693 else
8694 offset = 0;
8695
8696 regcache->raw_write (double_regnum, buf + offset);
8697 offset = 8 - offset;
8698 regcache->raw_write (double_regnum + 1, buf + offset);
8699 }
8700
8701 static void
8702 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8703 int regnum, const gdb_byte *buf)
8704 {
8705 const int num_regs = gdbarch_num_regs (gdbarch);
8706 char name_buf[4];
8707 gdb_byte reg_buf[8];
8708 int offset, double_regnum;
8709
8710 gdb_assert (regnum >= num_regs);
8711 regnum -= num_regs;
8712
8713 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8714 /* Quad-precision register. */
8715 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8716 else
8717 {
8718 /* Single-precision register. */
8719 gdb_assert (regnum < 32);
8720
8721 /* s0 is always the least significant half of d0. */
8722 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8723 offset = (regnum & 1) ? 0 : 4;
8724 else
8725 offset = (regnum & 1) ? 4 : 0;
8726
8727 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8728 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8729 strlen (name_buf));
8730
8731 regcache->raw_read (double_regnum, reg_buf);
8732 memcpy (reg_buf + offset, buf, 4);
8733 regcache->raw_write (double_regnum, reg_buf);
8734 }
8735 }
8736
8737 static struct value *
8738 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8739 {
8740 const int *reg_p = (const int *) baton;
8741 return value_of_register (*reg_p, frame);
8742 }
8743 \f
8744 static enum gdb_osabi
8745 arm_elf_osabi_sniffer (bfd *abfd)
8746 {
8747 unsigned int elfosabi;
8748 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8749
8750 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8751
8752 if (elfosabi == ELFOSABI_ARM)
8753 /* GNU tools use this value. Check note sections in this case,
8754 as well. */
8755 bfd_map_over_sections (abfd,
8756 generic_elf_osabi_sniff_abi_tag_sections,
8757 &osabi);
8758
8759 /* Anything else will be handled by the generic ELF sniffer. */
8760 return osabi;
8761 }
8762
8763 static int
8764 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8765 struct reggroup *group)
8766 {
8767 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8768 this, FPS register belongs to save_regroup, restore_reggroup, and
8769 all_reggroup, of course. */
8770 if (regnum == ARM_FPS_REGNUM)
8771 return (group == float_reggroup
8772 || group == save_reggroup
8773 || group == restore_reggroup
8774 || group == all_reggroup);
8775 else
8776 return default_register_reggroup_p (gdbarch, regnum, group);
8777 }
8778
8779 /* For backward-compatibility we allow two 'g' packet lengths with
8780 the remote protocol depending on whether FPA registers are
8781 supplied. M-profile targets do not have FPA registers, but some
8782 stubs already exist in the wild which use a 'g' packet which
8783 supplies them albeit with dummy values. The packet format which
8784 includes FPA registers should be considered deprecated for
8785 M-profile targets. */
8786
8787 static void
8788 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8789 {
8790 if (gdbarch_tdep (gdbarch)->is_m)
8791 {
8792 const target_desc *tdesc;
8793
8794 /* If we know from the executable this is an M-profile target,
8795 cater for remote targets whose register set layout is the
8796 same as the FPA layout. */
8797 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8798 register_remote_g_packet_guess (gdbarch,
8799 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8800 tdesc);
8801
8802 /* The regular M-profile layout. */
8803 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8804 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8805 tdesc);
8806
8807 /* M-profile plus M4F VFP. */
8808 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8809 register_remote_g_packet_guess (gdbarch,
8810 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8811 tdesc);
8812 }
8813
8814 /* Otherwise we don't have a useful guess. */
8815 }
8816
8817 /* Implement the code_of_frame_writable gdbarch method. */
8818
8819 static int
8820 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8821 {
8822 if (gdbarch_tdep (gdbarch)->is_m
8823 && get_frame_type (frame) == SIGTRAMP_FRAME)
8824 {
8825 /* M-profile exception frames return to some magic PCs, where
8826 isn't writable at all. */
8827 return 0;
8828 }
8829 else
8830 return 1;
8831 }
8832
8833 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8834 to be postfixed by a version (eg armv7hl). */
8835
8836 static const char *
8837 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8838 {
8839 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8840 return "arm(v[^- ]*)?";
8841 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8842 }
8843
8844 /* Initialize the current architecture based on INFO. If possible,
8845 re-use an architecture from ARCHES, which is a list of
8846 architectures already created during this debugging session.
8847
8848 Called e.g. at program startup, when reading a core file, and when
8849 reading a binary file. */
8850
8851 static struct gdbarch *
8852 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8853 {
8854 struct gdbarch_tdep *tdep;
8855 struct gdbarch *gdbarch;
8856 struct gdbarch_list *best_arch;
8857 enum arm_abi_kind arm_abi = arm_abi_global;
8858 enum arm_float_model fp_model = arm_fp_model;
8859 struct tdesc_arch_data *tdesc_data = NULL;
8860 int i;
8861 bool is_m = false;
8862 int vfp_register_count = 0;
8863 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8864 bool have_wmmx_registers = false;
8865 bool have_neon = false;
8866 bool have_fpa_registers = true;
8867 const struct target_desc *tdesc = info.target_desc;
8868
8869 /* If we have an object to base this architecture on, try to determine
8870 its ABI. */
8871
8872 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8873 {
8874 int ei_osabi, e_flags;
8875
8876 switch (bfd_get_flavour (info.abfd))
8877 {
8878 case bfd_target_coff_flavour:
8879 /* Assume it's an old APCS-style ABI. */
8880 /* XXX WinCE? */
8881 arm_abi = ARM_ABI_APCS;
8882 break;
8883
8884 case bfd_target_elf_flavour:
8885 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8886 e_flags = elf_elfheader (info.abfd)->e_flags;
8887
8888 if (ei_osabi == ELFOSABI_ARM)
8889 {
8890 /* GNU tools used to use this value, but do not for EABI
8891 objects. There's nowhere to tag an EABI version
8892 anyway, so assume APCS. */
8893 arm_abi = ARM_ABI_APCS;
8894 }
8895 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8896 {
8897 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8898
8899 switch (eabi_ver)
8900 {
8901 case EF_ARM_EABI_UNKNOWN:
8902 /* Assume GNU tools. */
8903 arm_abi = ARM_ABI_APCS;
8904 break;
8905
8906 case EF_ARM_EABI_VER4:
8907 case EF_ARM_EABI_VER5:
8908 arm_abi = ARM_ABI_AAPCS;
8909 /* EABI binaries default to VFP float ordering.
8910 They may also contain build attributes that can
8911 be used to identify if the VFP argument-passing
8912 ABI is in use. */
8913 if (fp_model == ARM_FLOAT_AUTO)
8914 {
8915 #ifdef HAVE_ELF
8916 switch (bfd_elf_get_obj_attr_int (info.abfd,
8917 OBJ_ATTR_PROC,
8918 Tag_ABI_VFP_args))
8919 {
8920 case AEABI_VFP_args_base:
8921 /* "The user intended FP parameter/result
8922 passing to conform to AAPCS, base
8923 variant". */
8924 fp_model = ARM_FLOAT_SOFT_VFP;
8925 break;
8926 case AEABI_VFP_args_vfp:
8927 /* "The user intended FP parameter/result
8928 passing to conform to AAPCS, VFP
8929 variant". */
8930 fp_model = ARM_FLOAT_VFP;
8931 break;
8932 case AEABI_VFP_args_toolchain:
8933 /* "The user intended FP parameter/result
8934 passing to conform to tool chain-specific
8935 conventions" - we don't know any such
8936 conventions, so leave it as "auto". */
8937 break;
8938 case AEABI_VFP_args_compatible:
8939 /* "Code is compatible with both the base
8940 and VFP variants; the user did not permit
8941 non-variadic functions to pass FP
8942 parameters/results" - leave it as
8943 "auto". */
8944 break;
8945 default:
8946 /* Attribute value not mentioned in the
8947 November 2012 ABI, so leave it as
8948 "auto". */
8949 break;
8950 }
8951 #else
8952 fp_model = ARM_FLOAT_SOFT_VFP;
8953 #endif
8954 }
8955 break;
8956
8957 default:
8958 /* Leave it as "auto". */
8959 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8960 break;
8961 }
8962
8963 #ifdef HAVE_ELF
8964 /* Detect M-profile programs. This only works if the
8965 executable file includes build attributes; GCC does
8966 copy them to the executable, but e.g. RealView does
8967 not. */
8968 int attr_arch
8969 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8970 Tag_CPU_arch);
8971 int attr_profile
8972 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8973 Tag_CPU_arch_profile);
8974
8975 /* GCC specifies the profile for v6-M; RealView only
8976 specifies the profile for architectures starting with
8977 V7 (as opposed to architectures with a tag
8978 numerically greater than TAG_CPU_ARCH_V7). */
8979 if (!tdesc_has_registers (tdesc)
8980 && (attr_arch == TAG_CPU_ARCH_V6_M
8981 || attr_arch == TAG_CPU_ARCH_V6S_M
8982 || attr_profile == 'M'))
8983 is_m = true;
8984 #endif
8985 }
8986
8987 if (fp_model == ARM_FLOAT_AUTO)
8988 {
8989 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8990 {
8991 case 0:
8992 /* Leave it as "auto". Strictly speaking this case
8993 means FPA, but almost nobody uses that now, and
8994 many toolchains fail to set the appropriate bits
8995 for the floating-point model they use. */
8996 break;
8997 case EF_ARM_SOFT_FLOAT:
8998 fp_model = ARM_FLOAT_SOFT_FPA;
8999 break;
9000 case EF_ARM_VFP_FLOAT:
9001 fp_model = ARM_FLOAT_VFP;
9002 break;
9003 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9004 fp_model = ARM_FLOAT_SOFT_VFP;
9005 break;
9006 }
9007 }
9008
9009 if (e_flags & EF_ARM_BE8)
9010 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9011
9012 break;
9013
9014 default:
9015 /* Leave it as "auto". */
9016 break;
9017 }
9018 }
9019
9020 /* Check any target description for validity. */
9021 if (tdesc_has_registers (tdesc))
9022 {
9023 /* For most registers we require GDB's default names; but also allow
9024 the numeric names for sp / lr / pc, as a convenience. */
9025 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9026 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9027 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9028
9029 const struct tdesc_feature *feature;
9030 int valid_p;
9031
9032 feature = tdesc_find_feature (tdesc,
9033 "org.gnu.gdb.arm.core");
9034 if (feature == NULL)
9035 {
9036 feature = tdesc_find_feature (tdesc,
9037 "org.gnu.gdb.arm.m-profile");
9038 if (feature == NULL)
9039 return NULL;
9040 else
9041 is_m = true;
9042 }
9043
9044 tdesc_data = tdesc_data_alloc ();
9045
9046 valid_p = 1;
9047 for (i = 0; i < ARM_SP_REGNUM; i++)
9048 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9049 arm_register_names[i]);
9050 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9051 ARM_SP_REGNUM,
9052 arm_sp_names);
9053 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9054 ARM_LR_REGNUM,
9055 arm_lr_names);
9056 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9057 ARM_PC_REGNUM,
9058 arm_pc_names);
9059 if (is_m)
9060 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9061 ARM_PS_REGNUM, "xpsr");
9062 else
9063 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9064 ARM_PS_REGNUM, "cpsr");
9065
9066 if (!valid_p)
9067 {
9068 tdesc_data_cleanup (tdesc_data);
9069 return NULL;
9070 }
9071
9072 feature = tdesc_find_feature (tdesc,
9073 "org.gnu.gdb.arm.fpa");
9074 if (feature != NULL)
9075 {
9076 valid_p = 1;
9077 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9078 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9079 arm_register_names[i]);
9080 if (!valid_p)
9081 {
9082 tdesc_data_cleanup (tdesc_data);
9083 return NULL;
9084 }
9085 }
9086 else
9087 have_fpa_registers = false;
9088
9089 feature = tdesc_find_feature (tdesc,
9090 "org.gnu.gdb.xscale.iwmmxt");
9091 if (feature != NULL)
9092 {
9093 static const char *const iwmmxt_names[] = {
9094 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9095 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9096 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9097 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9098 };
9099
9100 valid_p = 1;
9101 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9102 valid_p
9103 &= tdesc_numbered_register (feature, tdesc_data, i,
9104 iwmmxt_names[i - ARM_WR0_REGNUM]);
9105
9106 /* Check for the control registers, but do not fail if they
9107 are missing. */
9108 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9109 tdesc_numbered_register (feature, tdesc_data, i,
9110 iwmmxt_names[i - ARM_WR0_REGNUM]);
9111
9112 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9113 valid_p
9114 &= tdesc_numbered_register (feature, tdesc_data, i,
9115 iwmmxt_names[i - ARM_WR0_REGNUM]);
9116
9117 if (!valid_p)
9118 {
9119 tdesc_data_cleanup (tdesc_data);
9120 return NULL;
9121 }
9122
9123 have_wmmx_registers = true;
9124 }
9125
9126 /* If we have a VFP unit, check whether the single precision registers
9127 are present. If not, then we will synthesize them as pseudo
9128 registers. */
9129 feature = tdesc_find_feature (tdesc,
9130 "org.gnu.gdb.arm.vfp");
9131 if (feature != NULL)
9132 {
9133 static const char *const vfp_double_names[] = {
9134 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9135 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9136 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9137 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9138 };
9139
9140 /* Require the double precision registers. There must be either
9141 16 or 32. */
9142 valid_p = 1;
9143 for (i = 0; i < 32; i++)
9144 {
9145 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9146 ARM_D0_REGNUM + i,
9147 vfp_double_names[i]);
9148 if (!valid_p)
9149 break;
9150 }
9151 if (!valid_p && i == 16)
9152 valid_p = 1;
9153
9154 /* Also require FPSCR. */
9155 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9156 ARM_FPSCR_REGNUM, "fpscr");
9157 if (!valid_p)
9158 {
9159 tdesc_data_cleanup (tdesc_data);
9160 return NULL;
9161 }
9162
9163 if (tdesc_unnumbered_register (feature, "s0") == 0)
9164 have_vfp_pseudos = true;
9165
9166 vfp_register_count = i;
9167
9168 /* If we have VFP, also check for NEON. The architecture allows
9169 NEON without VFP (integer vector operations only), but GDB
9170 does not support that. */
9171 feature = tdesc_find_feature (tdesc,
9172 "org.gnu.gdb.arm.neon");
9173 if (feature != NULL)
9174 {
9175 /* NEON requires 32 double-precision registers. */
9176 if (i != 32)
9177 {
9178 tdesc_data_cleanup (tdesc_data);
9179 return NULL;
9180 }
9181
9182 /* If there are quad registers defined by the stub, use
9183 their type; otherwise (normally) provide them with
9184 the default type. */
9185 if (tdesc_unnumbered_register (feature, "q0") == 0)
9186 have_neon_pseudos = true;
9187
9188 have_neon = true;
9189 }
9190 }
9191 }
9192
9193 /* If there is already a candidate, use it. */
9194 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9195 best_arch != NULL;
9196 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9197 {
9198 if (arm_abi != ARM_ABI_AUTO
9199 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9200 continue;
9201
9202 if (fp_model != ARM_FLOAT_AUTO
9203 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9204 continue;
9205
9206 /* There are various other properties in tdep that we do not
9207 need to check here: those derived from a target description,
9208 since gdbarches with a different target description are
9209 automatically disqualified. */
9210
9211 /* Do check is_m, though, since it might come from the binary. */
9212 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9213 continue;
9214
9215 /* Found a match. */
9216 break;
9217 }
9218
9219 if (best_arch != NULL)
9220 {
9221 if (tdesc_data != NULL)
9222 tdesc_data_cleanup (tdesc_data);
9223 return best_arch->gdbarch;
9224 }
9225
9226 tdep = XCNEW (struct gdbarch_tdep);
9227 gdbarch = gdbarch_alloc (&info, tdep);
9228
9229 /* Record additional information about the architecture we are defining.
9230 These are gdbarch discriminators, like the OSABI. */
9231 tdep->arm_abi = arm_abi;
9232 tdep->fp_model = fp_model;
9233 tdep->is_m = is_m;
9234 tdep->have_fpa_registers = have_fpa_registers;
9235 tdep->have_wmmx_registers = have_wmmx_registers;
9236 gdb_assert (vfp_register_count == 0
9237 || vfp_register_count == 16
9238 || vfp_register_count == 32);
9239 tdep->vfp_register_count = vfp_register_count;
9240 tdep->have_vfp_pseudos = have_vfp_pseudos;
9241 tdep->have_neon_pseudos = have_neon_pseudos;
9242 tdep->have_neon = have_neon;
9243
9244 arm_register_g_packet_guesses (gdbarch);
9245
9246 /* Breakpoints. */
9247 switch (info.byte_order_for_code)
9248 {
9249 case BFD_ENDIAN_BIG:
9250 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9251 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9252 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9253 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9254
9255 break;
9256
9257 case BFD_ENDIAN_LITTLE:
9258 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9259 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9260 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9261 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9262
9263 break;
9264
9265 default:
9266 internal_error (__FILE__, __LINE__,
9267 _("arm_gdbarch_init: bad byte order for float format"));
9268 }
9269
9270 /* On ARM targets char defaults to unsigned. */
9271 set_gdbarch_char_signed (gdbarch, 0);
9272
9273 /* wchar_t is unsigned under the AAPCS. */
9274 if (tdep->arm_abi == ARM_ABI_AAPCS)
9275 set_gdbarch_wchar_signed (gdbarch, 0);
9276 else
9277 set_gdbarch_wchar_signed (gdbarch, 1);
9278
9279 /* Compute type alignment. */
9280 set_gdbarch_type_align (gdbarch, arm_type_align);
9281
9282 /* Note: for displaced stepping, this includes the breakpoint, and one word
9283 of additional scratch space. This setting isn't used for anything beside
9284 displaced stepping at present. */
9285 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9286
9287 /* This should be low enough for everything. */
9288 tdep->lowest_pc = 0x20;
9289 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9290
9291 /* The default, for both APCS and AAPCS, is to return small
9292 structures in registers. */
9293 tdep->struct_return = reg_struct_return;
9294
9295 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9296 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9297
9298 if (is_m)
9299 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9300
9301 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9302
9303 frame_base_set_default (gdbarch, &arm_normal_base);
9304
9305 /* Address manipulation. */
9306 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9307
9308 /* Advance PC across function entry code. */
9309 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9310
9311 /* Detect whether PC is at a point where the stack has been destroyed. */
9312 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9313
9314 /* Skip trampolines. */
9315 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9316
9317 /* The stack grows downward. */
9318 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9319
9320 /* Breakpoint manipulation. */
9321 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9322 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9323 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9324 arm_breakpoint_kind_from_current_state);
9325
9326 /* Information about registers, etc. */
9327 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9328 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9329 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9330 set_gdbarch_register_type (gdbarch, arm_register_type);
9331 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9332
9333 /* This "info float" is FPA-specific. Use the generic version if we
9334 do not have FPA. */
9335 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9336 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9337
9338 /* Internal <-> external register number maps. */
9339 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9340 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9341
9342 set_gdbarch_register_name (gdbarch, arm_register_name);
9343
9344 /* Returning results. */
9345 set_gdbarch_return_value (gdbarch, arm_return_value);
9346
9347 /* Disassembly. */
9348 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9349
9350 /* Minsymbol frobbing. */
9351 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9352 set_gdbarch_coff_make_msymbol_special (gdbarch,
9353 arm_coff_make_msymbol_special);
9354 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9355
9356 /* Thumb-2 IT block support. */
9357 set_gdbarch_adjust_breakpoint_address (gdbarch,
9358 arm_adjust_breakpoint_address);
9359
9360 /* Virtual tables. */
9361 set_gdbarch_vbit_in_delta (gdbarch, 1);
9362
9363 /* Hook in the ABI-specific overrides, if they have been registered. */
9364 gdbarch_init_osabi (info, gdbarch);
9365
9366 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9367
9368 /* Add some default predicates. */
9369 if (is_m)
9370 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9371 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9372 dwarf2_append_unwinders (gdbarch);
9373 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9374 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9375 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9376
9377 /* Now we have tuned the configuration, set a few final things,
9378 based on what the OS ABI has told us. */
9379
9380 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9381 binaries are always marked. */
9382 if (tdep->arm_abi == ARM_ABI_AUTO)
9383 tdep->arm_abi = ARM_ABI_APCS;
9384
9385 /* Watchpoints are not steppable. */
9386 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9387
9388 /* We used to default to FPA for generic ARM, but almost nobody
9389 uses that now, and we now provide a way for the user to force
9390 the model. So default to the most useful variant. */
9391 if (tdep->fp_model == ARM_FLOAT_AUTO)
9392 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9393
9394 if (tdep->jb_pc >= 0)
9395 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9396
9397 /* Floating point sizes and format. */
9398 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9399 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9400 {
9401 set_gdbarch_double_format
9402 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9403 set_gdbarch_long_double_format
9404 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9405 }
9406 else
9407 {
9408 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9409 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9410 }
9411
9412 if (have_vfp_pseudos)
9413 {
9414 /* NOTE: These are the only pseudo registers used by
9415 the ARM target at the moment. If more are added, a
9416 little more care in numbering will be needed. */
9417
9418 int num_pseudos = 32;
9419 if (have_neon_pseudos)
9420 num_pseudos += 16;
9421 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9422 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9423 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9424 }
9425
9426 if (tdesc_data)
9427 {
9428 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9429
9430 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9431
9432 /* Override tdesc_register_type to adjust the types of VFP
9433 registers for NEON. */
9434 set_gdbarch_register_type (gdbarch, arm_register_type);
9435 }
9436
9437 /* Add standard register aliases. We add aliases even for those
9438 names which are used by the current architecture - it's simpler,
9439 and does no harm, since nothing ever lists user registers. */
9440 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9441 user_reg_add (gdbarch, arm_register_aliases[i].name,
9442 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9443
9444 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9445 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9446
9447 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9448
9449 return gdbarch;
9450 }
9451
9452 static void
9453 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9454 {
9455 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9456
9457 if (tdep == NULL)
9458 return;
9459
9460 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9461 (int) tdep->fp_model);
9462 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9463 (int) tdep->have_fpa_registers);
9464 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9465 (int) tdep->have_wmmx_registers);
9466 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9467 (int) tdep->vfp_register_count);
9468 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9469 (int) tdep->have_vfp_pseudos);
9470 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9471 (int) tdep->have_neon_pseudos);
9472 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9473 (int) tdep->have_neon);
9474 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9475 (unsigned long) tdep->lowest_pc);
9476 }
9477
#if GDB_SELF_TEST
namespace selftests
{
/* Forward declaration only; the test is registered from
   _initialize_arm_tdep below.  */
static void arm_record_test (void);
}
#endif
9484
9485 void _initialize_arm_tdep ();
9486 void
9487 _initialize_arm_tdep ()
9488 {
9489 long length;
9490 int i, j;
9491 char regdesc[1024], *rdptr = regdesc;
9492 size_t rest = sizeof (regdesc);
9493
9494 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9495
9496 /* Add ourselves to objfile event chain. */
9497 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9498
9499 /* Register an ELF OS ABI sniffer for ARM binaries. */
9500 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9501 bfd_target_elf_flavour,
9502 arm_elf_osabi_sniffer);
9503
9504 /* Add root prefix command for all "set arm"/"show arm" commands. */
9505 add_basic_prefix_cmd ("arm", no_class,
9506 _("Various ARM-specific commands."),
9507 &setarmcmdlist, "set arm ", 0, &setlist);
9508
9509 add_show_prefix_cmd ("arm", no_class,
9510 _("Various ARM-specific commands."),
9511 &showarmcmdlist, "show arm ", 0, &showlist);
9512
9513
9514 arm_disassembler_options = xstrdup ("reg-names-std");
9515 const disasm_options_t *disasm_options
9516 = &disassembler_options_arm ()->options;
9517 int num_disassembly_styles = 0;
9518 for (i = 0; disasm_options->name[i] != NULL; i++)
9519 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9520 num_disassembly_styles++;
9521
9522 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9523 valid_disassembly_styles = XNEWVEC (const char *,
9524 num_disassembly_styles + 1);
9525 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9526 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9527 {
9528 size_t offset = strlen ("reg-names-");
9529 const char *style = disasm_options->name[i];
9530 valid_disassembly_styles[j++] = &style[offset];
9531 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9532 disasm_options->description[i]);
9533 rdptr += length;
9534 rest -= length;
9535 }
9536 /* Mark the end of valid options. */
9537 valid_disassembly_styles[num_disassembly_styles] = NULL;
9538
9539 /* Create the help text. */
9540 std::string helptext = string_printf ("%s%s%s",
9541 _("The valid values are:\n"),
9542 regdesc,
9543 _("The default is \"std\"."));
9544
9545 add_setshow_enum_cmd("disassembler", no_class,
9546 valid_disassembly_styles, &disassembly_style,
9547 _("Set the disassembly style."),
9548 _("Show the disassembly style."),
9549 helptext.c_str (),
9550 set_disassembly_style_sfunc,
9551 show_disassembly_style_sfunc,
9552 &setarmcmdlist, &showarmcmdlist);
9553
9554 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9555 _("Set usage of ARM 32-bit mode."),
9556 _("Show usage of ARM 32-bit mode."),
9557 _("When off, a 26-bit PC will be used."),
9558 NULL,
9559 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9560 mode is %s. */
9561 &setarmcmdlist, &showarmcmdlist);
9562
9563 /* Add a command to allow the user to force the FPU model. */
9564 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9565 _("Set the floating point type."),
9566 _("Show the floating point type."),
9567 _("auto - Determine the FP typefrom the OS-ABI.\n\
9568 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9569 fpa - FPA co-processor (GCC compiled).\n\
9570 softvfp - Software FP with pure-endian doubles.\n\
9571 vfp - VFP co-processor."),
9572 set_fp_model_sfunc, show_fp_model,
9573 &setarmcmdlist, &showarmcmdlist);
9574
9575 /* Add a command to allow the user to force the ABI. */
9576 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9577 _("Set the ABI."),
9578 _("Show the ABI."),
9579 NULL, arm_set_abi, arm_show_abi,
9580 &setarmcmdlist, &showarmcmdlist);
9581
9582 /* Add two commands to allow the user to force the assumed
9583 execution mode. */
9584 add_setshow_enum_cmd ("fallback-mode", class_support,
9585 arm_mode_strings, &arm_fallback_mode_string,
9586 _("Set the mode assumed when symbols are unavailable."),
9587 _("Show the mode assumed when symbols are unavailable."),
9588 NULL, NULL, arm_show_fallback_mode,
9589 &setarmcmdlist, &showarmcmdlist);
9590 add_setshow_enum_cmd ("force-mode", class_support,
9591 arm_mode_strings, &arm_force_mode_string,
9592 _("Set the mode assumed even when symbols are available."),
9593 _("Show the mode assumed even when symbols are available."),
9594 NULL, NULL, arm_show_force_mode,
9595 &setarmcmdlist, &showarmcmdlist);
9596
9597 /* Debugging flag. */
9598 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9599 _("Set ARM debugging."),
9600 _("Show ARM debugging."),
9601 _("When on, arm-specific debugging is enabled."),
9602 NULL,
9603 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9604 &setdebuglist, &showdebuglist);
9605
9606 #if GDB_SELF_TEST
9607 selftests::register_test ("arm-record", selftests::arm_record_test);
9608 #endif
9609
9610 }
9611
/* ARM-reversible process record data structures.  */

/* Sizes, in bytes, of the instruction encodings the recorder must
   decode: 32-bit ARM, 16-bit Thumb, and 32-bit Thumb-2.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Allocate an array of LENGTH uint32_t register numbers into REGS and
   copy them from RECORD_BUF.  No-op when LENGTH is zero.  The caller
   owns (and must eventually free) the XNEWVEC allocation.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Allocate an array of LENGTH arm_mem_r records into MEMS and copy
   them from RECORD_BUF.  No-op when LENGTH is zero.  Note the copy
   destination is &MEMS->len, i.e. the first member of the first
   element -- equivalent to &MEMS[0] since `len' is the leading field
   of struct arm_mem_r (defined below).  Caller owns the allocation.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
          while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)

/* ARM memory record structure.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};

/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
9678
9679
/* Checks ARM SBZ and SBO mandatory fields.

   Verify the LEN-bit field of INSN starting at 1-based bit position
   BIT_NUM: when SBO is non-zero the field must be all ones ("should
   be one"); when SBO is zero the field must be all zeros ("should be
   zero").  Returns 1 when the constraint holds, 0 otherwise.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones;

  /* An empty field trivially satisfies either constraint; check before
     extracting so bits () is never called with a bogus range.  */
  if (!len)
    return 1;

  ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));

  /* For an SBZ check, complement the field: in both cases the loop
     below then verifies that every remaining bit of ONES is set.  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      /* Test the low bit each iteration.  The previous code tested
	 "ones & sbo", which for SBZ (sbo == 0) rejected every field,
	 valid or not, since the complement of a zero field is
	 non-zero.
	 NOTE(review): for SBO, a field whose high bits are zero still
	 passes, because the loop ends once ONES shifts to zero --
	 behavior preserved from the original.  */
      if (!(ones & 1u))
	return 0;
      ones = ones >> 1;
    }
  return 1;
}
9703
/* Result codes returned by the ARM process-record routines.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Selects which misc store encoding arm_record_strx handles:
   store-halfword (STRH) or store-doubleword (STRD).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Instruction set of the insn being recorded.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
9722
9723
9724 static int
9725 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9726 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9727 {
9728
9729 struct regcache *reg_cache = arm_insn_r->regcache;
9730 ULONGEST u_regval[2]= {0};
9731
9732 uint32_t reg_src1 = 0, reg_src2 = 0;
9733 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9734
9735 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9736 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9737
9738 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9739 {
9740 /* 1) Handle misc store, immediate offset. */
9741 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9742 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9743 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9744 regcache_raw_read_unsigned (reg_cache, reg_src1,
9745 &u_regval[0]);
9746 if (ARM_PC_REGNUM == reg_src1)
9747 {
9748 /* If R15 was used as Rn, hence current PC+8. */
9749 u_regval[0] = u_regval[0] + 8;
9750 }
9751 offset_8 = (immed_high << 4) | immed_low;
9752 /* Calculate target store address. */
9753 if (14 == arm_insn_r->opcode)
9754 {
9755 tgt_mem_addr = u_regval[0] + offset_8;
9756 }
9757 else
9758 {
9759 tgt_mem_addr = u_regval[0] - offset_8;
9760 }
9761 if (ARM_RECORD_STRH == str_type)
9762 {
9763 record_buf_mem[0] = 2;
9764 record_buf_mem[1] = tgt_mem_addr;
9765 arm_insn_r->mem_rec_count = 1;
9766 }
9767 else if (ARM_RECORD_STRD == str_type)
9768 {
9769 record_buf_mem[0] = 4;
9770 record_buf_mem[1] = tgt_mem_addr;
9771 record_buf_mem[2] = 4;
9772 record_buf_mem[3] = tgt_mem_addr + 4;
9773 arm_insn_r->mem_rec_count = 2;
9774 }
9775 }
9776 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9777 {
9778 /* 2) Store, register offset. */
9779 /* Get Rm. */
9780 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9781 /* Get Rn. */
9782 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9783 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9784 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9785 if (15 == reg_src2)
9786 {
9787 /* If R15 was used as Rn, hence current PC+8. */
9788 u_regval[0] = u_regval[0] + 8;
9789 }
9790 /* Calculate target store address, Rn +/- Rm, register offset. */
9791 if (12 == arm_insn_r->opcode)
9792 {
9793 tgt_mem_addr = u_regval[0] + u_regval[1];
9794 }
9795 else
9796 {
9797 tgt_mem_addr = u_regval[1] - u_regval[0];
9798 }
9799 if (ARM_RECORD_STRH == str_type)
9800 {
9801 record_buf_mem[0] = 2;
9802 record_buf_mem[1] = tgt_mem_addr;
9803 arm_insn_r->mem_rec_count = 1;
9804 }
9805 else if (ARM_RECORD_STRD == str_type)
9806 {
9807 record_buf_mem[0] = 4;
9808 record_buf_mem[1] = tgt_mem_addr;
9809 record_buf_mem[2] = 4;
9810 record_buf_mem[3] = tgt_mem_addr + 4;
9811 arm_insn_r->mem_rec_count = 2;
9812 }
9813 }
9814 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9815 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9816 {
9817 /* 3) Store, immediate pre-indexed. */
9818 /* 5) Store, immediate post-indexed. */
9819 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9820 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9821 offset_8 = (immed_high << 4) | immed_low;
9822 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9823 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9824 /* Calculate target store address, Rn +/- Rm, register offset. */
9825 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9826 {
9827 tgt_mem_addr = u_regval[0] + offset_8;
9828 }
9829 else
9830 {
9831 tgt_mem_addr = u_regval[0] - offset_8;
9832 }
9833 if (ARM_RECORD_STRH == str_type)
9834 {
9835 record_buf_mem[0] = 2;
9836 record_buf_mem[1] = tgt_mem_addr;
9837 arm_insn_r->mem_rec_count = 1;
9838 }
9839 else if (ARM_RECORD_STRD == str_type)
9840 {
9841 record_buf_mem[0] = 4;
9842 record_buf_mem[1] = tgt_mem_addr;
9843 record_buf_mem[2] = 4;
9844 record_buf_mem[3] = tgt_mem_addr + 4;
9845 arm_insn_r->mem_rec_count = 2;
9846 }
9847 /* Record Rn also as it changes. */
9848 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9849 arm_insn_r->reg_rec_count = 1;
9850 }
9851 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9852 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9853 {
9854 /* 4) Store, register pre-indexed. */
9855 /* 6) Store, register post -indexed. */
9856 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9857 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9858 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9859 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9860 /* Calculate target store address, Rn +/- Rm, register offset. */
9861 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9862 {
9863 tgt_mem_addr = u_regval[0] + u_regval[1];
9864 }
9865 else
9866 {
9867 tgt_mem_addr = u_regval[1] - u_regval[0];
9868 }
9869 if (ARM_RECORD_STRH == str_type)
9870 {
9871 record_buf_mem[0] = 2;
9872 record_buf_mem[1] = tgt_mem_addr;
9873 arm_insn_r->mem_rec_count = 1;
9874 }
9875 else if (ARM_RECORD_STRD == str_type)
9876 {
9877 record_buf_mem[0] = 4;
9878 record_buf_mem[1] = tgt_mem_addr;
9879 record_buf_mem[2] = 4;
9880 record_buf_mem[3] = tgt_mem_addr + 4;
9881 arm_insn_r->mem_rec_count = 2;
9882 }
9883 /* Record Rn also as it changes. */
9884 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9885 arm_insn_r->reg_rec_count = 1;
9886 }
9887 return 0;
9888 }
9889
9890 /* Handling ARM extension space insns. */
9891
9892 static int
9893 arm_record_extension_space (insn_decode_record *arm_insn_r)
9894 {
9895 int ret = 0; /* Return value: -1:record failure ; 0:success */
9896 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9897 uint32_t record_buf[8], record_buf_mem[8];
9898 uint32_t reg_src1 = 0;
9899 struct regcache *reg_cache = arm_insn_r->regcache;
9900 ULONGEST u_regval = 0;
9901
9902 gdb_assert (!INSN_RECORDED(arm_insn_r));
9903 /* Handle unconditional insn extension space. */
9904
9905 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9906 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9907 if (arm_insn_r->cond)
9908 {
9909 /* PLD has no affect on architectural state, it just affects
9910 the caches. */
9911 if (5 == ((opcode1 & 0xE0) >> 5))
9912 {
9913 /* BLX(1) */
9914 record_buf[0] = ARM_PS_REGNUM;
9915 record_buf[1] = ARM_LR_REGNUM;
9916 arm_insn_r->reg_rec_count = 2;
9917 }
9918 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9919 }
9920
9921
9922 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9923 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9924 {
9925 ret = -1;
9926 /* Undefined instruction on ARM V5; need to handle if later
9927 versions define it. */
9928 }
9929
9930 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9931 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9932 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9933
9934 /* Handle arithmetic insn extension space. */
9935 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9936 && !INSN_RECORDED(arm_insn_r))
9937 {
9938 /* Handle MLA(S) and MUL(S). */
9939 if (in_inclusive_range (insn_op1, 0U, 3U))
9940 {
9941 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9942 record_buf[1] = ARM_PS_REGNUM;
9943 arm_insn_r->reg_rec_count = 2;
9944 }
9945 else if (in_inclusive_range (insn_op1, 4U, 15U))
9946 {
9947 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9948 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9949 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9950 record_buf[2] = ARM_PS_REGNUM;
9951 arm_insn_r->reg_rec_count = 3;
9952 }
9953 }
9954
9955 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9956 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9957 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9958
9959 /* Handle control insn extension space. */
9960
9961 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9962 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9963 {
9964 if (!bit (arm_insn_r->arm_insn,25))
9965 {
9966 if (!bits (arm_insn_r->arm_insn, 4, 7))
9967 {
9968 if ((0 == insn_op1) || (2 == insn_op1))
9969 {
9970 /* MRS. */
9971 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9972 arm_insn_r->reg_rec_count = 1;
9973 }
9974 else if (1 == insn_op1)
9975 {
9976 /* CSPR is going to be changed. */
9977 record_buf[0] = ARM_PS_REGNUM;
9978 arm_insn_r->reg_rec_count = 1;
9979 }
9980 else if (3 == insn_op1)
9981 {
9982 /* SPSR is going to be changed. */
9983 /* We need to get SPSR value, which is yet to be done. */
9984 return -1;
9985 }
9986 }
9987 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9988 {
9989 if (1 == insn_op1)
9990 {
9991 /* BX. */
9992 record_buf[0] = ARM_PS_REGNUM;
9993 arm_insn_r->reg_rec_count = 1;
9994 }
9995 else if (3 == insn_op1)
9996 {
9997 /* CLZ. */
9998 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9999 arm_insn_r->reg_rec_count = 1;
10000 }
10001 }
10002 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10003 {
10004 /* BLX. */
10005 record_buf[0] = ARM_PS_REGNUM;
10006 record_buf[1] = ARM_LR_REGNUM;
10007 arm_insn_r->reg_rec_count = 2;
10008 }
10009 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10010 {
10011 /* QADD, QSUB, QDADD, QDSUB */
10012 record_buf[0] = ARM_PS_REGNUM;
10013 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10014 arm_insn_r->reg_rec_count = 2;
10015 }
10016 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10017 {
10018 /* BKPT. */
10019 record_buf[0] = ARM_PS_REGNUM;
10020 record_buf[1] = ARM_LR_REGNUM;
10021 arm_insn_r->reg_rec_count = 2;
10022
10023 /* Save SPSR also;how? */
10024 return -1;
10025 }
10026 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10027 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10028 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10029 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10030 )
10031 {
10032 if (0 == insn_op1 || 1 == insn_op1)
10033 {
10034 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10035 /* We dont do optimization for SMULW<y> where we
10036 need only Rd. */
10037 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10038 record_buf[1] = ARM_PS_REGNUM;
10039 arm_insn_r->reg_rec_count = 2;
10040 }
10041 else if (2 == insn_op1)
10042 {
10043 /* SMLAL<x><y>. */
10044 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10045 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10046 arm_insn_r->reg_rec_count = 2;
10047 }
10048 else if (3 == insn_op1)
10049 {
10050 /* SMUL<x><y>. */
10051 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10052 arm_insn_r->reg_rec_count = 1;
10053 }
10054 }
10055 }
10056 else
10057 {
10058 /* MSR : immediate form. */
10059 if (1 == insn_op1)
10060 {
10061 /* CSPR is going to be changed. */
10062 record_buf[0] = ARM_PS_REGNUM;
10063 arm_insn_r->reg_rec_count = 1;
10064 }
10065 else if (3 == insn_op1)
10066 {
10067 /* SPSR is going to be changed. */
10068 /* we need to get SPSR value, which is yet to be done */
10069 return -1;
10070 }
10071 }
10072 }
10073
10074 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10075 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10076 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10077
10078 /* Handle load/store insn extension space. */
10079
10080 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10081 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10082 && !INSN_RECORDED(arm_insn_r))
10083 {
10084 /* SWP/SWPB. */
10085 if (0 == insn_op1)
10086 {
10087 /* These insn, changes register and memory as well. */
10088 /* SWP or SWPB insn. */
10089 /* Get memory address given by Rn. */
10090 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10091 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10092 /* SWP insn ?, swaps word. */
10093 if (8 == arm_insn_r->opcode)
10094 {
10095 record_buf_mem[0] = 4;
10096 }
10097 else
10098 {
10099 /* SWPB insn, swaps only byte. */
10100 record_buf_mem[0] = 1;
10101 }
10102 record_buf_mem[1] = u_regval;
10103 arm_insn_r->mem_rec_count = 1;
10104 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10105 arm_insn_r->reg_rec_count = 1;
10106 }
10107 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10108 {
10109 /* STRH. */
10110 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10111 ARM_RECORD_STRH);
10112 }
10113 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10114 {
10115 /* LDRD. */
10116 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10117 record_buf[1] = record_buf[0] + 1;
10118 arm_insn_r->reg_rec_count = 2;
10119 }
10120 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10121 {
10122 /* STRD. */
10123 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10124 ARM_RECORD_STRD);
10125 }
10126 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10127 {
10128 /* LDRH, LDRSB, LDRSH. */
10129 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10130 arm_insn_r->reg_rec_count = 1;
10131 }
10132
10133 }
10134
10135 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10136 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10137 && !INSN_RECORDED(arm_insn_r))
10138 {
10139 ret = -1;
10140 /* Handle coprocessor insn extension space. */
10141 }
10142
10143 /* To be done for ARMv5 and later; as of now we return -1. */
10144 if (-1 == ret)
10145 return ret;
10146
10147 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10148 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10149
10150 return ret;
10151 }
10152
/* Handling opcode 000 insns.  Decode the ARM "data-processing and
   miscellaneous" instruction group for process record: collect in
   RECORD_BUF every register, and in RECORD_BUF_MEM every memory span,
   that the instruction is about to overwrite, then register them via
   REG_ALLOC / MEM_ALLOC so the change can be undone during reverse
   execution.  Returns 0 on success, -1 for insns that cannot be
   recorded yet (e.g. anything that would require saving the SPSR).  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  /* Cache the major decode fields: op (bits 21-24), op2 (bits 4-7)
     and the wider op1 field including the S/L bit (bits 20-24).  */
  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  /* (op1 & 0x19) == 0x10 selects the "miscellaneous" encodings
     (10xx0); everything else here is ordinary data-processing.  */
  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and Data-processing (register-shifted
	 register).  */
      /* In every shifter-operand mode the insn modifies the destination
	 register (bits 12-15) and may update the condition flags, so
	 record Rd and CPSR.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions.  */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
	  && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BLX, branch and link/exchange.  */
	  if (9 == arm_insn_r->opcode)
	    {
	      /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm,
		 and R14 stores the return address.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	}
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
	{
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state, disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* If the user hits the breakpoint and types reverse, we need to
	     go back with the previous CPSR and Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;

	  /* Save SPSR also; how?  Not implemented, so refuse to record.  */
	  return -1;
	}
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BX, branch and exchange.  */
	  /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
	{
	  /* Count leading zeros: CLZ.  Only Rd (bits 12-15) changes.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
	       && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
	{
	  /* Handle MRS insn: status register is copied into Rd.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate.  */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
	{
	  /* Handle MLA and MUL: Rd is in bits 16-19; flags may change.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
	{
	  /* Handle SMLAL, SMULL, UMLAL, UMULL: both halves of the
	     64-bit result (RdHi, RdLo) plus the flags.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 > 0x10)
    {
      /* Synchronization primitives.  */

      /* Handling SWP, SWPB.  */
      /* These insns change a register and memory as well.  */
      /* SWP or SWPB insn.  */

      /* Get memory address given by Rn.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP insn?  Swaps a word.  */
      if (8 == arm_insn_r->opcode)
	{
	  record_buf_mem[0] = 4;
	}
      else
	{
	  /* SWPB insn, swaps only a byte.  */
	  record_buf_mem[0] = 1;
	}
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
	   || 15 == arm_insn_r->decode)
    {
      if ((opcode1 & 0x12) == 2)
	{
	  /* Extra load/store (unprivileged) -- not supported yet.  */
	  return -1;
	}
      else
	{
	  /* Extra load/store: halfword, signed byte/halfword and
	     doubleword forms, keyed on op2 bits 5-6.  */
	  switch (bits (arm_insn_r->arm_insn, 5, 6))
	    {
	    case 1:
	      if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
		{
		  /* STRH (register), STRH (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRH);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRH (immediate), LDRH (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRH (immediate); rn == 15 is the literal form,
			 which has no writeback.  */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 2:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* LDRD (register): loads the pair Rt, Rt+1.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = record_buf[0] + 1;
		  arm_insn_r->reg_rec_count = 2;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSB (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
		{
		  /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
		     LDRSB (literal) */
		  /* NOTE(review): for the LDRD forms, Rt+1 is also
		     written but only Rt is recorded here (unlike the
		     register form above) -- verify.  */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRD (immediate), LDRSB (immediate); rn == 15 is
			 the literal form, which has no writeback.  */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 3:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* STRD (register) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4)
		{
		  /* STRD (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRSH (immediate), LDRSH (literal) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else
		return -1;
	      break;
	    default:
	      return -1;
	    }
	}
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10445
10446 /* Handling opcode 001 insns. */
10447
10448 static int
10449 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10450 {
10451 uint32_t record_buf[8], record_buf_mem[8];
10452
10453 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10454 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10455
10456 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10457 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10458 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10459 )
10460 {
10461 /* Handle MSR insn. */
10462 if (9 == arm_insn_r->opcode)
10463 {
10464 /* CSPR is going to be changed. */
10465 record_buf[0] = ARM_PS_REGNUM;
10466 arm_insn_r->reg_rec_count = 1;
10467 }
10468 else
10469 {
10470 /* SPSR is going to be changed. */
10471 }
10472 }
10473 else if (arm_insn_r->opcode <= 15)
10474 {
10475 /* Normal data processing insns. */
10476 /* Out of 11 shifter operands mode, all the insn modifies destination
10477 register, which is specified by 13-16 decode. */
10478 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10479 record_buf[1] = ARM_PS_REGNUM;
10480 arm_insn_r->reg_rec_count = 2;
10481 }
10482 else
10483 {
10484 return -1;
10485 }
10486
10487 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10488 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10489 return 0;
10490 }
10491
10492 static int
10493 arm_record_media (insn_decode_record *arm_insn_r)
10494 {
10495 uint32_t record_buf[8];
10496
10497 switch (bits (arm_insn_r->arm_insn, 22, 24))
10498 {
10499 case 0:
10500 /* Parallel addition and subtraction, signed */
10501 case 1:
10502 /* Parallel addition and subtraction, unsigned */
10503 case 2:
10504 case 3:
10505 /* Packing, unpacking, saturation and reversal */
10506 {
10507 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10508
10509 record_buf[arm_insn_r->reg_rec_count++] = rd;
10510 }
10511 break;
10512
10513 case 4:
10514 case 5:
10515 /* Signed multiplies */
10516 {
10517 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10518 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10519
10520 record_buf[arm_insn_r->reg_rec_count++] = rd;
10521 if (op1 == 0x0)
10522 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10523 else if (op1 == 0x4)
10524 record_buf[arm_insn_r->reg_rec_count++]
10525 = bits (arm_insn_r->arm_insn, 12, 15);
10526 }
10527 break;
10528
10529 case 6:
10530 {
10531 if (bit (arm_insn_r->arm_insn, 21)
10532 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10533 {
10534 /* SBFX */
10535 record_buf[arm_insn_r->reg_rec_count++]
10536 = bits (arm_insn_r->arm_insn, 12, 15);
10537 }
10538 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10539 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10540 {
10541 /* USAD8 and USADA8 */
10542 record_buf[arm_insn_r->reg_rec_count++]
10543 = bits (arm_insn_r->arm_insn, 16, 19);
10544 }
10545 }
10546 break;
10547
10548 case 7:
10549 {
10550 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10551 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10552 {
10553 /* Permanently UNDEFINED */
10554 return -1;
10555 }
10556 else
10557 {
10558 /* BFC, BFI and UBFX */
10559 record_buf[arm_insn_r->reg_rec_count++]
10560 = bits (arm_insn_r->arm_insn, 12, 15);
10561 }
10562 }
10563 break;
10564
10565 default:
10566 return -1;
10567 }
10568
10569 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10570
10571 return 0;
10572 }
10573
10574 /* Handle ARM mode instructions with opcode 010. */
10575
10576 static int
10577 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10578 {
10579 struct regcache *reg_cache = arm_insn_r->regcache;
10580
10581 uint32_t reg_base , reg_dest;
10582 uint32_t offset_12, tgt_mem_addr;
10583 uint32_t record_buf[8], record_buf_mem[8];
10584 unsigned char wback;
10585 ULONGEST u_regval;
10586
10587 /* Calculate wback. */
10588 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10589 || (bit (arm_insn_r->arm_insn, 21) == 1);
10590
10591 arm_insn_r->reg_rec_count = 0;
10592 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10593
10594 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10595 {
10596 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10597 and LDRT. */
10598
10599 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10600 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10601
10602 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10603 preceeds a LDR instruction having R15 as reg_base, it
10604 emulates a branch and link instruction, and hence we need to save
10605 CPSR and PC as well. */
10606 if (ARM_PC_REGNUM == reg_dest)
10607 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10608
10609 /* If wback is true, also save the base register, which is going to be
10610 written to. */
10611 if (wback)
10612 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10613 }
10614 else
10615 {
10616 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10617
10618 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10619 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10620
10621 /* Handle bit U. */
10622 if (bit (arm_insn_r->arm_insn, 23))
10623 {
10624 /* U == 1: Add the offset. */
10625 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10626 }
10627 else
10628 {
10629 /* U == 0: subtract the offset. */
10630 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10631 }
10632
10633 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10634 bytes. */
10635 if (bit (arm_insn_r->arm_insn, 22))
10636 {
10637 /* STRB and STRBT: 1 byte. */
10638 record_buf_mem[0] = 1;
10639 }
10640 else
10641 {
10642 /* STR and STRT: 4 bytes. */
10643 record_buf_mem[0] = 4;
10644 }
10645
10646 /* Handle bit P. */
10647 if (bit (arm_insn_r->arm_insn, 24))
10648 record_buf_mem[1] = tgt_mem_addr;
10649 else
10650 record_buf_mem[1] = (uint32_t) u_regval;
10651
10652 arm_insn_r->mem_rec_count = 1;
10653
10654 /* If wback is true, also save the base register, which is going to be
10655 written to. */
10656 if (wback)
10657 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10658 }
10659
10660 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10661 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10662 return 0;
10663 }
10664
10665 /* Handling opcode 011 insns. */
10666
10667 static int
10668 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10669 {
10670 struct regcache *reg_cache = arm_insn_r->regcache;
10671
10672 uint32_t shift_imm = 0;
10673 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10674 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10675 uint32_t record_buf[8], record_buf_mem[8];
10676
10677 LONGEST s_word;
10678 ULONGEST u_regval[2];
10679
10680 if (bit (arm_insn_r->arm_insn, 4))
10681 return arm_record_media (arm_insn_r);
10682
10683 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10684 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10685
10686 /* Handle enhanced store insns and LDRD DSP insn,
10687 order begins according to addressing modes for store insns
10688 STRH insn. */
10689
10690 /* LDR or STR? */
10691 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10692 {
10693 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10694 /* LDR insn has a capability to do branching, if
10695 MOV LR, PC is preceded by LDR insn having Rn as R15
10696 in that case, it emulates branch and link insn, and hence we
10697 need to save CSPR and PC as well. */
10698 if (15 != reg_dest)
10699 {
10700 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10701 arm_insn_r->reg_rec_count = 1;
10702 }
10703 else
10704 {
10705 record_buf[0] = reg_dest;
10706 record_buf[1] = ARM_PS_REGNUM;
10707 arm_insn_r->reg_rec_count = 2;
10708 }
10709 }
10710 else
10711 {
10712 if (! bits (arm_insn_r->arm_insn, 4, 11))
10713 {
10714 /* Store insn, register offset and register pre-indexed,
10715 register post-indexed. */
10716 /* Get Rm. */
10717 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10718 /* Get Rn. */
10719 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10720 regcache_raw_read_unsigned (reg_cache, reg_src1
10721 , &u_regval[0]);
10722 regcache_raw_read_unsigned (reg_cache, reg_src2
10723 , &u_regval[1]);
10724 if (15 == reg_src2)
10725 {
10726 /* If R15 was used as Rn, hence current PC+8. */
10727 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10728 u_regval[0] = u_regval[0] + 8;
10729 }
10730 /* Calculate target store address, Rn +/- Rm, register offset. */
10731 /* U == 1. */
10732 if (bit (arm_insn_r->arm_insn, 23))
10733 {
10734 tgt_mem_addr = u_regval[0] + u_regval[1];
10735 }
10736 else
10737 {
10738 tgt_mem_addr = u_regval[1] - u_regval[0];
10739 }
10740
10741 switch (arm_insn_r->opcode)
10742 {
10743 /* STR. */
10744 case 8:
10745 case 12:
10746 /* STR. */
10747 case 9:
10748 case 13:
10749 /* STRT. */
10750 case 1:
10751 case 5:
10752 /* STR. */
10753 case 0:
10754 case 4:
10755 record_buf_mem[0] = 4;
10756 break;
10757
10758 /* STRB. */
10759 case 10:
10760 case 14:
10761 /* STRB. */
10762 case 11:
10763 case 15:
10764 /* STRBT. */
10765 case 3:
10766 case 7:
10767 /* STRB. */
10768 case 2:
10769 case 6:
10770 record_buf_mem[0] = 1;
10771 break;
10772
10773 default:
10774 gdb_assert_not_reached ("no decoding pattern found");
10775 break;
10776 }
10777 record_buf_mem[1] = tgt_mem_addr;
10778 arm_insn_r->mem_rec_count = 1;
10779
10780 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10781 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10782 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10783 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10784 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10785 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10786 )
10787 {
10788 /* Rn is going to be changed in pre-indexed mode and
10789 post-indexed mode as well. */
10790 record_buf[0] = reg_src2;
10791 arm_insn_r->reg_rec_count = 1;
10792 }
10793 }
10794 else
10795 {
10796 /* Store insn, scaled register offset; scaled pre-indexed. */
10797 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10798 /* Get Rm. */
10799 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10800 /* Get Rn. */
10801 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10802 /* Get shift_imm. */
10803 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10804 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10805 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10806 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10807 /* Offset_12 used as shift. */
10808 switch (offset_12)
10809 {
10810 case 0:
10811 /* Offset_12 used as index. */
10812 offset_12 = u_regval[0] << shift_imm;
10813 break;
10814
10815 case 1:
10816 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10817 break;
10818
10819 case 2:
10820 if (!shift_imm)
10821 {
10822 if (bit (u_regval[0], 31))
10823 {
10824 offset_12 = 0xFFFFFFFF;
10825 }
10826 else
10827 {
10828 offset_12 = 0;
10829 }
10830 }
10831 else
10832 {
10833 /* This is arithmetic shift. */
10834 offset_12 = s_word >> shift_imm;
10835 }
10836 break;
10837
10838 case 3:
10839 if (!shift_imm)
10840 {
10841 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10842 &u_regval[1]);
10843 /* Get C flag value and shift it by 31. */
10844 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10845 | (u_regval[0]) >> 1);
10846 }
10847 else
10848 {
10849 offset_12 = (u_regval[0] >> shift_imm) \
10850 | (u_regval[0] <<
10851 (sizeof(uint32_t) - shift_imm));
10852 }
10853 break;
10854
10855 default:
10856 gdb_assert_not_reached ("no decoding pattern found");
10857 break;
10858 }
10859
10860 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10861 /* bit U set. */
10862 if (bit (arm_insn_r->arm_insn, 23))
10863 {
10864 tgt_mem_addr = u_regval[1] + offset_12;
10865 }
10866 else
10867 {
10868 tgt_mem_addr = u_regval[1] - offset_12;
10869 }
10870
10871 switch (arm_insn_r->opcode)
10872 {
10873 /* STR. */
10874 case 8:
10875 case 12:
10876 /* STR. */
10877 case 9:
10878 case 13:
10879 /* STRT. */
10880 case 1:
10881 case 5:
10882 /* STR. */
10883 case 0:
10884 case 4:
10885 record_buf_mem[0] = 4;
10886 break;
10887
10888 /* STRB. */
10889 case 10:
10890 case 14:
10891 /* STRB. */
10892 case 11:
10893 case 15:
10894 /* STRBT. */
10895 case 3:
10896 case 7:
10897 /* STRB. */
10898 case 2:
10899 case 6:
10900 record_buf_mem[0] = 1;
10901 break;
10902
10903 default:
10904 gdb_assert_not_reached ("no decoding pattern found");
10905 break;
10906 }
10907 record_buf_mem[1] = tgt_mem_addr;
10908 arm_insn_r->mem_rec_count = 1;
10909
10910 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10911 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10912 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10913 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10914 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10915 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10916 )
10917 {
10918 /* Rn is going to be changed in register scaled pre-indexed
10919 mode,and scaled post indexed mode. */
10920 record_buf[0] = reg_src2;
10921 arm_insn_r->reg_rec_count = 1;
10922 }
10923 }
10924 }
10925
10926 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10927 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10928 return 0;
10929 }
10930
10931 /* Handle ARM mode instructions with opcode 100. */
10932
10933 static int
10934 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10935 {
10936 struct regcache *reg_cache = arm_insn_r->regcache;
10937 uint32_t register_count = 0, register_bits;
10938 uint32_t reg_base, addr_mode;
10939 uint32_t record_buf[24], record_buf_mem[48];
10940 uint32_t wback;
10941 ULONGEST u_regval;
10942
10943 /* Fetch the list of registers. */
10944 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10945 arm_insn_r->reg_rec_count = 0;
10946
10947 /* Fetch the base register that contains the address we are loading data
10948 to. */
10949 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10950
10951 /* Calculate wback. */
10952 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10953
10954 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10955 {
10956 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10957
10958 /* Find out which registers are going to be loaded from memory. */
10959 while (register_bits)
10960 {
10961 if (register_bits & 0x00000001)
10962 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10963 register_bits = register_bits >> 1;
10964 register_count++;
10965 }
10966
10967
10968 /* If wback is true, also save the base register, which is going to be
10969 written to. */
10970 if (wback)
10971 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10972
10973 /* Save the CPSR register. */
10974 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10975 }
10976 else
10977 {
10978 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10979
10980 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10981
10982 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10983
10984 /* Find out how many registers are going to be stored to memory. */
10985 while (register_bits)
10986 {
10987 if (register_bits & 0x00000001)
10988 register_count++;
10989 register_bits = register_bits >> 1;
10990 }
10991
10992 switch (addr_mode)
10993 {
10994 /* STMDA (STMED): Decrement after. */
10995 case 0:
10996 record_buf_mem[1] = (uint32_t) u_regval
10997 - register_count * ARM_INT_REGISTER_SIZE + 4;
10998 break;
10999 /* STM (STMIA, STMEA): Increment after. */
11000 case 1:
11001 record_buf_mem[1] = (uint32_t) u_regval;
11002 break;
11003 /* STMDB (STMFD): Decrement before. */
11004 case 2:
11005 record_buf_mem[1] = (uint32_t) u_regval
11006 - register_count * ARM_INT_REGISTER_SIZE;
11007 break;
11008 /* STMIB (STMFA): Increment before. */
11009 case 3:
11010 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11011 break;
11012 default:
11013 gdb_assert_not_reached ("no decoding pattern found");
11014 break;
11015 }
11016
11017 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11018 arm_insn_r->mem_rec_count = 1;
11019
11020 /* If wback is true, also save the base register, which is going to be
11021 written to. */
11022 if (wback)
11023 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11024 }
11025
11026 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11027 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11028 return 0;
11029 }
11030
11031 /* Handling opcode 101 insns. */
11032
11033 static int
11034 arm_record_b_bl (insn_decode_record *arm_insn_r)
11035 {
11036 uint32_t record_buf[8];
11037
11038 /* Handle B, BL, BLX(1) insns. */
11039 /* B simply branches so we do nothing here. */
11040 /* Note: BLX(1) doesnt fall here but instead it falls into
11041 extension space. */
11042 if (bit (arm_insn_r->arm_insn, 24))
11043 {
11044 record_buf[0] = ARM_LR_REGNUM;
11045 arm_insn_r->reg_rec_count = 1;
11046 }
11047
11048 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11049
11050 return 0;
11051 }
11052
11053 static int
11054 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11055 {
11056 printf_unfiltered (_("Process record does not support instruction "
11057 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11058 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11059
11060 return -1;
11061 }
11062
11063 /* Record handler for vector data transfer instructions. */
11064
11065 static int
11066 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11067 {
11068 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11069 uint32_t record_buf[4];
11070
11071 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11072 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11073 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11074 bit_l = bit (arm_insn_r->arm_insn, 20);
11075 bit_c = bit (arm_insn_r->arm_insn, 8);
11076
11077 /* Handle VMOV instruction. */
11078 if (bit_l && bit_c)
11079 {
11080 record_buf[0] = reg_t;
11081 arm_insn_r->reg_rec_count = 1;
11082 }
11083 else if (bit_l && !bit_c)
11084 {
11085 /* Handle VMOV instruction. */
11086 if (bits_a == 0x00)
11087 {
11088 record_buf[0] = reg_t;
11089 arm_insn_r->reg_rec_count = 1;
11090 }
11091 /* Handle VMRS instruction. */
11092 else if (bits_a == 0x07)
11093 {
11094 if (reg_t == 15)
11095 reg_t = ARM_PS_REGNUM;
11096
11097 record_buf[0] = reg_t;
11098 arm_insn_r->reg_rec_count = 1;
11099 }
11100 }
11101 else if (!bit_l && !bit_c)
11102 {
11103 /* Handle VMOV instruction. */
11104 if (bits_a == 0x00)
11105 {
11106 record_buf[0] = ARM_D0_REGNUM + reg_v;
11107
11108 arm_insn_r->reg_rec_count = 1;
11109 }
11110 /* Handle VMSR instruction. */
11111 else if (bits_a == 0x07)
11112 {
11113 record_buf[0] = ARM_FPSCR_REGNUM;
11114 arm_insn_r->reg_rec_count = 1;
11115 }
11116 }
11117 else if (!bit_l && bit_c)
11118 {
11119 /* Handle VMOV instruction. */
11120 if (!(bits_a & 0x04))
11121 {
11122 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11123 + ARM_D0_REGNUM;
11124 arm_insn_r->reg_rec_count = 1;
11125 }
11126 /* Handle VDUP instruction. */
11127 else
11128 {
11129 if (bit (arm_insn_r->arm_insn, 21))
11130 {
11131 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11132 record_buf[0] = reg_v + ARM_D0_REGNUM;
11133 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11134 arm_insn_r->reg_rec_count = 2;
11135 }
11136 else
11137 {
11138 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11139 record_buf[0] = reg_v + ARM_D0_REGNUM;
11140 arm_insn_r->reg_rec_count = 1;
11141 }
11142 }
11143 }
11144
11145 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11146 return 0;
11147 }
11148
/* Record handler for extension register load/store instructions.

   Decodes the VFP/Advanced SIMD extension-register load/store space:
   VMOV (between core and extension registers), VSTM/VPUSH, VLDM/VPOP,
   VSTR and VLDR.  Fills in ARM_INSN_R with the registers and memory
   ranges the instruction is about to modify so that process record can
   save their pre-execution values.  Returns 0 on success.  */

static int
arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opcode, single_reg;
  uint8_t op_vldm_vstm;
  uint32_t record_buf[8], record_buf_mem[128];
  ULONGEST u_regval = 0;

  struct regcache *reg_cache = arm_insn_r->regcache;

  opcode = bits (arm_insn_r->arm_insn, 20, 24);
  /* Bit 8 selects the precision: clear means single (S registers),
     set means double (D registers).  */
  single_reg = !bit (arm_insn_r->arm_insn, 8);
  op_vldm_vstm = opcode & 0x1b;

  /* Handle VMOV instructions.  */
  if ((opcode & 0x1e) == 0x04)
    {
      if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20?  */
	{
	  /* Transfer to ARM core registers: both Rt (bits 12..15) and
	     Rt2 (bits 16..19) are written.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
	  arm_insn_r->reg_rec_count = 2;
	}
      else
	{
	  /* Transfer from core registers to extension registers.  */
	  uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
	  uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);

	  if (single_reg)
	    {
	      /* The first S register number m is REG_M:M (M is bit 5),
		 the corresponding D register number is REG_M:M / 2, which
		 is REG_M.  */
	      record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
	      /* The second S register number is REG_M:M + 1, the
		 corresponding D register number is (REG_M:M + 1) / 2.
		 IOW, if bit M is 1, the first and second S registers
		 are mapped to different D registers, otherwise, they are
		 in the same D register.  */
	      if (bit_m)
		{
		  record_buf[arm_insn_r->reg_rec_count++]
		    = ARM_D0_REGNUM + reg_m + 1;
		}
	    }
	  else
	    {
	      /* Double precision: destination is D register M:REG_M.  */
	      record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }
  /* Handle VSTM and VPUSH instructions.  */
  else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
	   || op_vldm_vstm == 0x12)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      /* IMM8 is the register-list length in words; the byte offset is
	 IMM8 * 4.  */
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;
      /* NOTE(review): for the double-precision form each loop iteration
	 below records 8 bytes while MEMORY_COUNT is still IMM8 words,
	 which appears to record twice the actual stored range — confirm
	 against the ARM ARM VSTM register-list semantics.  */
      memory_count = imm_off8;

      /* Bit 23 (U) selects increment (store at Rn upward) versus
	 decrement-before (store below Rn, e.g. VPUSH).  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval;
      else
	start_address = u_regval - imm_off32;

      /* Bit 21 (W): base register write-back also modifies Rn.  */
      if (bit (arm_insn_r->arm_insn, 21))
	{
	  record_buf[0] = reg_rn;
	  arm_insn_r->reg_rec_count = 1;
	}

      /* Record each stored word (pairs of words for D registers) as a
	 (length, address) pair in RECORD_BUF_MEM.  */
      while (memory_count > 0)
	{
	  if (single_reg)
	    {
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      start_address = start_address + 4;
	      memory_index = memory_index + 2;
	    }
	  else
	    {
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      record_buf_mem[memory_index + 2] = 4;
	      record_buf_mem[memory_index + 3] = start_address + 4;
	      start_address = start_address + 8;
	      memory_index = memory_index + 4;
	    }
	  memory_count--;
	}
      arm_insn_r->mem_rec_count = (memory_index >> 1);
    }
  /* Handle VLDM instructions.  */
  else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
	   || op_vldm_vstm == 0x13)
    {
      uint32_t reg_count, reg_vd;
      uint32_t reg_index = 0;
      uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);

      reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
      reg_count = bits (arm_insn_r->arm_insn, 0, 7);

      /* REG_VD is the first D register number.  If the instruction
	 loads memory to S registers (SINGLE_REG is TRUE), the register
	 number is (REG_VD << 1 | bit D), so the corresponding D
	 register number is (REG_VD << 1 | bit D) / 2 = REG_VD.  */
      if (!single_reg)
	reg_vd = reg_vd | (bit_d << 4);

      if (bit (arm_insn_r->arm_insn, 21) /* write back */)
	record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);

      /* If the instruction loads memory to D register, REG_COUNT should
	 be divided by 2, according to the ARM Architecture Reference
	 Manual.  If the instruction loads memory to S register, divide by
	 2 as well because two S registers are mapped to D register.  */
      reg_count = reg_count / 2;
      if (single_reg && bit_d)
	{
	  /* Increase the register count if S register list starts from
	     an odd number (bit d is one).  */
	  reg_count++;
	}

      /* Record the destination D registers, highest-numbered first.  */
      while (reg_count > 0)
	{
	  record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
	  reg_count--;
	}
      arm_insn_r->reg_rec_count = reg_index;
    }
  /* VSTR Vector store register.  */
  else if ((opcode & 0x13) == 0x10)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;

      /* Bit 23 (U) selects addition versus subtraction of the offset.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval + imm_off32;
      else
	start_address = u_regval - imm_off32;

      /* A single store: one word for S registers, two words for D.  */
      if (single_reg)
	{
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  arm_insn_r->mem_rec_count = 1;
	}
      else
	{
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  record_buf_mem[memory_index + 2] = 4;
	  record_buf_mem[memory_index + 3] = start_address + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  /* VLDR Vector load register.  */
  else if ((opcode & 0x13) == 0x11)
    {
      uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);

      if (!single_reg)
	{
	  /* Destination is D register D:REG_VD.  */
	  reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
	  record_buf[0] = ARM_D0_REGNUM + reg_vd;
	}
      else
	{
	  reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
	  /* Record register D rather than pseudo register S.  */
	  record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
	}
      arm_insn_r->reg_rec_count = 1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
11343
/* Record handler for arm/thumb mode VFP data processing instructions.

   Classifies the instruction by its opc1/opc2/opc3 fields into one of
   four record shapes (INSN_T0..INSN_T3) and then records the registers
   it will modify: a quad/pair of D registers, a single D register, an
   S register (stored via its containing D register), or FPSCR.
   Returns 0 on success.  */

static int
arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
  uint32_t record_buf[4];
  /* INSN_T0: two consecutive D registers are written (SIMD forms).
     INSN_T1: one D register (double-precision destination).
     INSN_T2: one S register (single-precision destination).
     INSN_T3: only FPSCR is written (compares).  */
  enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
  enum insn_types curr_insn_type = INSN_INV;

  reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
  opc1 = bits (arm_insn_r->arm_insn, 20, 23);
  opc2 = bits (arm_insn_r->arm_insn, 16, 19);
  opc3 = bits (arm_insn_r->arm_insn, 6, 7);
  /* Bit 8 (sz): set for double precision, clear for single.  */
  dp_op_sz = bit (arm_insn_r->arm_insn, 8);
  bit_d = bit (arm_insn_r->arm_insn, 22);
  /* Mask off the "D" bit.  */
  opc1 = opc1 & ~0x04;

  /* Handle VMLA, VMLS.  */
  if (opc1 == 0x00)
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VNMLA, VNMLS, VNMUL.  */
  else if (opc1 == 0x01)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle VMUL.  */
  else if (opc1 == 0x02 && !(opc3 & 0x01))
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VADD, VSUB.  */
  else if (opc1 == 0x03)
    {
      if (!bit (arm_insn_r->arm_insn, 9))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VDIV.  */
  else if (opc1 == 0x08)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle all other vfp data processing instructions.  */
  else if (opc1 == 0x0b)
    {
      /* Handle VMOV.  */
      if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
	{
	  if (bit (arm_insn_r->arm_insn, 4))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VNEG and VABS.  */
      else if ((opc2 == 0x01 && opc3 == 0x01)
	       || (opc2 == 0x00 && opc3 == 0x03))
	{
	  if (!bit (arm_insn_r->arm_insn, 11))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VSQRT.  */
      else if (opc2 == 0x01 && opc3 == 0x03)
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      /* Handle VCVT.  */
      else if (opc2 == 0x07 && opc3 == 0x03)
	{
	  /* NOTE(review): the sz test is inverted relative to the other
	     branches — presumably because double<->single conversion
	     writes a destination of the opposite precision to the sz
	     bit; confirm against the ARM ARM VCVT encoding.  */
	  if (!dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      else if (opc3 & 0x01)
	{
	  /* Handle VCVT.  */
	  if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
	    {
	      if (!bit (arm_insn_r->arm_insn, 18))
		curr_insn_type = INSN_T2;
	      else
		{
		  if (dp_op_sz)
		    curr_insn_type = INSN_T1;
		  else
		    curr_insn_type = INSN_T2;
		}
	    }
	  /* Handle VCVT.  */
	  else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	  /* Handle VCVTB, VCVTT.  */
	  else if ((opc2 & 0x0e) == 0x02)
	    curr_insn_type = INSN_T2;
	  /* Handle VCMP, VCMPE.  */
	  else if ((opc2 & 0x0e) == 0x04)
	    curr_insn_type = INSN_T3;
	}
    }

  /* Translate the classification into the set of modified registers.
     The default asserts: the decode above is expected to be exhaustive
     for everything routed here, so an unmatched encoding is an
     internal error.  */
  switch (curr_insn_type)
    {
    case INSN_T0:
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
      arm_insn_r->reg_rec_count = 2;
      break;

    case INSN_T1:
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T2:
      /* Single precision: S register D:REG_VD lives in D register
	 (D:REG_VD)/2... recorded via its containing D register.  */
      reg_vd = (reg_vd << 1) | bit_d;
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T3:
      record_buf[0] = ARM_FPSCR_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    default:
      gdb_assert_not_reached ("no decoding pattern found");
      break;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
11555
11556 /* Handling opcode 110 insns. */
11557
11558 static int
11559 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11560 {
11561 uint32_t op1, op1_ebit, coproc;
11562
11563 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11564 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11565 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11566
11567 if ((coproc & 0x0e) == 0x0a)
11568 {
11569 /* Handle extension register ld/st instructions. */
11570 if (!(op1 & 0x20))
11571 return arm_record_exreg_ld_st_insn (arm_insn_r);
11572
11573 /* 64-bit transfers between arm core and extension registers. */
11574 if ((op1 & 0x3e) == 0x04)
11575 return arm_record_exreg_ld_st_insn (arm_insn_r);
11576 }
11577 else
11578 {
11579 /* Handle coprocessor ld/st instructions. */
11580 if (!(op1 & 0x3a))
11581 {
11582 /* Store. */
11583 if (!op1_ebit)
11584 return arm_record_unsupported_insn (arm_insn_r);
11585 else
11586 /* Load. */
11587 return arm_record_unsupported_insn (arm_insn_r);
11588 }
11589
11590 /* Move to coprocessor from two arm core registers. */
11591 if (op1 == 0x4)
11592 return arm_record_unsupported_insn (arm_insn_r);
11593
11594 /* Move to two arm core registers from coprocessor. */
11595 if (op1 == 0x5)
11596 {
11597 uint32_t reg_t[2];
11598
11599 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11600 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11601 arm_insn_r->reg_rec_count = 2;
11602
11603 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11604 return 0;
11605 }
11606 }
11607 return arm_record_unsupported_insn (arm_insn_r);
11608 }
11609
/* Handling opcode 111 insns.

   This covers the supervisor-call and coprocessor space: SWI/SVC,
   MRC/MCR, CDP, VFP data-processing and register-transfer encodings,
   and the MCRR/MRRC/LDC/STC family.  Records the registers an
   instruction modifies, or returns -1 for forms process record does
   not support.  */

static int
arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
{
  uint32_t op, op1_ebit, coproc, bits_24_25;
  struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
  struct regcache *reg_cache = arm_insn_r->regcache;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
  coproc = bits (arm_insn_r->arm_insn, 8, 11);
  op1_ebit = bit (arm_insn_r->arm_insn, 20);
  op = bit (arm_insn_r->arm_insn, 4);
  bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);

  /* Handle arm SWI/SVC system call instructions.  */
  if (bits_24_25 == 0x3)
    {
      if (tdep->arm_syscall_record != NULL)
	{
	  ULONGEST svc_operand, svc_number;

	  svc_operand = (0x00ffffff & arm_insn_r->arm_insn);

	  /* Old ABI encodes the syscall number in the instruction's
	     immediate field, biased by 0x900000; EABI uses SVC 0 and
	     passes the syscall number in r7.  */
	  if (svc_operand)  /* OABI.  */
	    svc_number = svc_operand - 0x900000;
	  else /* EABI.  */
	    regcache_raw_read_unsigned (reg_cache, 7, &svc_number);

	  /* Let the OS-specific hook record the syscall's effects.  */
	  return tdep->arm_syscall_record (reg_cache, svc_number);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }
  else if (bits_24_25 == 0x02)
    {
      if (op)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 8, 16, and 32-bit transfer */
	      return arm_record_vdata_transfer_insn (arm_insn_r);
	    }
	  else
	    {
	      if (op1_ebit)
		{
		  /* MRC, MRC2 */
		  uint32_t record_buf[1];

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  /* Rt == 15 means the flags are the destination, not
		     the PC; record CPSR instead.  */
		  if (record_buf[0] == 15)
		    record_buf[0] = ARM_PS_REGNUM;

		  arm_insn_r->reg_rec_count = 1;
		  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
			     record_buf);
		  return 0;
		}
	      else
		{
		  /* MCR, MCR2 */
		  return -1;
		}
	    }
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* VFP data-processing instructions.  */
	      return arm_record_vfp_data_proc_insn (arm_insn_r);
	    }
	  else
	    {
	      /* CDP, CDP2 */
	      return -1;
	    }
	}
    }
  else
    {
      unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);

      if (op1 == 5)
	{
	  if ((coproc & 0x0e) != 0x0a)
	    {
	      /* MRRC, MRRC2 */
	      return -1;
	    }
	  /* op1 == 5 with coproc 10/11 falls through to the final
	     return -1 below.  */
	}
      /* NOTE(review): op1 == 5 was consumed by the branch above, so
	 only op1 == 4 can reach here; the "|| op1 == 5" is dead.  */
      else if (op1 == 4 || op1 == 5)
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* 64-bit transfers between ARM core and extension */
	      return -1;
	    }
	  else if (op1 == 4)
	    {
	      /* MCRR, MCRR2 */
	      return -1;
	    }
	}
      else if (op1 == 0 || op1 == 1)
	{
	  /* UNDEFINED */
	  return -1;
	}
      else
	{
	  if ((coproc & 0x0e) == 0x0a)
	    {
	      /* Extension register load/store */
	    }
	  else
	    {
	      /* STC, STC2, LDC, LDC2 */
	    }
	  return -1;
	}
    }

  return -1;
}
11739
11740 /* Handling opcode 000 insns. */
11741
11742 static int
11743 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11744 {
11745 uint32_t record_buf[8];
11746 uint32_t reg_src1 = 0;
11747
11748 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11749
11750 record_buf[0] = ARM_PS_REGNUM;
11751 record_buf[1] = reg_src1;
11752 thumb_insn_r->reg_rec_count = 2;
11753
11754 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11755
11756 return 0;
11757 }
11758
11759
11760 /* Handling opcode 001 insns. */
11761
11762 static int
11763 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11764 {
11765 uint32_t record_buf[8];
11766 uint32_t reg_src1 = 0;
11767
11768 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11769
11770 record_buf[0] = ARM_PS_REGNUM;
11771 record_buf[1] = reg_src1;
11772 thumb_insn_r->reg_rec_count = 2;
11773
11774 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11775
11776 return 0;
11777 }
11778
11779 /* Handling opcode 010 insns. */
11780
11781 static int
11782 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11783 {
11784 struct regcache *reg_cache = thumb_insn_r->regcache;
11785 uint32_t record_buf[8], record_buf_mem[8];
11786
11787 uint32_t reg_src1 = 0, reg_src2 = 0;
11788 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11789
11790 ULONGEST u_regval[2] = {0};
11791
11792 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11793
11794 if (bit (thumb_insn_r->arm_insn, 12))
11795 {
11796 /* Handle load/store register offset. */
11797 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11798
11799 if (in_inclusive_range (opB, 4U, 7U))
11800 {
11801 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11802 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11803 record_buf[0] = reg_src1;
11804 thumb_insn_r->reg_rec_count = 1;
11805 }
11806 else if (in_inclusive_range (opB, 0U, 2U))
11807 {
11808 /* STR(2), STRB(2), STRH(2) . */
11809 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11810 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11811 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11812 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11813 if (0 == opB)
11814 record_buf_mem[0] = 4; /* STR (2). */
11815 else if (2 == opB)
11816 record_buf_mem[0] = 1; /* STRB (2). */
11817 else if (1 == opB)
11818 record_buf_mem[0] = 2; /* STRH (2). */
11819 record_buf_mem[1] = u_regval[0] + u_regval[1];
11820 thumb_insn_r->mem_rec_count = 1;
11821 }
11822 }
11823 else if (bit (thumb_insn_r->arm_insn, 11))
11824 {
11825 /* Handle load from literal pool. */
11826 /* LDR(3). */
11827 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11828 record_buf[0] = reg_src1;
11829 thumb_insn_r->reg_rec_count = 1;
11830 }
11831 else if (opcode1)
11832 {
11833 /* Special data instructions and branch and exchange */
11834 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11835 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11836 if ((3 == opcode2) && (!opcode3))
11837 {
11838 /* Branch with exchange. */
11839 record_buf[0] = ARM_PS_REGNUM;
11840 thumb_insn_r->reg_rec_count = 1;
11841 }
11842 else
11843 {
11844 /* Format 8; special data processing insns. */
11845 record_buf[0] = ARM_PS_REGNUM;
11846 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11847 | bits (thumb_insn_r->arm_insn, 0, 2));
11848 thumb_insn_r->reg_rec_count = 2;
11849 }
11850 }
11851 else
11852 {
11853 /* Format 5; data processing insns. */
11854 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11855 if (bit (thumb_insn_r->arm_insn, 7))
11856 {
11857 reg_src1 = reg_src1 + 8;
11858 }
11859 record_buf[0] = ARM_PS_REGNUM;
11860 record_buf[1] = reg_src1;
11861 thumb_insn_r->reg_rec_count = 2;
11862 }
11863
11864 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11865 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11866 record_buf_mem);
11867
11868 return 0;
11869 }
11870
/* Handling opcode 001 insns.

   Immediate-offset load/store: records the destination register for
   the load form and the written memory span for the store form.  */

static int
thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];

  uint32_t reg_src1 = 0;
  uint32_t opcode = 0, immed_5 = 0;

  ULONGEST u_regval = 0;

  /* Bits 11..12 carry the L/B selector bits of this group.  */
  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (opcode)
    {
      /* LDR(1).  */
      /* NOTE(review): every non-zero OPCODE value lands here and is
	 treated as a load of Rd (bits 0..2).  Confirm against the
	 dispatcher which encodings reach this handler — a store
	 variant arriving here would need a memory record instead.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else
    {
      /* STR(1): word store at Rn + imm5 * 4.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
      immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      record_buf_mem[0] = 4;
      record_buf_mem[1] = u_regval + (immed_5 * 4);
      thumb_insn_r->mem_rec_count = 1;
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
11910
11911 /* Handling opcode 100 insns. */
11912
11913 static int
11914 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11915 {
11916 struct regcache *reg_cache = thumb_insn_r->regcache;
11917 uint32_t record_buf[8], record_buf_mem[8];
11918
11919 uint32_t reg_src1 = 0;
11920 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11921
11922 ULONGEST u_regval = 0;
11923
11924 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11925
11926 if (3 == opcode)
11927 {
11928 /* LDR(4). */
11929 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11930 record_buf[0] = reg_src1;
11931 thumb_insn_r->reg_rec_count = 1;
11932 }
11933 else if (1 == opcode)
11934 {
11935 /* LDRH(1). */
11936 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11937 record_buf[0] = reg_src1;
11938 thumb_insn_r->reg_rec_count = 1;
11939 }
11940 else if (2 == opcode)
11941 {
11942 /* STR(3). */
11943 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11944 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11945 record_buf_mem[0] = 4;
11946 record_buf_mem[1] = u_regval + (immed_8 * 4);
11947 thumb_insn_r->mem_rec_count = 1;
11948 }
11949 else if (0 == opcode)
11950 {
11951 /* STRH(1). */
11952 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11953 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11954 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11955 record_buf_mem[0] = 2;
11956 record_buf_mem[1] = u_regval + (immed_5 * 2);
11957 thumb_insn_r->mem_rec_count = 1;
11958 }
11959
11960 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11961 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11962 record_buf_mem);
11963
11964 return 0;
11965 }
11966
/* Handling opcode 101 insns.

   Covers ADR/ADD (SP plus immediate) plus the miscellaneous 16-bit
   group: SP adjustment, sign/zero extension, PUSH/POP, byte-reverse,
   CBZ/CBNZ, SETEND/CPS, IT/hints and BKPT.  Records the registers and
   memory each form modifies; BKPT is unsupported.  */

static int
thumb_record_misc (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t opcode = 0;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];
  uint32_t reg_src1;

  ULONGEST u_regval = 0;

  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (opcode == 0 || opcode == 1)
    {
      /* ADR and ADD (SP plus immediate): only Rd (bits 8..10) is
	 written.  */

      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else
    {
      /* Miscellaneous 16-bit instructions, selected by bits 8..11.  */
      uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);

      switch (opcode2)
	{
	case 6:
	  /* SETEND and CPS: nothing recorded here.  */
	  break;
	case 0:
	  /* ADD/SUB (SP plus immediate): only SP changes.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 1: /* fall through  */
	case 3: /* fall through  */
	case 9: /* fall through  */
	case 11:
	  /* CBNZ, CBZ: branch only; PC is saved by process_record.  */
	  break;
	case 2:
	  /* SXTH, SXTB, UXTH, UXTB: Rd is bits 0..2.  */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 4: /* fall through  */
	case 5:
	  /* PUSH.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
	  /* Count the registers in the list (bit 8 adds LR).  */
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;
	      register_bits = register_bits >> 1;
	    }
	  /* The lowest address written is SP minus 4 bytes per pushed
	     register.  */
	  start_address = u_regval -  \
	    (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
	  thumb_insn_r->mem_rec_count = register_count;
	  /* Record each pushed word as a (length, address) pair.  */
	  while (register_count)
	    {
	      record_buf_mem[(register_count * 2) - 1] = start_address;
	      record_buf_mem[(register_count * 2) - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 10:
	  /* REV, REV16, REVSH: Rd is bits 0..2.  */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 12: /* fall through  */
	case 13:
	  /* POP: every listed register is written, plus the flags and
	     SP (write-back).  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;
	      register_bits = register_bits >> 1;
	      register_count++;
	    }
	  record_buf[index++] = ARM_PS_REGNUM;
	  record_buf[index++] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = index;
	  break;
	case 0xe:
	  /* BKPT insn.  */
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state,  disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* User hits breakpoint and type reverse, in that case, we need to go back with
	     previous CPSR and Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  thumb_insn_r->reg_rec_count = 2;
	  /* We need to save SPSR value, which is not yet done.  */
	  printf_unfiltered (_("Process record does not support instruction "
			       "0x%0x at address %s.\n"),
			     thumb_insn_r->arm_insn,
			     paddress (thumb_insn_r->gdbarch,
				       thumb_insn_r->this_addr));
	  return -1;

	case 0xf:
	  /* If-Then, and hints: nothing to record.  */
	  break;
	default:
	  return -1;
	};
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
12096
/* Handling opcode 110 insns.

   Covers LDMIA (records the loaded registers and the base register),
   STMIA (records the stored memory range), and the Thumb SWI/SVC form
   (delegated to the OS-specific syscall hook).  Conditional branches
   in this group need no explicit record: PC is saved by
   process_record.  */

static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t ret = 0; /* function return value: -1:record failure ;  0:success  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {

      /* LDMIA.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      /* Every register in the list is overwritten by the load.  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      /* The base register is also recorded (write-back form).  */
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* It handles both STMIA.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      /* Count the registers to be stored.  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      /* STMIA stores upward starting at the base register's value;
	 record one word per listed register.  */
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  */
      if (tdep->arm_syscall_record != NULL)
	{
	  /* Thumb uses the EABI convention: syscall number in r7.  */
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
     as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
12180
12181 /* Handling opcode 111 insns. */
12182
12183 static int
12184 thumb_record_branch (insn_decode_record *thumb_insn_r)
12185 {
12186 uint32_t record_buf[8];
12187 uint32_t bits_h = 0;
12188
12189 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12190
12191 if (2 == bits_h || 3 == bits_h)
12192 {
12193 /* BL */
12194 record_buf[0] = ARM_LR_REGNUM;
12195 thumb_insn_r->reg_rec_count = 1;
12196 }
12197 else if (1 == bits_h)
12198 {
12199 /* BLX(1). */
12200 record_buf[0] = ARM_PS_REGNUM;
12201 record_buf[1] = ARM_LR_REGNUM;
12202 thumb_insn_r->reg_rec_count = 2;
12203 }
12204
12205 /* B(2) is automatically taken care in process_record, as PC is
12206 saved there. */
12207
12208 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12209
12210 return 0;
12211 }
12212
/* Handler for thumb2 load/store multiple instructions.

   Covers RFE/SRS (op 0/3) and LDM/STM with their DB variants
   (op 1/2): loads record the listed registers plus the base and
   status register, stores record the written memory range plus the
   base and status register.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      /* The L bit distinguishes RFE (load) from SRS (store).  */
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  /* Every listed register is overwritten by the load.  */
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  /* Base register (write-back) and the flags as well.  */
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  /* Count the registers to be stored.  */
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Increment-after (STMIA/STMEA): stores start at Rn.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Decrement-before (STMDB/STMFD): stores start below Rn
		 by one word per stored register.  */
	      start_address = u_regval - register_count * 4;
	    }

	  /* Record one word per stored register as (length, address)
	     pairs.  */
	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  /* Base register may be written back; the flags record is
	     conservative.  */
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12306
/* Handler for thumb2 load/store (dual/exclusive) and table branch
   instructions.

   Records the destination register(s) for the load forms, and the
   memory region(s) plus the status-result register for the exclusive
   store forms.  */

static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;

  ULONGEST u_regval[2];

  /* Sub-opcode fields of this encoding group.  */
  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms: record destination register(s) and the flags.  The
         excluded op1/op2/op3 combination is the table-branch (TBB/TBH)
         case, which writes no general destination register.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
        {
          reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
          record_buf[0] = reg_dest1;
          record_buf[1] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 2;
        }

      /* Dual-register loads also write a second destination (bits 8-11).  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
        {
          reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
          record_buf[2] = reg_dest2;
          thumb2_insn_r->reg_rec_count = 3;
        }
    }
  else
    {
      /* Store forms: compute the target address from the base register.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
        {
          /* Handle STREX: one word at Rn + imm8 * 4; the status-result
             register Rd (bits 0-3) is written too.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          address = u_regval[0] + (offset_imm * 4);
          record_buf_mem[0] = 4;
          record_buf_mem[1] = address;
          thumb2_insn_r->mem_rec_count = 1;
          reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
          record_buf[0] = reg_rd;
          thumb2_insn_r->reg_rec_count = 1;
        }
      else if (1 == op1 && 0 == op2)
        {
          /* STREXB/STREXH/STREXD, distinguished by op3.  All write the
             status-result register Rd and store at the address in Rn.  */
          reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
          record_buf[0] = reg_rd;
          thumb2_insn_r->reg_rec_count = 1;
          address = u_regval[0];
          record_buf_mem[1] = address;

          if (4 == op3)
            {
              /* Handle STREXB: single byte.  */
              record_buf_mem[0] = 1;
              thumb2_insn_r->mem_rec_count = 1;
            }
          else if (5 == op3)
            {
              /* Handle STREXH: half word.  */
              record_buf_mem[0] = 2 ;
              thumb2_insn_r->mem_rec_count = 1;
            }
          else if (7 == op3)
            {
              /* Handle STREXD: two consecutive words.  */
              address = u_regval[0];
              record_buf_mem[0] = 4;
              record_buf_mem[2] = 4;
              record_buf_mem[3] = address + 4;
              thumb2_insn_r->mem_rec_count = 2;
            }
        }
      else
        {
          /* Double-word store: Rn +/- imm8 * 4, indexed when bit 24 is
             set (bit 23 selects add versus subtract).  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

          if (bit (thumb2_insn_r->arm_insn, 24))
            {
              if (bit (thumb2_insn_r->arm_insn, 23))
                offset_addr = u_regval[0] + (offset_imm * 4);
              else
                offset_addr = u_regval[0] - (offset_imm * 4);

              address = offset_addr;
            }
          else
            address = u_regval[0];

          record_buf_mem[0] = 4;
          record_buf_mem[1] = address;
          record_buf_mem[2] = 4;
          record_buf_mem[3] = address + 4;
          thumb2_insn_r->mem_rec_count = 2;
          /* The base register may be written back.  */
          record_buf[0] = reg_rn;
          thumb2_insn_r->reg_rec_count = 1;
        }
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12423
12424 /* Handler for thumb2 data processing (shift register and modified immediate)
12425 instructions. */
12426
12427 static int
12428 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12429 {
12430 uint32_t reg_rd, op;
12431 uint32_t record_buf[8];
12432
12433 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12434 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12435
12436 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12437 {
12438 record_buf[0] = ARM_PS_REGNUM;
12439 thumb2_insn_r->reg_rec_count = 1;
12440 }
12441 else
12442 {
12443 record_buf[0] = reg_rd;
12444 record_buf[1] = ARM_PS_REGNUM;
12445 thumb2_insn_r->reg_rec_count = 2;
12446 }
12447
12448 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12449 record_buf);
12450 return ARM_RECORD_SUCCESS;
12451 }
12452
12453 /* Generic handler for thumb2 instructions which effect destination and PS
12454 registers. */
12455
12456 static int
12457 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12458 {
12459 uint32_t reg_rd;
12460 uint32_t record_buf[8];
12461
12462 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12463
12464 record_buf[0] = reg_rd;
12465 record_buf[1] = ARM_PS_REGNUM;
12466 thumb2_insn_r->reg_rec_count = 2;
12467
12468 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12469 record_buf);
12470 return ARM_RECORD_SUCCESS;
12471 }
12472
12473 /* Handler for thumb2 branch and miscellaneous control instructions. */
12474
12475 static int
12476 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12477 {
12478 uint32_t op, op1, op2;
12479 uint32_t record_buf[8];
12480
12481 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12482 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12483 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12484
12485 /* Handle MSR insn. */
12486 if (!(op1 & 0x2) && 0x38 == op)
12487 {
12488 if (!(op2 & 0x3))
12489 {
12490 /* CPSR is going to be changed. */
12491 record_buf[0] = ARM_PS_REGNUM;
12492 thumb2_insn_r->reg_rec_count = 1;
12493 }
12494 else
12495 {
12496 arm_record_unsupported_insn(thumb2_insn_r);
12497 return -1;
12498 }
12499 }
12500 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12501 {
12502 /* BLX. */
12503 record_buf[0] = ARM_PS_REGNUM;
12504 record_buf[1] = ARM_LR_REGNUM;
12505 thumb2_insn_r->reg_rec_count = 2;
12506 }
12507
12508 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12509 record_buf);
12510 return ARM_RECORD_SUCCESS;
12511 }
12512
/* Handler for thumb2 store single data item instructions.

   Computes the effective store address from the base register and the
   addressing form, then records the overwritten memory region and the
   base register (which may be written back).  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  /* op1 (bits 21-23) selects the data size, op2 (bits 6-11) the
     addressing form; the base register value is needed to compute the
     store address.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding: Rn plus a 12-bit immediate offset.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
        {
          /* Handle STRB (register): address is Rn + (Rm << imm2).  */
          reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
          regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
          shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
          offset_addr = u_regval[1] << shift_imm;
          address = u_regval[0] + offset_addr;
        }
      else
        {
          /* 8-bit immediate form; bit 10 enables indexing and bit 9
             selects add versus subtract.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          if (bit (thumb2_insn_r->arm_insn, 10))
            {
              if (bit (thumb2_insn_r->arm_insn, 9))
                offset_addr = u_regval[0] + offset_imm;
              else
                offset_addr = u_regval[0] - offset_imm;

              address = offset_addr;
            }
          else
            address = u_regval[0];
        }
    }

  /* Width of the store, from the size field.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
        record_buf_mem[0] = 1;
        break;
      /* Store half word instructions.  */
      case 1:
      case 5:
        record_buf_mem[0] = 2;
        break;
      /* Store word instructions.  */
      case 2:
      case 6:
        record_buf_mem[0] = 4;
        break;

      default:
        gdb_assert_not_reached ("no decoding pattern found");
        break;
    }

  /* One memory record (length, address) plus the base register, which
     may be written back in the indexed forms.  */
  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12602
12603 /* Handler for thumb2 load memory hints instructions. */
12604
12605 static int
12606 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12607 {
12608 uint32_t record_buf[8];
12609 uint32_t reg_rt, reg_rn;
12610
12611 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12612 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12613
12614 if (ARM_PC_REGNUM != reg_rt)
12615 {
12616 record_buf[0] = reg_rt;
12617 record_buf[1] = reg_rn;
12618 record_buf[2] = ARM_PS_REGNUM;
12619 thumb2_insn_r->reg_rec_count = 3;
12620
12621 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12622 record_buf);
12623 return ARM_RECORD_SUCCESS;
12624 }
12625
12626 return ARM_RECORD_FAILURE;
12627 }
12628
12629 /* Handler for thumb2 load word instructions. */
12630
12631 static int
12632 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12633 {
12634 uint32_t record_buf[8];
12635
12636 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12637 record_buf[1] = ARM_PS_REGNUM;
12638 thumb2_insn_r->reg_rec_count = 2;
12639
12640 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12641 record_buf);
12642 return ARM_RECORD_SUCCESS;
12643 }
12644
/* Handler for thumb2 long multiply, long multiply accumulate, and
   divide instructions.  All handled forms write RdHi/RdLo (or Rd plus a
   second operand slot) and the status register.  */

static int
thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
{
  uint32_t opcode1 = 0, opcode2 = 0;
  uint32_t record_buf[8];

  /* op1 (bits 20-22) selects the operation; op2 (bits 4-7) refines it.  */
  opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
  opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
    {
      /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S): the 64-bit result
         goes to the registers in bits 16-19 and 12-15.  */
      record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
      record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
      record_buf[2] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 3;
    }
  else if (1 == opcode1 || 3 == opcode2)
    {
      /* Handle SDIV and UDIV.  NOTE(review): the second test reads
         OPCODE2, not OPCODE1 -- presumably UDIV (opcode1 == 3) was
         intended; confirm against the ARM ARM before changing.  */
      record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
      record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
      record_buf[2] = ARM_PS_REGNUM;
      thumb2_insn_r->reg_rec_count = 3;
    }
  else
    return ARM_RECORD_FAILURE;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  return ARM_RECORD_SUCCESS;
}
12681
12682 /* Record handler for thumb32 coprocessor instructions. */
12683
12684 static int
12685 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12686 {
12687 if (bit (thumb2_insn_r->arm_insn, 25))
12688 return arm_record_coproc_data_proc (thumb2_insn_r);
12689 else
12690 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12691 }
12692
12693 /* Record handler for advance SIMD structure load/store instructions. */
12694
12695 static int
12696 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12697 {
12698 struct regcache *reg_cache = thumb2_insn_r->regcache;
12699 uint32_t l_bit, a_bit, b_bits;
12700 uint32_t record_buf[128], record_buf_mem[128];
12701 uint32_t reg_rn, reg_vd, address, f_elem;
12702 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12703 uint8_t f_ebytes;
12704
12705 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12706 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12707 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12708 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12709 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12710 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12711 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12712 f_elem = 8 / f_ebytes;
12713
12714 if (!l_bit)
12715 {
12716 ULONGEST u_regval = 0;
12717 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12718 address = u_regval;
12719
12720 if (!a_bit)
12721 {
12722 /* Handle VST1. */
12723 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12724 {
12725 if (b_bits == 0x07)
12726 bf_regs = 1;
12727 else if (b_bits == 0x0a)
12728 bf_regs = 2;
12729 else if (b_bits == 0x06)
12730 bf_regs = 3;
12731 else if (b_bits == 0x02)
12732 bf_regs = 4;
12733 else
12734 bf_regs = 0;
12735
12736 for (index_r = 0; index_r < bf_regs; index_r++)
12737 {
12738 for (index_e = 0; index_e < f_elem; index_e++)
12739 {
12740 record_buf_mem[index_m++] = f_ebytes;
12741 record_buf_mem[index_m++] = address;
12742 address = address + f_ebytes;
12743 thumb2_insn_r->mem_rec_count += 1;
12744 }
12745 }
12746 }
12747 /* Handle VST2. */
12748 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12749 {
12750 if (b_bits == 0x09 || b_bits == 0x08)
12751 bf_regs = 1;
12752 else if (b_bits == 0x03)
12753 bf_regs = 2;
12754 else
12755 bf_regs = 0;
12756
12757 for (index_r = 0; index_r < bf_regs; index_r++)
12758 for (index_e = 0; index_e < f_elem; index_e++)
12759 {
12760 for (loop_t = 0; loop_t < 2; loop_t++)
12761 {
12762 record_buf_mem[index_m++] = f_ebytes;
12763 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12764 thumb2_insn_r->mem_rec_count += 1;
12765 }
12766 address = address + (2 * f_ebytes);
12767 }
12768 }
12769 /* Handle VST3. */
12770 else if ((b_bits & 0x0e) == 0x04)
12771 {
12772 for (index_e = 0; index_e < f_elem; index_e++)
12773 {
12774 for (loop_t = 0; loop_t < 3; loop_t++)
12775 {
12776 record_buf_mem[index_m++] = f_ebytes;
12777 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12778 thumb2_insn_r->mem_rec_count += 1;
12779 }
12780 address = address + (3 * f_ebytes);
12781 }
12782 }
12783 /* Handle VST4. */
12784 else if (!(b_bits & 0x0e))
12785 {
12786 for (index_e = 0; index_e < f_elem; index_e++)
12787 {
12788 for (loop_t = 0; loop_t < 4; loop_t++)
12789 {
12790 record_buf_mem[index_m++] = f_ebytes;
12791 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12792 thumb2_insn_r->mem_rec_count += 1;
12793 }
12794 address = address + (4 * f_ebytes);
12795 }
12796 }
12797 }
12798 else
12799 {
12800 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12801
12802 if (bft_size == 0x00)
12803 f_ebytes = 1;
12804 else if (bft_size == 0x01)
12805 f_ebytes = 2;
12806 else if (bft_size == 0x02)
12807 f_ebytes = 4;
12808 else
12809 f_ebytes = 0;
12810
12811 /* Handle VST1. */
12812 if (!(b_bits & 0x0b) || b_bits == 0x08)
12813 thumb2_insn_r->mem_rec_count = 1;
12814 /* Handle VST2. */
12815 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12816 thumb2_insn_r->mem_rec_count = 2;
12817 /* Handle VST3. */
12818 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12819 thumb2_insn_r->mem_rec_count = 3;
12820 /* Handle VST4. */
12821 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12822 thumb2_insn_r->mem_rec_count = 4;
12823
12824 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12825 {
12826 record_buf_mem[index_m] = f_ebytes;
12827 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12828 }
12829 }
12830 }
12831 else
12832 {
12833 if (!a_bit)
12834 {
12835 /* Handle VLD1. */
12836 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12837 thumb2_insn_r->reg_rec_count = 1;
12838 /* Handle VLD2. */
12839 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12840 thumb2_insn_r->reg_rec_count = 2;
12841 /* Handle VLD3. */
12842 else if ((b_bits & 0x0e) == 0x04)
12843 thumb2_insn_r->reg_rec_count = 3;
12844 /* Handle VLD4. */
12845 else if (!(b_bits & 0x0e))
12846 thumb2_insn_r->reg_rec_count = 4;
12847 }
12848 else
12849 {
12850 /* Handle VLD1. */
12851 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12852 thumb2_insn_r->reg_rec_count = 1;
12853 /* Handle VLD2. */
12854 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12855 thumb2_insn_r->reg_rec_count = 2;
12856 /* Handle VLD3. */
12857 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12858 thumb2_insn_r->reg_rec_count = 3;
12859 /* Handle VLD4. */
12860 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12861 thumb2_insn_r->reg_rec_count = 4;
12862
12863 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12864 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12865 }
12866 }
12867
12868 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12869 {
12870 record_buf[index_r] = reg_rn;
12871 thumb2_insn_r->reg_rec_count += 1;
12872 }
12873
12874 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12875 record_buf);
12876 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12877 record_buf_mem);
12878 return 0;
12879 }
12880
/* Decodes thumb2 instruction type and invokes its record handler.
   Dispatches on the major opcode fields of the 32-bit Thumb encoding:
   op1 (bits 27-28) selects the top-level group, op2 (bits 20-26) the
   sub-group, and op (bit 15) separates branches within group 0b10.
   Returns the handler's status, or -1 for an unrecognized encoding
   (note the -1 wraps in this unsigned return type; callers compare the
   result against ARM_RECORD_SUCCESS).  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
        {
          /* Load/store multiple instruction.  */
          return thumb2_record_ld_st_multiple (thumb2_insn_r);
        }
      else if ((op2 & 0x64) == 0x4)
        {
          /* Load/store (dual/exclusive) and table branch instruction.  */
          return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
        }
      else if ((op2 & 0x60) == 0x20)
        {
          /* Data-processing (shifted register).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }
  else if (op1 == 0x02)
    {
      if (op)
        {
          /* Branches and miscellaneous control instructions.  */
          return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
        }
      else if (op2 & 0x20)
        {
          /* Data-processing (plain binary immediate) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else
        {
          /* Data-processing (modified immediate).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
        {
          /* Store single data item.  */
          return thumb2_record_str_single_data (thumb2_insn_r);
        }
      else if (!((op2 & 0x71) ^ 0x10))
        {
          /* Advanced SIMD or structure load/store instructions.  */
          return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x01))
        {
          /* Load byte, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x03))
        {
          /* Load halfword, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x05))
        {
          /* Load word instruction.  */
          return thumb2_record_ld_word (thumb2_insn_r);
        }
      else if (!((op2 & 0x70) ^ 0x20))
        {
          /* Data-processing (register) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x30))
        {
          /* Multiply, multiply accumulate, abs diff instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x38))
        {
          /* Long multiply, long multiply accumulate, and divide.  */
          return thumb2_record_lmul_lmla_div (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }

  /* Unrecognized encoding.  */
  return -1;
}
12984
12985 namespace {
12986 /* Abstract memory reader. */
12987
12988 class abstract_memory_reader
12989 {
12990 public:
12991 /* Read LEN bytes of target memory at address MEMADDR, placing the
12992 results in GDB's memory at BUF. Return true on success. */
12993
12994 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12995 };
12996
12997 /* Instruction reader from real target. */
12998
12999 class instruction_reader : public abstract_memory_reader
13000 {
13001 public:
13002 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13003 {
13004 if (target_read_memory (memaddr, buf, len))
13005 return false;
13006 else
13007 return true;
13008 }
13009 };
13010
13011 } // namespace
13012
13013 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13014 and positive val on failure. */
13015
13016 static int
13017 extract_arm_insn (abstract_memory_reader& reader,
13018 insn_decode_record *insn_record, uint32_t insn_size)
13019 {
13020 gdb_byte buf[insn_size];
13021
13022 memset (&buf[0], 0, insn_size);
13023
13024 if (!reader.read (insn_record->this_addr, buf, insn_size))
13025 return 1;
13026 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13027 insn_size,
13028 gdbarch_byte_order_for_code (insn_record->gdbarch));
13029 return 0;
13030 }
13031
13032 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13033
13034 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13035 dispatch it. */
13036
13037 static int
13038 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13039 record_type_t record_type, uint32_t insn_size)
13040 {
13041
13042 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13043 instruction. */
13044 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13045 {
13046 arm_record_data_proc_misc_ld_str, /* 000. */
13047 arm_record_data_proc_imm, /* 001. */
13048 arm_record_ld_st_imm_offset, /* 010. */
13049 arm_record_ld_st_reg_offset, /* 011. */
13050 arm_record_ld_st_multiple, /* 100. */
13051 arm_record_b_bl, /* 101. */
13052 arm_record_asimd_vfp_coproc, /* 110. */
13053 arm_record_coproc_data_proc /* 111. */
13054 };
13055
13056 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13057 instruction. */
13058 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13059 { \
13060 thumb_record_shift_add_sub, /* 000. */
13061 thumb_record_add_sub_cmp_mov, /* 001. */
13062 thumb_record_ld_st_reg_offset, /* 010. */
13063 thumb_record_ld_st_imm_offset, /* 011. */
13064 thumb_record_ld_st_stack, /* 100. */
13065 thumb_record_misc, /* 101. */
13066 thumb_record_ldm_stm_swi, /* 110. */
13067 thumb_record_branch /* 111. */
13068 };
13069
13070 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13071 uint32_t insn_id = 0;
13072
13073 if (extract_arm_insn (reader, arm_record, insn_size))
13074 {
13075 if (record_debug)
13076 {
13077 printf_unfiltered (_("Process record: error reading memory at "
13078 "addr %s len = %d.\n"),
13079 paddress (arm_record->gdbarch,
13080 arm_record->this_addr), insn_size);
13081 }
13082 return -1;
13083 }
13084 else if (ARM_RECORD == record_type)
13085 {
13086 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13087 insn_id = bits (arm_record->arm_insn, 25, 27);
13088
13089 if (arm_record->cond == 0xf)
13090 ret = arm_record_extension_space (arm_record);
13091 else
13092 {
13093 /* If this insn has fallen into extension space
13094 then we need not decode it anymore. */
13095 ret = arm_handle_insn[insn_id] (arm_record);
13096 }
13097 if (ret != ARM_RECORD_SUCCESS)
13098 {
13099 arm_record_unsupported_insn (arm_record);
13100 ret = -1;
13101 }
13102 }
13103 else if (THUMB_RECORD == record_type)
13104 {
13105 /* As thumb does not have condition codes, we set negative. */
13106 arm_record->cond = -1;
13107 insn_id = bits (arm_record->arm_insn, 13, 15);
13108 ret = thumb_handle_insn[insn_id] (arm_record);
13109 if (ret != ARM_RECORD_SUCCESS)
13110 {
13111 arm_record_unsupported_insn (arm_record);
13112 ret = -1;
13113 }
13114 }
13115 else if (THUMB2_RECORD == record_type)
13116 {
13117 /* As thumb does not have condition codes, we set negative. */
13118 arm_record->cond = -1;
13119
13120 /* Swap first half of 32bit thumb instruction with second half. */
13121 arm_record->arm_insn
13122 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13123
13124 ret = thumb2_record_decode_insn_handler (arm_record);
13125
13126 if (ret != ARM_RECORD_SUCCESS)
13127 {
13128 arm_record_unsupported_insn (arm_record);
13129 ret = -1;
13130 }
13131 }
13132 else
13133 {
13134 /* Throw assertion. */
13135 gdb_assert_not_reached ("not a valid instruction, could not decode");
13136 }
13137
13138 return ret;
13139 }
13140
13141 #if GDB_SELF_TEST
13142 namespace selftests {
13143
13144 /* Provide both 16-bit and 32-bit thumb instructions. */
13145
13146 class instruction_reader_thumb : public abstract_memory_reader
13147 {
13148 public:
13149 template<size_t SIZE>
13150 instruction_reader_thumb (enum bfd_endian endian,
13151 const uint16_t (&insns)[SIZE])
13152 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13153 {}
13154
13155 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13156 {
13157 SELF_CHECK (len == 4 || len == 2);
13158 SELF_CHECK (memaddr % 2 == 0);
13159 SELF_CHECK ((memaddr / 2) < m_insns_size);
13160
13161 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13162 if (len == 4)
13163 {
13164 store_unsigned_integer (&buf[2], 2, m_endian,
13165 m_insns[memaddr / 2 + 1]);
13166 }
13167 return true;
13168 }
13169
13170 private:
13171 enum bfd_endian m_endian;
13172 const uint16_t *m_insns;
13173 size_t m_insns_size;
13174 };
13175
/* Self test for ARM/Thumb process-record decoding: builds a gdbarch for
   "arm" and checks that known 16-bit Thumb and 32-bit Thumb-2
   instructions produce the expected register records.  */

static void
arm_record_test (void)
{
  struct gdbarch_info info;
  gdbarch_info_init (&info);
  info.bfd_arch_info = bfd_scan_arch ("arm");

  struct gdbarch *gdbarch = gdbarch_find_by_info (info);

  SELF_CHECK (gdbarch != NULL);

  /* 16-bit Thumb instructions.  */
  {
    insn_decode_record arm_record;

    /* Zero-fill gives a clean initial state: no regcache, address 0.  */
    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* db b2      uxtb    r3, r3 */
      0xb2db,
      /* cd 58      ldr     r5, [r1, r3] */
      0x58cd,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                           THUMB_INSN_SIZE_BYTES);

    /* uxtb r3, r3 must record exactly one register: r3.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 3);

    /* Advance past the 2-byte uxtb and decode the ldr.  */
    arm_record.this_addr += 2;
    ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                       THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 5);
  }

  /* 32-bit Thumb-2 instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* 1d ee 70 7f     mrc    15, 0, r7, cr13, cr0, {3} */
      0xee1d, 0x7f70,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
                           THUMB2_INSN_SIZE_BYTES);

    /* mrc writes only the transferred register, r7.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 7);
  }
}
13244 } // namespace selftests
13245 #endif /* GDB_SELF_TEST */
13246
13247 /* Cleans up local record registers and memory allocations. */
13248
13249 static void
13250 deallocate_reg_mem (insn_decode_record *record)
13251 {
13252 xfree (record->arm_regs);
13253 xfree (record->arm_mems);
13254 }
13255
13256
/* Parse the current instruction and record the values of the registers and
   memory that will be changed in current instruction to record_arch_list".
   Return -1 if something is wrong.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
                    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* return value: -1:record failure ;  0:success  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;


  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
                          "addr = %s\n",
                          paddress (gdbarch, arm_record.this_addr));
    }

  /* Peek at the first half-word so the ARM/Thumb decision below can
     classify the instruction; decode_insn re-reads the full width.  */
  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
        {
          printf_unfiltered (_("Process record: error reading memory at "
                               "addr %s len = %d.\n"),
                             paddress (arm_record.gdbarch,
                                       arm_record.this_addr), 2);
        }
      return -1;
    }

  /* Check the insn, whether it is thumb or arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);


  if (!(u_regval & t_bit))
    {
      /* We are decoding arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* is it thumb2 insn?  The half-word prefixes 0x1D/0x1E/0x1F
         (0b11101/0b11110/0b11111) introduce a 32-bit instruction.  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
        {
          ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
                             THUMB2_INSN_SIZE_BYTES);
        }
      else
        {
          /* We are decoding thumb insn.  */
          ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                             THUMB_INSN_SIZE_BYTES);
        }
    }

  if (0 == ret)
    {
      /* Record registers.  PC is always recorded so reverse execution
         can restore it.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_reg
                  (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
                ret = -1;
            }
        }
      /* Record memories.  */
      if (arm_record.arm_mems)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_mem
                  ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
                   arm_record.arm_mems[no_of_rec].len))
                ret = -1;
            }
        }

      if (record_full_arch_list_add_end ())
        ret = -1;
    }


  deallocate_reg_mem (&arm_record);

  return ret;
}
13362
13363 /* See arm-tdep.h. */
13364
13365 const target_desc *
13366 arm_read_description (arm_fp_type fp_type)
13367 {
13368 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13369
13370 if (tdesc == nullptr)
13371 {
13372 tdesc = arm_create_target_description (fp_type);
13373 tdesc_arm_list[fp_type] = tdesc;
13374 }
13375
13376 return tdesc;
13377 }
13378
13379 /* See arm-tdep.h. */
13380
13381 const target_desc *
13382 arm_read_mprofile_description (arm_m_profile_type m_type)
13383 {
13384 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13385
13386 if (tdesc == nullptr)
13387 {
13388 tdesc = arm_create_mprofile_target_description (m_type);
13389 tdesc_arm_mprofile_list[m_type] = tdesc;
13390 }
13391
13392 return tdesc;
13393 }