1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
53
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
57
58 #include "gdbsupport/vec.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #if GDB_SELF_TEST
65 #include "gdbsupport/selftest.h"
66 #endif
67
68 static int arm_debug;
69
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as a Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
73
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
76
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
79
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
82
83 struct arm_mapping_symbol
84 {
85 bfd_vma value;
86 char type;
87
88 bool operator< (const arm_mapping_symbol &other) const
89 { return this->value < other.value; }
90 };
91
92 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
93
94 struct arm_per_objfile
95 {
96 explicit arm_per_objfile (size_t num_sections)
97 : section_maps (new arm_mapping_symbol_vec[num_sections]),
98 section_maps_sorted (new bool[num_sections] ())
99 {}
100
101 DISABLE_COPY_AND_ASSIGN (arm_per_objfile);
102
103 /* Information about mapping symbols ($a, $d, $t) in the objfile.
104
105 The format is an array of vectors of arm_mapping_symbols: there is one
106 vector for each section of the objfile (the array is indexed by BFD
107 section index).
108
109 For each section, the vector of arm_mapping_symbol is sorted by
110 symbol value (address). */
111 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
112
113 /* For each corresponding element of section_maps above, whether that
114 vector has been sorted yet.
115 std::unique_ptr<bool[]> section_maps_sorted;
116 };
117
118 /* Per-objfile data used for mapping symbols. */
119 static objfile_key<arm_per_objfile> arm_objfile_data_key;
120
121 /* The list of available "set arm ..." and "show arm ..." commands. */
122 static struct cmd_list_element *setarmcmdlist = NULL;
123 static struct cmd_list_element *showarmcmdlist = NULL;
124
125 /* The type of floating-point to use. Keep this in sync with enum
126 arm_float_model, and the help string in _initialize_arm_tdep. */
127 static const char *const fp_model_strings[] =
128 {
129 "auto",
130 "softfpa",
131 "fpa",
132 "softvfp",
133 "vfp",
134 NULL
135 };
136
137 /* A variable that can be configured by the user. */
138 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
139 static const char *current_fp_model = "auto";
140
141 /* The ABI to use. Keep this in sync with arm_abi_kind. */
142 static const char *const arm_abi_strings[] =
143 {
144 "auto",
145 "APCS",
146 "AAPCS",
147 NULL
148 };
149
150 /* A variable that can be configured by the user. */
151 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
152 static const char *arm_abi_string = "auto";
153
154 /* The execution mode to assume. */
155 static const char *const arm_mode_strings[] =
156 {
157 "auto",
158 "arm",
159 "thumb",
160 NULL
161 };
162
163 static const char *arm_fallback_mode_string = "auto";
164 static const char *arm_force_mode_string = "auto";
165
166 /* The standard register names, and all the valid aliases for them. Note
167 that `fp', `sp' and `pc' are not added in this alias list, because they
168 have been added as builtin user registers in
169 std-regs.c:_initialize_frame_reg. */
170 static const struct
171 {
172 const char *name;
173 int regnum;
174 } arm_register_aliases[] = {
175 /* Basic register numbers. */
176 { "r0", 0 },
177 { "r1", 1 },
178 { "r2", 2 },
179 { "r3", 3 },
180 { "r4", 4 },
181 { "r5", 5 },
182 { "r6", 6 },
183 { "r7", 7 },
184 { "r8", 8 },
185 { "r9", 9 },
186 { "r10", 10 },
187 { "r11", 11 },
188 { "r12", 12 },
189 { "r13", 13 },
190 { "r14", 14 },
191 { "r15", 15 },
192 /* Synonyms (argument and variable registers). */
193 { "a1", 0 },
194 { "a2", 1 },
195 { "a3", 2 },
196 { "a4", 3 },
197 { "v1", 4 },
198 { "v2", 5 },
199 { "v3", 6 },
200 { "v4", 7 },
201 { "v5", 8 },
202 { "v6", 9 },
203 { "v7", 10 },
204 { "v8", 11 },
205 /* Other platform-specific names for r9. */
206 { "sb", 9 },
207 { "tr", 9 },
208 /* Special names. */
209 { "ip", 12 },
210 { "lr", 14 },
211 /* Names used by GCC (not listed in the ARM EABI). */
212 { "sl", 10 },
213 /* A special name from the older ATPCS. */
214 { "wr", 7 },
215 };
216
217 static const char *const arm_register_names[] =
218 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
219 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
220 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
221 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
222 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
223 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
224 "fps", "cpsr" }; /* 24 25 */
225
226 /* Holds the current set of options to be passed to the disassembler. */
227 static char *arm_disassembler_options;
228
229 /* Valid register name styles. */
230 static const char **valid_disassembly_styles;
231
232 /* Disassembly style to use. Default to "std" register names. */
233 static const char *disassembly_style;
234
235 /* All possible arm target descriptors. */
236 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
237 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
238
239 /* This is used to keep the bfd arch_info in sync with the disassembly
240 style. */
241 static void set_disassembly_style_sfunc (const char *, int,
242 struct cmd_list_element *);
243 static void show_disassembly_style_sfunc (struct ui_file *, int,
244 struct cmd_list_element *,
245 const char *);
246
247 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
248 readable_regcache *regcache,
249 int regnum, gdb_byte *buf);
250 static void arm_neon_quad_write (struct gdbarch *gdbarch,
251 struct regcache *regcache,
252 int regnum, const gdb_byte *buf);
253
254 static CORE_ADDR
255 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
256
257
258 /* get_next_pcs operations. */
259 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
260 arm_get_next_pcs_read_memory_unsigned_integer,
261 arm_get_next_pcs_syscall_next_pc,
262 arm_get_next_pcs_addr_bits_remove,
263 arm_get_next_pcs_is_thumb,
264 NULL,
265 };
266
267 struct arm_prologue_cache
268 {
269 /* The stack pointer at the time this frame was created; i.e. the
270 caller's stack pointer when this function was called. It is used
271 to identify this frame. */
272 CORE_ADDR prev_sp;
273
274 /* The frame base for this frame is just prev_sp - frame size.
275 FRAMESIZE is the distance from the frame pointer to the
276 initial stack pointer. */
277
278 int framesize;
279
280 /* The register used to hold the frame pointer for this frame. */
281 int framereg;
282
283 /* Saved register offsets. */
284 struct trad_frame_saved_reg *saved_regs;
285 };
286
287 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
288 CORE_ADDR prologue_start,
289 CORE_ADDR prologue_end,
290 struct arm_prologue_cache *cache);
291
292 /* Architecture version for displaced stepping. This affects the behaviour of
293 certain instructions, and really should not be hard-wired. */
294
295 #define DISPLACED_STEPPING_ARCH_VERSION 5
296
297 /* Set to true if the 32-bit mode is in use. */
298
299 int arm_apcs_32 = 1;
300
301 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
302
303 int
304 arm_psr_thumb_bit (struct gdbarch *gdbarch)
305 {
306 if (gdbarch_tdep (gdbarch)->is_m)
307 return XPSR_T;
308 else
309 return CPSR_T;
310 }
311
312 /* Determine if the processor is currently executing in Thumb mode. */
313
314 int
315 arm_is_thumb (struct regcache *regcache)
316 {
317 ULONGEST cpsr;
318 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
319
320 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
321
322 return (cpsr & t_bit) != 0;
323 }
324
325 /* Determine if FRAME is executing in Thumb mode. */
326
327 int
328 arm_frame_is_thumb (struct frame_info *frame)
329 {
330 CORE_ADDR cpsr;
331 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
332
333 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
334 directly (from a signal frame or dummy frame) or by interpreting
335 the saved LR (from a prologue or DWARF frame). So consult it and
336 trust the unwinders. */
337 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
338
339 return (cpsr & t_bit) != 0;
340 }
341
342 /* Search for the mapping symbol covering MEMADDR. If one is found,
343 return its type. Otherwise, return 0. If START is non-NULL,
344 set *START to the location of the mapping symbol. */
345
346 static char
347 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
348 {
349 struct obj_section *sec;
350
351 /* If there are mapping symbols, consult them. */
352 sec = find_pc_section (memaddr);
353 if (sec != NULL)
354 {
355 arm_per_objfile *data = arm_objfile_data_key.get (sec->objfile);
356 if (data != NULL)
357 {
358 unsigned int section_idx = sec->the_bfd_section->index;
359 arm_mapping_symbol_vec &map
360 = data->section_maps[section_idx];
361
362 /* Sort the vector on first use. */
363 if (!data->section_maps_sorted[section_idx])
364 {
365 std::sort (map.begin (), map.end ());
366 data->section_maps_sorted[section_idx] = true;
367 }
368
369 struct arm_mapping_symbol map_key
370 = { memaddr - obj_section_addr (sec), 0 };
371 arm_mapping_symbol_vec::const_iterator it
372 = std::lower_bound (map.begin (), map.end (), map_key);
373
374 /* std::lower_bound finds the earliest ordered insertion
375 point. If the symbol at this position starts at this exact
376 address, we use that; otherwise, the preceding
377 mapping symbol covers this address. */
378 if (it < map.end ())
379 {
380 if (it->value == map_key.value)
381 {
382 if (start)
383 *start = it->value + obj_section_addr (sec);
384 return it->type;
385 }
386 }
387
388 if (it > map.begin ())
389 {
390 arm_mapping_symbol_vec::const_iterator prev_it
391 = it - 1;
392
393 if (start)
394 *start = prev_it->value + obj_section_addr (sec);
395 return prev_it->type;
396 }
397 }
398 }
399
400 return 0;
401 }
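
/* Illustrative sketch, not part of the original source: suppose a section
   loaded at 0x1000 carries the mapping symbols

     $t at offset 0x00   (Thumb code follows)
     $d at offset 0x40   (literal pool / data follows)
     $a at offset 0x60   (ARM code follows)

   For MEMADDR == 0x1050, the lookup key is 0x50; std::lower_bound lands on
   the $a entry at 0x60, so the preceding $d entry is the one covering the
   address, and arm_find_mapping_symbol returns 'd' with *START set to
   0x1040.  */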
402
403 /* Determine if the program counter specified in MEMADDR is in a Thumb
404 function. This function should be called for addresses unrelated to
405 any executing frame; otherwise, prefer arm_frame_is_thumb. */
406
407 int
408 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
409 {
410 struct bound_minimal_symbol sym;
411 char type;
412 arm_displaced_step_closure *dsc
413 = ((arm_displaced_step_closure * )
414 get_displaced_step_closure_by_addr (memaddr));
415
416 /* If checking the mode of a displaced instruction in the copy area, the
417 mode should be determined by the instruction at the original address. */
418 if (dsc)
419 {
420 if (debug_displaced)
421 fprintf_unfiltered (gdb_stdlog,
422 "displaced: check mode of %.8lx instead of %.8lx\n",
423 (unsigned long) dsc->insn_addr,
424 (unsigned long) memaddr);
425 memaddr = dsc->insn_addr;
426 }
427
428 /* If bit 0 of the address is set, assume this is a Thumb address. */
429 if (IS_THUMB_ADDR (memaddr))
430 return 1;
431
432 /* If the user wants to override the symbol table, let him. */
433 if (strcmp (arm_force_mode_string, "arm") == 0)
434 return 0;
435 if (strcmp (arm_force_mode_string, "thumb") == 0)
436 return 1;
437
438 /* ARM v6-M and v7-M are always in Thumb mode. */
439 if (gdbarch_tdep (gdbarch)->is_m)
440 return 1;
441
442 /* If there are mapping symbols, consult them. */
443 type = arm_find_mapping_symbol (memaddr, NULL);
444 if (type)
445 return type == 't';
446
447 /* Thumb functions have a "special" bit set in minimal symbols. */
448 sym = lookup_minimal_symbol_by_pc (memaddr);
449 if (sym.minsym)
450 return (MSYMBOL_IS_SPECIAL (sym.minsym));
451
452 /* If the user wants to override the fallback mode, let them. */
453 if (strcmp (arm_fallback_mode_string, "arm") == 0)
454 return 0;
455 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
456 return 1;
457
458 /* If we couldn't find any symbol, but we're talking to a running
459 target, then trust the current value of $cpsr. This lets
460 "display/i $pc" always show the correct mode (though if there is
461 a symbol table we will not reach here, so it still may not be
462 displayed in the mode it will be executed). */
463 if (target_has_registers)
464 return arm_frame_is_thumb (get_current_frame ());
465
466 /* Otherwise we're out of luck; we assume ARM. */
467 return 0;
468 }
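
/* Illustrative sketch, not part of the original source: a Thumb branch
   target or return address (for instance a value loaded from LR) has bit 0
   set, so for MEMADDR == 0x8001 the IS_THUMB_ADDR check above answers
   immediately, before any mapping symbols or minimal symbols are consulted.
   For an even address the later heuristics (mapping symbols, the "special"
   minimal-symbol bit, the fallback mode, and finally $cpsr) are tried in
   turn.  */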
469
470 /* Determine if the address specified equals any of these magic return
471 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
472 architectures.
473
474 From ARMv6-M Reference Manual B1.5.8
475 Table B1-5 Exception return behavior
476
477 EXC_RETURN Return To Return Stack
478 0xFFFFFFF1 Handler mode Main
479 0xFFFFFFF9 Thread mode Main
480 0xFFFFFFFD Thread mode Process
481
482 From ARMv7-M Reference Manual B1.5.8
483 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
484
485 EXC_RETURN Return To Return Stack
486 0xFFFFFFF1 Handler mode Main
487 0xFFFFFFF9 Thread mode Main
488 0xFFFFFFFD Thread mode Process
489
490 Table B1-9 EXC_RETURN definition of exception return behavior, with
491 FP
492
493 EXC_RETURN Return To Return Stack Frame Type
494 0xFFFFFFE1 Handler mode Main Extended
495 0xFFFFFFE9 Thread mode Main Extended
496 0xFFFFFFED Thread mode Process Extended
497 0xFFFFFFF1 Handler mode Main Basic
498 0xFFFFFFF9 Thread mode Main Basic
499 0xFFFFFFFD Thread mode Process Basic
500
501 For more details see "B1.5.8 Exception return behavior"
502 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
503
504 static int
505 arm_m_addr_is_magic (CORE_ADDR addr)
506 {
507 switch (addr)
508 {
509 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
510 the exception return behavior. */
511 case 0xffffffe1:
512 case 0xffffffe9:
513 case 0xffffffed:
514 case 0xfffffff1:
515 case 0xfffffff9:
516 case 0xfffffffd:
517 /* Address is magic. */
518 return 1;
519
520 default:
521 /* Address is not magic. */
522 return 0;
523 }
524 }
525
526 /* Remove useless bits from addresses in a running program. */
527 static CORE_ADDR
528 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
529 {
530 /* On M-profile devices, do not strip the low bit from EXC_RETURN
531 (the magic exception return address). */
532 if (gdbarch_tdep (gdbarch)->is_m
533 && arm_m_addr_is_magic (val))
534 return val;
535
536 if (arm_apcs_32)
537 return UNMAKE_THUMB_ADDR (val);
538 else
539 return (val & 0x03fffffc);
540 }
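
/* Illustrative sketch, not part of the original source: on a 32-bit
   (arm_apcs_32) target, arm_addr_bits_remove (gdbarch, 0x8001) yields
   0x8000, since UNMAKE_THUMB_ADDR clears the Thumb bit (bit 0).  On an
   M-profile target the magic EXC_RETURN value 0xfffffffd is returned
   unchanged, so that exception-return unwinding can still recognize it.  */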
541
542 /* Return 1 if PC is the start of a compiler helper function which
543 can be safely ignored during prologue skipping. IS_THUMB is true
544 if the function is known to be a Thumb function due to the way it
545 is being called. */
546 static int
547 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
548 {
549 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
550 struct bound_minimal_symbol msym;
551
552 msym = lookup_minimal_symbol_by_pc (pc);
553 if (msym.minsym != NULL
554 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
555 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
556 {
557 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
558
559 /* The GNU linker's Thumb call stub to foo is named
560 __foo_from_thumb. */
561 if (strstr (name, "_from_thumb") != NULL)
562 name += 2;
563
564 /* On soft-float targets, __truncdfsf2 is called to convert promoted
565 arguments to their argument types in non-prototyped
566 functions. */
567 if (startswith (name, "__truncdfsf2"))
568 return 1;
569 if (startswith (name, "__aeabi_d2f"))
570 return 1;
571
572 /* Internal functions related to thread-local storage. */
573 if (startswith (name, "__tls_get_addr"))
574 return 1;
575 if (startswith (name, "__aeabi_read_tp"))
576 return 1;
577 }
578 else
579 {
580 /* If we run against a stripped glibc, we may be unable to identify
581 special functions by name. Check for one important case,
582 __aeabi_read_tp, by comparing the *code* against the default
583 implementation (this is hand-written ARM assembler in glibc). */
584
585 if (!is_thumb
586 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
587 == 0xe3e00a0f /* mov r0, #0xffff0fff */
588 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
589 == 0xe240f01f) /* sub pc, r0, #31 */
590 return 1;
591 }
592
593 return 0;
594 }
595
596 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1
597 is the first 16 bits of the instruction, and INSN2 is the second 16
598 bits. */
599 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
600 ((bits ((insn1), 0, 3) << 12) \
601 | (bits ((insn1), 10, 10) << 11) \
602 | (bits ((insn2), 12, 14) << 8) \
603 | bits ((insn2), 0, 7))
604
605 /* Extract the immediate from a movw/movt instruction of encoding A. INSN
606 is the 32-bit instruction. */
607 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
608 ((bits ((insn), 16, 19) << 12) \
609 | bits ((insn), 0, 11))
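
/* Illustrative sketch, not part of the original source: a worked example
   of EXTRACT_MOVW_MOVT_IMM_A for "movw r0, #0x1234", whose A1 encoding
   should be 0xe3010234:

     bits (insn, 16, 19) == 0x1     (imm4, the top nibble of the immediate)
     bits (insn, 0, 11)  == 0x234   (imm12, the low twelve bits)

     EXTRACT_MOVW_MOVT_IMM_A (0xe3010234) == (0x1 << 12) | 0x234 == 0x1234

   The T-encoding variant stitches the same 16-bit immediate together from
   the imm4, i, imm3 and imm8 fields spread across the two halfwords.  */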
610
611 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
612
613 static unsigned int
614 thumb_expand_immediate (unsigned int imm)
615 {
616 unsigned int count = imm >> 7;
617
618 if (count < 8)
619 switch (count / 2)
620 {
621 case 0:
622 return imm & 0xff;
623 case 1:
624 return (imm & 0xff) | ((imm & 0xff) << 16);
625 case 2:
626 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
627 case 3:
628 return (imm & 0xff) | ((imm & 0xff) << 8)
629 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
630 }
631
632 return (0x80 | (imm & 0x7f)) << (32 - count);
633 }
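
/* Illustrative sketch, not part of the original source: two worked
   examples of thumb_expand_immediate.

     imm == 0x1ab:  count == 0x1ab >> 7 == 3, so case 1 applies and the
                    result is 0xab | (0xab << 16) == 0x00ab00ab.

     imm == 0x555:  count == 0x555 >> 7 == 10, so the value is a rotated
                    8-bit constant: (0x80 | 0x55) << (32 - 10)
                    == 0xd5 << 22 == 0x35400000.  */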
634
635 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
636 epilogue, 0 otherwise. */
637
638 static int
639 thumb_instruction_restores_sp (unsigned short insn)
640 {
641 return (insn == 0x46bd /* mov sp, r7 */
642 || (insn & 0xff80) == 0xb000 /* add sp, imm */
643 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
644 }
645
646 /* Analyze a Thumb prologue, looking for a recognizable stack frame
647 and frame pointer. Scan until we encounter a store that could
648 clobber the stack frame unexpectedly, or an unknown instruction.
649 Return the last address which is definitely safe to skip for an
650 initial breakpoint. */
651
652 static CORE_ADDR
653 thumb_analyze_prologue (struct gdbarch *gdbarch,
654 CORE_ADDR start, CORE_ADDR limit,
655 struct arm_prologue_cache *cache)
656 {
657 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
658 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
659 int i;
660 pv_t regs[16];
661 CORE_ADDR offset;
662 CORE_ADDR unrecognized_pc = 0;
663
664 for (i = 0; i < 16; i++)
665 regs[i] = pv_register (i, 0);
666 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
667
668 while (start < limit)
669 {
670 unsigned short insn;
671
672 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
673
674 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
675 {
676 int regno;
677 int mask;
678
679 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
680 break;
681
682 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
683 whether to save LR (R14). */
684 mask = (insn & 0xff) | ((insn & 0x100) << 6);
685
686 /* Calculate offsets of saved R0-R7 and LR. */
687 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
688 if (mask & (1 << regno))
689 {
690 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
691 -4);
692 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
693 }
694 }
695 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
696 {
697 offset = (insn & 0x7f) << 2; /* get scaled offset */
698 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
699 -offset);
700 }
701 else if (thumb_instruction_restores_sp (insn))
702 {
703 /* Don't scan past the epilogue. */
704 break;
705 }
706 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
707 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
708 (insn & 0xff) << 2);
709 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
710 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
711 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
712 bits (insn, 6, 8));
713 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
714 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
715 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
716 bits (insn, 0, 7));
717 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
718 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
719 && pv_is_constant (regs[bits (insn, 3, 5)]))
720 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
721 regs[bits (insn, 6, 8)]);
722 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
723 && pv_is_constant (regs[bits (insn, 3, 6)]))
724 {
725 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
726 int rm = bits (insn, 3, 6);
727 regs[rd] = pv_add (regs[rd], regs[rm]);
728 }
729 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
730 {
731 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
732 int src_reg = (insn & 0x78) >> 3;
733 regs[dst_reg] = regs[src_reg];
734 }
735 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
736 {
737 /* Handle stores to the stack. Normally pushes are used,
738 but with GCC -mtpcs-frame, there may be other stores
739 in the prologue to create the frame. */
740 int regno = (insn >> 8) & 0x7;
741 pv_t addr;
742
743 offset = (insn & 0xff) << 2;
744 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
745
746 if (stack.store_would_trash (addr))
747 break;
748
749 stack.store (addr, 4, regs[regno]);
750 }
751 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
752 {
753 int rd = bits (insn, 0, 2);
754 int rn = bits (insn, 3, 5);
755 pv_t addr;
756
757 offset = bits (insn, 6, 10) << 2;
758 addr = pv_add_constant (regs[rn], offset);
759
760 if (stack.store_would_trash (addr))
761 break;
762
763 stack.store (addr, 4, regs[rd]);
764 }
765 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
766 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
767 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
768 /* Ignore stores of argument registers to the stack. */
769 ;
770 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
771 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
772 /* Ignore block loads from the stack, potentially copying
773 parameters from memory. */
774 ;
775 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
776 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
777 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
778 /* Similarly ignore single loads from the stack. */
779 ;
780 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
781 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
782 /* Skip register copies, i.e. saves to another register
783 instead of the stack. */
784 ;
785 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
786 /* Recognize constant loads; even with small stacks these are necessary
787 on Thumb. */
788 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
789 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
790 {
791 /* Constant pool loads, for the same reason. */
792 unsigned int constant;
793 CORE_ADDR loc;
794
795 loc = start + 4 + bits (insn, 0, 7) * 4;
796 constant = read_memory_unsigned_integer (loc, 4, byte_order);
797 regs[bits (insn, 8, 10)] = pv_constant (constant);
798 }
799 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
800 {
801 unsigned short inst2;
802
803 inst2 = read_code_unsigned_integer (start + 2, 2,
804 byte_order_for_code);
805
806 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
807 {
808 /* BL, BLX. Allow some special function calls when
809 skipping the prologue; GCC generates these before
810 storing arguments to the stack. */
811 CORE_ADDR nextpc;
812 int j1, j2, imm1, imm2;
813
814 imm1 = sbits (insn, 0, 10);
815 imm2 = bits (inst2, 0, 10);
816 j1 = bit (inst2, 13);
817 j2 = bit (inst2, 11);
818
819 offset = ((imm1 << 12) + (imm2 << 1));
820 offset ^= ((!j2) << 22) | ((!j1) << 23);
821
822 nextpc = start + 4 + offset;
823 /* For BLX make sure to clear the low bits. */
824 if (bit (inst2, 12) == 0)
825 nextpc = nextpc & 0xfffffffc;
826
827 if (!skip_prologue_function (gdbarch, nextpc,
828 bit (inst2, 12) != 0))
829 break;
830 }
831
832 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
833 { registers } */
834 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
835 {
836 pv_t addr = regs[bits (insn, 0, 3)];
837 int regno;
838
839 if (stack.store_would_trash (addr))
840 break;
841
842 /* Calculate offsets of saved registers. */
843 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
844 if (inst2 & (1 << regno))
845 {
846 addr = pv_add_constant (addr, -4);
847 stack.store (addr, 4, regs[regno]);
848 }
849
850 if (insn & 0x0020)
851 regs[bits (insn, 0, 3)] = addr;
852 }
853
854 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
855 [Rn, #+/-imm]{!} */
856 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
857 {
858 int regno1 = bits (inst2, 12, 15);
859 int regno2 = bits (inst2, 8, 11);
860 pv_t addr = regs[bits (insn, 0, 3)];
861
862 offset = inst2 & 0xff;
863 if (insn & 0x0080)
864 addr = pv_add_constant (addr, offset);
865 else
866 addr = pv_add_constant (addr, -offset);
867
868 if (stack.store_would_trash (addr))
869 break;
870
871 stack.store (addr, 4, regs[regno1]);
872 stack.store (pv_add_constant (addr, 4),
873 4, regs[regno2]);
874
875 if (insn & 0x0020)
876 regs[bits (insn, 0, 3)] = addr;
877 }
878
879 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
880 && (inst2 & 0x0c00) == 0x0c00
881 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
882 {
883 int regno = bits (inst2, 12, 15);
884 pv_t addr = regs[bits (insn, 0, 3)];
885
886 offset = inst2 & 0xff;
887 if (inst2 & 0x0200)
888 addr = pv_add_constant (addr, offset);
889 else
890 addr = pv_add_constant (addr, -offset);
891
892 if (stack.store_would_trash (addr))
893 break;
894
895 stack.store (addr, 4, regs[regno]);
896
897 if (inst2 & 0x0100)
898 regs[bits (insn, 0, 3)] = addr;
899 }
900
901 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
902 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
903 {
904 int regno = bits (inst2, 12, 15);
905 pv_t addr;
906
907 offset = inst2 & 0xfff;
908 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
909
910 if (stack.store_would_trash (addr))
911 break;
912
913 stack.store (addr, 4, regs[regno]);
914 }
915
916 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
917 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
918 /* Ignore stores of argument registers to the stack. */
919 ;
920
921 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
922 && (inst2 & 0x0d00) == 0x0c00
923 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
924 /* Ignore stores of argument registers to the stack. */
925 ;
926
927 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
928 { registers } */
929 && (inst2 & 0x8000) == 0x0000
930 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
931 /* Ignore block loads from the stack, potentially copying
932 parameters from memory. */
933 ;
934
935 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
936 [Rn, #+/-imm] */
937 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
938 /* Similarly ignore dual loads from the stack. */
939 ;
940
941 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
942 && (inst2 & 0x0d00) == 0x0c00
943 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 /* Similarly ignore single loads from the stack. */
945 ;
946
947 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
948 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
949 /* Similarly ignore single loads from the stack. */
950 ;
951
952 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
953 && (inst2 & 0x8000) == 0x0000)
954 {
955 unsigned int imm = ((bits (insn, 10, 10) << 11)
956 | (bits (inst2, 12, 14) << 8)
957 | bits (inst2, 0, 7));
958
959 regs[bits (inst2, 8, 11)]
960 = pv_add_constant (regs[bits (insn, 0, 3)],
961 thumb_expand_immediate (imm));
962 }
963
964 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
965 && (inst2 & 0x8000) == 0x0000)
966 {
967 unsigned int imm = ((bits (insn, 10, 10) << 11)
968 | (bits (inst2, 12, 14) << 8)
969 | bits (inst2, 0, 7));
970
971 regs[bits (inst2, 8, 11)]
972 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
973 }
974
975 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
976 && (inst2 & 0x8000) == 0x0000)
977 {
978 unsigned int imm = ((bits (insn, 10, 10) << 11)
979 | (bits (inst2, 12, 14) << 8)
980 | bits (inst2, 0, 7));
981
982 regs[bits (inst2, 8, 11)]
983 = pv_add_constant (regs[bits (insn, 0, 3)],
984 - (CORE_ADDR) thumb_expand_immediate (imm));
985 }
986
987 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
988 && (inst2 & 0x8000) == 0x0000)
989 {
990 unsigned int imm = ((bits (insn, 10, 10) << 11)
991 | (bits (inst2, 12, 14) << 8)
992 | bits (inst2, 0, 7));
993
994 regs[bits (inst2, 8, 11)]
995 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
996 }
997
998 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
999 {
1000 unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 | (bits (inst2, 12, 14) << 8)
1002 | bits (inst2, 0, 7));
1003
1004 regs[bits (inst2, 8, 11)]
1005 = pv_constant (thumb_expand_immediate (imm));
1006 }
1007
1008 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1009 {
1010 unsigned int imm
1011 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1012
1013 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1014 }
1015
1016 else if (insn == 0xea5f /* mov.w Rd,Rm */
1017 && (inst2 & 0xf0f0) == 0)
1018 {
1019 int dst_reg = (inst2 & 0x0f00) >> 8;
1020 int src_reg = inst2 & 0xf;
1021 regs[dst_reg] = regs[src_reg];
1022 }
1023
1024 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1025 {
1026 /* Constant pool loads. */
1027 unsigned int constant;
1028 CORE_ADDR loc;
1029
1030 offset = bits (inst2, 0, 11);
1031 if (insn & 0x0080)
1032 loc = start + 4 + offset;
1033 else
1034 loc = start + 4 - offset;
1035
1036 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1037 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1038 }
1039
1040 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1041 {
1042 /* Constant pool loads. */
1043 unsigned int constant;
1044 CORE_ADDR loc;
1045
1046 offset = bits (inst2, 0, 7) << 2;
1047 if (insn & 0x0080)
1048 loc = start + 4 + offset;
1049 else
1050 loc = start + 4 - offset;
1051
1052 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1053 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1054
1055 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1056 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1057 }
1058
1059 else if (thumb2_instruction_changes_pc (insn, inst2))
1060 {
1061 /* Don't scan past anything that might change control flow. */
1062 break;
1063 }
1064 else
1065 {
1066 /* The optimizer might shove anything into the prologue,
1067 so we just skip what we don't recognize. */
1068 unrecognized_pc = start;
1069 }
1070
1071 start += 2;
1072 }
1073 else if (thumb_instruction_changes_pc (insn))
1074 {
1075 /* Don't scan past anything that might change control flow. */
1076 break;
1077 }
1078 else
1079 {
1080 /* The optimizer might shove anything into the prologue,
1081 so we just skip what we don't recognize. */
1082 unrecognized_pc = start;
1083 }
1084
1085 start += 2;
1086 }
1087
1088 if (arm_debug)
1089 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1090 paddress (gdbarch, start));
1091
1092 if (unrecognized_pc == 0)
1093 unrecognized_pc = start;
1094
1095 if (cache == NULL)
1096 return unrecognized_pc;
1097
1098 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1099 {
1100 /* Frame pointer is fp. Frame size is constant. */
1101 cache->framereg = ARM_FP_REGNUM;
1102 cache->framesize = -regs[ARM_FP_REGNUM].k;
1103 }
1104 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1105 {
1106 /* Frame pointer is r7. Frame size is constant. */
1107 cache->framereg = THUMB_FP_REGNUM;
1108 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1109 }
1110 else
1111 {
1112 /* Try the stack pointer... this is a bit desperate. */
1113 cache->framereg = ARM_SP_REGNUM;
1114 cache->framesize = -regs[ARM_SP_REGNUM].k;
1115 }
1116
1117 for (i = 0; i < 16; i++)
1118 if (stack.find_reg (gdbarch, i, &offset))
1119 cache->saved_regs[i].addr = offset;
1120
1121 return unrecognized_pc;
1122 }
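
/* Illustrative sketch, not part of the original source: for a typical
   -mthumb GCC prologue such as

     push {r4, r7, lr}
     sub  sp, #16
     add  r7, sp, #0

   the scan above leaves regs[r7] == SP - 28, so the cache records
   framereg == THUMB_FP_REGNUM (r7) and framesize == 28, and lr, r7 and r4
   are found at offsets -4, -8 and -12 from the caller's stack pointer.  */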
1123
1124
1125 /* Try to analyze the instructions starting from PC, which load the symbol
1126 __stack_chk_guard. Return the address that is loaded (the address of
1127 __stack_chk_guard itself), set the destination register number in
1128 *DESTREG, and set the size in bytes of the loading sequence in *OFFSET.
1129 Return 0 if the instructions are not recognized. */
1130
1131 static CORE_ADDR
1132 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1133 unsigned int *destreg, int *offset)
1134 {
1135 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1136 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1137 unsigned int low, high, address;
1138
1139 address = 0;
1140 if (is_thumb)
1141 {
1142 unsigned short insn1
1143 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1144
1145 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1146 {
1147 *destreg = bits (insn1, 8, 10);
1148 *offset = 2;
1149 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1150 address = read_memory_unsigned_integer (address, 4,
1151 byte_order_for_code);
1152 }
1153 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1154 {
1155 unsigned short insn2
1156 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1157
1158 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1159
1160 insn1
1161 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1162 insn2
1163 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1164
1165 /* movt Rd, #const */
1166 if ((insn1 & 0xfbc0) == 0xf2c0)
1167 {
1168 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1169 *destreg = bits (insn2, 8, 11);
1170 *offset = 8;
1171 address = (high << 16 | low);
1172 }
1173 }
1174 }
1175 else
1176 {
1177 unsigned int insn
1178 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1179
1180 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1181 {
1182 address = bits (insn, 0, 11) + pc + 8;
1183 address = read_memory_unsigned_integer (address, 4,
1184 byte_order_for_code);
1185
1186 *destreg = bits (insn, 12, 15);
1187 *offset = 4;
1188 }
1189 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1190 {
1191 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1192
1193 insn
1194 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1195
1196 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1197 {
1198 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1199 *destreg = bits (insn, 12, 15);
1200 *offset = 8;
1201 address = (high << 16 | low);
1202 }
1203 }
1204 }
1205
1206 return address;
1207 }
1208
1209 /* Try to skip the sequence of instructions used by the stack protector.
1210 If PC points to the first instruction of this sequence, return the
1211 address of the first instruction after the sequence; otherwise return PC.
1212
1213 On ARM, this sequence of instructions is composed of three main steps:
1214 Step 1: load symbol __stack_chk_guard,
1215 Step 2: load from address of __stack_chk_guard,
1216 Step 3: store it to somewhere else.
1217
1218 Usually, the instructions in steps 2 and 3 are the same across ARM
1219 architectures. In step 2 it is one instruction, 'ldr Rx, [Rn, #0]', and
1220 in step 3 it is also one instruction, 'str Rx, [r7, #immd]'. However,
1221 the instructions in step 1 vary between ARM architectures. On ARMv7
1222 they are:
1223
1224 movw Rn, #:lower16:__stack_chk_guard
1225 movt Rn, #:upper16:__stack_chk_guard
1226
1227 On ARMv5t it is:
1228
1229 ldr Rn, .Label
1230 ....
1231 .Label:
1232 .word __stack_chk_guard
1233
1234 Since ldr/str are very common instructions, we can't use them alone as a
1235 'fingerprint' of the stack protector sequence. Here we use the sequence
1236 {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1237 stripped, as the 'fingerprint' of a stack protector code sequence. */
1238
1239 static CORE_ADDR
1240 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1241 {
1242 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1243 unsigned int basereg;
1244 struct bound_minimal_symbol stack_chk_guard;
1245 int offset;
1246 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1247 CORE_ADDR addr;
1248
1249 /* Try to parse the instructions in Step 1. */
1250 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1251 &basereg, &offset);
1252 if (!addr)
1253 return pc;
1254
1255 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1256 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1257 Otherwise, this sequence cannot be for the stack protector. */
1258 if (stack_chk_guard.minsym == NULL
1259 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1260 return pc;
1261
1262 if (is_thumb)
1263 {
1264 unsigned int destreg;
1265 unsigned short insn
1266 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1267
1268 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1269 if ((insn & 0xf800) != 0x6800)
1270 return pc;
1271 if (bits (insn, 3, 5) != basereg)
1272 return pc;
1273 destreg = bits (insn, 0, 2);
1274
1275 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1276 byte_order_for_code);
1277 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1278 if ((insn & 0xf800) != 0x6000)
1279 return pc;
1280 if (destreg != bits (insn, 0, 2))
1281 return pc;
1282 }
1283 else
1284 {
1285 unsigned int destreg;
1286 unsigned int insn
1287 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1288
1289 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1290 if ((insn & 0x0e500000) != 0x04100000)
1291 return pc;
1292 if (bits (insn, 16, 19) != basereg)
1293 return pc;
1294 destreg = bits (insn, 12, 15);
1295 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1296 insn = read_code_unsigned_integer (pc + offset + 4,
1297 4, byte_order_for_code);
1298 if ((insn & 0x0e500000) != 0x04000000)
1299 return pc;
1300 if (bits (insn, 12, 15) != destreg)
1301 return pc;
1302 }
1303 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1304 and 8 bytes on ARM. */
1305 if (is_thumb)
1306 return pc + offset + 4;
1307 else
1308 return pc + offset + 8;
1309 }
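
/* Illustrative sketch, not part of the original source: with
   -fstack-protector on ARMv7, GCC typically emits something like

     movw r3, #:lower16:__stack_chk_guard
     movt r3, #:upper16:__stack_chk_guard
     ldr  r3, [r3]
     str  r3, [r7, #12]      (the offset is compiler-chosen)

   arm_skip_stack_protector recognizes the movw/movt pair (step 1), the
   load through the guard address (step 2) and the store of the guard
   value (step 3), and returns the address just past the str.  */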
1310
1311 /* Advance the PC across any function entry prologue instructions to
1312 reach some "real" code.
1313
1314 The APCS (ARM Procedure Call Standard) defines the following
1315 prologue:
1316
1317 mov ip, sp
1318 [stmfd sp!, {a1,a2,a3,a4}]
1319 stmfd sp!, {...,fp,ip,lr,pc}
1320 [stfe f7, [sp, #-12]!]
1321 [stfe f6, [sp, #-12]!]
1322 [stfe f5, [sp, #-12]!]
1323 [stfe f4, [sp, #-12]!]
1324 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1325
1326 static CORE_ADDR
1327 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1328 {
1329 CORE_ADDR func_addr, limit_pc;
1330
1331 /* See if we can determine the end of the prologue via the symbol table.
1332 If so, then return either PC, or the PC after the prologue, whichever
1333 is greater. */
1334 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1335 {
1336 CORE_ADDR post_prologue_pc
1337 = skip_prologue_using_sal (gdbarch, func_addr);
1338 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1339
1340 if (post_prologue_pc)
1341 post_prologue_pc
1342 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1343
1344
1345 /* GCC always emits a line note before the prologue and another
1346 one after, even if the two are at the same address or on the
1347 same line. Take advantage of this so that we do not need to
1348 know every instruction that might appear in the prologue. We
1349 will have producer information for most binaries; if it is
1350 missing (e.g. for -gstabs), assume the GNU tools. */
1351 if (post_prologue_pc
1352 && (cust == NULL
1353 || COMPUNIT_PRODUCER (cust) == NULL
1354 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1355 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1356 return post_prologue_pc;
1357
1358 if (post_prologue_pc != 0)
1359 {
1360 CORE_ADDR analyzed_limit;
1361
1362 /* For non-GCC compilers, make sure the entire line is an
1363 acceptable prologue; GDB will round this function's
1364 return value up to the end of the following line so we
1365 cannot skip just part of a line (and we do not want to).
1366
1367 RealView does not treat the prologue specially, but does
1368 associate prologue code with the opening brace; so this
1369 lets us skip the first line if we think it is the opening
1370 brace. */
1371 if (arm_pc_is_thumb (gdbarch, func_addr))
1372 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1373 post_prologue_pc, NULL);
1374 else
1375 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1376 post_prologue_pc, NULL);
1377
1378 if (analyzed_limit != post_prologue_pc)
1379 return func_addr;
1380
1381 return post_prologue_pc;
1382 }
1383 }
1384
1385 /* Can't determine prologue from the symbol table, need to examine
1386 instructions. */
1387
1388 /* Find an upper limit on the function prologue using the debug
1389 information. If the debug information could not be used to provide
1390 that bound, then use an arbitrary large number as the upper bound. */
1391 /* Like arm_scan_prologue, stop no later than pc + 64. */
1392 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1393 if (limit_pc == 0)
1394 limit_pc = pc + 64; /* Magic. */
1395
1396
1397 /* Check if this is Thumb code. */
1398 if (arm_pc_is_thumb (gdbarch, pc))
1399 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1400 else
1401 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1402 }
1403
1404 /* *INDENT-OFF* */
1405 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1406 This function decodes a Thumb function prologue to determine:
1407 1) the size of the stack frame
1408 2) which registers are saved on it
1409 3) the offsets of saved regs
1410 4) the offset from the stack pointer to the frame pointer
1411
1412 A typical Thumb function prologue would create this stack frame
1413 (offsets relative to FP)
1414 old SP -> 24 stack parameters
1415 20 LR
1416 16 R7
1417 R7 -> 0 local variables (16 bytes)
1418 SP -> -12 additional stack space (12 bytes)
1419 The frame size would thus be 36 bytes, and the frame offset would be
1420 12 bytes. The frame register is R7.
1421
1422 The comments for thumb_analyze_prologue() describe the algorithm we
1423 use to detect the end of the prologue. */
1424 /* *INDENT-ON* */
1425
1426 static void
1427 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1428 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1429 {
1430 CORE_ADDR prologue_start;
1431 CORE_ADDR prologue_end;
1432
1433 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1434 &prologue_end))
1435 {
1436 /* See comment in arm_scan_prologue for an explanation of
1437 this heuristic. */
1438 if (prologue_end > prologue_start + 64)
1439 {
1440 prologue_end = prologue_start + 64;
1441 }
1442 }
1443 else
1444 /* We're in the boondocks: we have no idea where the start of the
1445 function is. */
1446 return;
1447
1448 prologue_end = std::min (prologue_end, prev_pc);
1449
1450 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1451 }
1452
1453 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1454 otherwise. */
1455
1456 static int
1457 arm_instruction_restores_sp (unsigned int insn)
1458 {
1459 if (bits (insn, 28, 31) != INST_NV)
1460 {
1461 if ((insn & 0x0df0f000) == 0x0080d000
1462 /* ADD SP (register or immediate). */
1463 || (insn & 0x0df0f000) == 0x0040d000
1464 /* SUB SP (register or immediate). */
1465 || (insn & 0x0ffffff0) == 0x01a0d000
1466 /* MOV SP. */
1467 || (insn & 0x0fff0000) == 0x08bd0000
1468 /* POP (LDMIA). */
1469 || (insn & 0x0fff0000) == 0x049d0000)
1470 /* POP of a single register. */
1471 return 1;
1472 }
1473
1474 return 0;
1475 }
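
/* Illustrative sketch, not part of the original source: the common GCC
   epilogue instructions "add sp, sp, #16" (0xe28dd010) and
   "pop {r4, pc}" (0xe8bd8010, an LDMIA of sp!) both match the patterns
   above, so the prologue scanner stops before it runs into epilogue
   code.  */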
1476
1477 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1478 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1479 fill it in. Return the first address not recognized as a prologue
1480 instruction.
1481
1482 We recognize all the instructions typically found in ARM prologues,
1483 plus harmless instructions which can be skipped (either for analysis
1484 purposes, or a more restrictive set that can be skipped when finding
1485 the end of the prologue). */
1486
1487 static CORE_ADDR
1488 arm_analyze_prologue (struct gdbarch *gdbarch,
1489 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1490 struct arm_prologue_cache *cache)
1491 {
1492 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1493 int regno;
1494 CORE_ADDR offset, current_pc;
1495 pv_t regs[ARM_FPS_REGNUM];
1496 CORE_ADDR unrecognized_pc = 0;
1497
1498 /* Search the prologue looking for instructions that set up the
1499 frame pointer, adjust the stack pointer, and save registers.
1500
1501 Be careful, however, and if it doesn't look like a prologue,
1502 don't try to scan it. If, for instance, a frameless function
1503 begins with stmfd sp!, then we will tell ourselves there is
1504 a frame, which will confuse stack traceback, as well as "finish"
1505 and other operations that rely on a knowledge of the stack
1506 traceback. */
1507
1508 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1509 regs[regno] = pv_register (regno, 0);
1510 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1511
1512 for (current_pc = prologue_start;
1513 current_pc < prologue_end;
1514 current_pc += 4)
1515 {
1516 unsigned int insn
1517 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1518
1519 if (insn == 0xe1a0c00d) /* mov ip, sp */
1520 {
1521 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1522 continue;
1523 }
1524 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1525 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1526 {
1527 unsigned imm = insn & 0xff; /* immediate value */
1528 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1529 int rd = bits (insn, 12, 15);
1530 imm = (imm >> rot) | (imm << (32 - rot));
1531 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1532 continue;
1533 }
1534 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1535 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1536 {
1537 unsigned imm = insn & 0xff; /* immediate value */
1538 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1539 int rd = bits (insn, 12, 15);
1540 imm = (imm >> rot) | (imm << (32 - rot));
1541 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1542 continue;
1543 }
1544 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1545 [sp, #-4]! */
1546 {
1547 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1548 break;
1549 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1550 stack.store (regs[ARM_SP_REGNUM], 4,
1551 regs[bits (insn, 12, 15)]);
1552 continue;
1553 }
1554 else if ((insn & 0xffff0000) == 0xe92d0000)
1555 /* stmfd sp!, {..., fp, ip, lr, pc}
1556 or
1557 stmfd sp!, {a1, a2, a3, a4} */
1558 {
1559 int mask = insn & 0xffff;
1560
1561 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1562 break;
1563
1564 /* Calculate offsets of saved registers. */
1565 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1566 if (mask & (1 << regno))
1567 {
1568 regs[ARM_SP_REGNUM]
1569 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1570 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1571 }
1572 }
1573 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1574 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1575 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1576 {
1577 /* No need to add this to saved_regs -- it's just an arg reg. */
1578 continue;
1579 }
1580 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1581 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1582 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1583 {
1584 /* No need to add this to saved_regs -- it's just an arg reg. */
1585 continue;
1586 }
1587 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1588 { registers } */
1589 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1590 {
1591 /* No need to add this to saved_regs -- it's just arg regs. */
1592 continue;
1593 }
1594 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1595 {
1596 unsigned imm = insn & 0xff; /* immediate value */
1597 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1598 imm = (imm >> rot) | (imm << (32 - rot));
1599 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1600 }
1601 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1602 {
1603 unsigned imm = insn & 0xff; /* immediate value */
1604 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1605 imm = (imm >> rot) | (imm << (32 - rot));
1606 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1607 }
1608 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1609 [sp, -#c]! */
1610 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1611 {
1612 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1613 break;
1614
1615 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1616 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1617 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1618 }
1619 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1620 [sp!] */
1621 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1622 {
1623 int n_saved_fp_regs;
1624 unsigned int fp_start_reg, fp_bound_reg;
1625
1626 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1627 break;
1628
1629 if ((insn & 0x800) == 0x800) /* N0 is set */
1630 {
1631 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1632 n_saved_fp_regs = 3;
1633 else
1634 n_saved_fp_regs = 1;
1635 }
1636 else
1637 {
1638 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1639 n_saved_fp_regs = 2;
1640 else
1641 n_saved_fp_regs = 4;
1642 }
1643
1644 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1645 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1646 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1647 {
1648 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1649 stack.store (regs[ARM_SP_REGNUM], 12,
1650 regs[fp_start_reg++]);
1651 }
1652 }
1653 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1654 {
1655 /* Allow some special function calls when skipping the
1656 prologue; GCC generates these before storing arguments to
1657 the stack. */
1658 CORE_ADDR dest = BranchDest (current_pc, insn);
1659
1660 if (skip_prologue_function (gdbarch, dest, 0))
1661 continue;
1662 else
1663 break;
1664 }
1665 else if ((insn & 0xf0000000) != 0xe0000000)
1666 break; /* Condition not true, exit early. */
1667 else if (arm_instruction_changes_pc (insn))
1668 /* Don't scan past anything that might change control flow. */
1669 break;
1670 else if (arm_instruction_restores_sp (insn))
1671 {
1672 /* Don't scan past the epilogue. */
1673 break;
1674 }
1675 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1676 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1677 /* Ignore block loads from the stack, potentially copying
1678 parameters from memory. */
1679 continue;
1680 else if ((insn & 0xfc500000) == 0xe4100000
1681 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1682 /* Similarly ignore single loads from the stack. */
1683 continue;
1684 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1685 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1686 register instead of the stack. */
1687 continue;
1688 else
1689 {
1690 /* The optimizer might shove anything into the prologue. If we
1691 are building up the cache (cache != NULL) from scanning the
1692 prologue, we just skip what we don't recognize and scan
1693 further, to make the cache as complete as possible. However,
1694 if we are merely skipping the prologue, we stop immediately
1695 on the first unrecognized instruction. */
1696 unrecognized_pc = current_pc;
1697 if (cache != NULL)
1698 continue;
1699 else
1700 break;
1701 }
1702 }
1703
1704 if (unrecognized_pc == 0)
1705 unrecognized_pc = current_pc;
1706
1707 if (cache)
1708 {
1709 int framereg, framesize;
1710
1711 /* The frame size is just the distance from the frame register
1712 to the original stack pointer. */
1713 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1714 {
1715 /* Frame pointer is fp. */
1716 framereg = ARM_FP_REGNUM;
1717 framesize = -regs[ARM_FP_REGNUM].k;
1718 }
1719 else
1720 {
1721 /* Try the stack pointer... this is a bit desperate. */
1722 framereg = ARM_SP_REGNUM;
1723 framesize = -regs[ARM_SP_REGNUM].k;
1724 }
1725
1726 cache->framereg = framereg;
1727 cache->framesize = framesize;
1728
1729 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1730 if (stack.find_reg (gdbarch, regno, &offset))
1731 cache->saved_regs[regno].addr = offset;
1732 }
1733
1734 if (arm_debug)
1735 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1736 paddress (gdbarch, unrecognized_pc));
1737
1738 return unrecognized_pc;
1739 }
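
/* Illustrative sketch, not part of the original source: for the classic
   APCS prologue

     mov   ip, sp
     stmfd sp!, {fp, ip, lr, pc}
     sub   fp, ip, #4
     sub   sp, sp, #16

   the scan records regs[fp] == SP - 4, so framereg == ARM_FP_REGNUM and
   framesize == 4; pc, lr, ip and fp are found at offsets -4, -8, -12 and
   -16 from the caller's stack pointer, and the final sub merely lowers sp
   to make room for locals.  */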
1740
1741 static void
1742 arm_scan_prologue (struct frame_info *this_frame,
1743 struct arm_prologue_cache *cache)
1744 {
1745 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1746 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1747 CORE_ADDR prologue_start, prologue_end;
1748 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1749 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1750
1751 /* Assume there is no frame until proven otherwise. */
1752 cache->framereg = ARM_SP_REGNUM;
1753 cache->framesize = 0;
1754
1755 /* Check for Thumb prologue. */
1756 if (arm_frame_is_thumb (this_frame))
1757 {
1758 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1759 return;
1760 }
1761
1762 /* Find the function prologue. If we can't find the function in
1763 the symbol table, peek in the stack frame to find the PC. */
1764 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1765 &prologue_end))
1766 {
1767 /* One way to find the end of the prologue (which works well
1768 for unoptimized code) is to do the following:
1769
1770 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1771
1772 if (sal.line == 0)
1773 prologue_end = prev_pc;
1774 else if (sal.end < prologue_end)
1775 prologue_end = sal.end;
1776
1777 This mechanism is very accurate so long as the optimizer
1778 doesn't move any instructions from the function body into the
1779 prologue. If this happens, sal.end will be the last
1780 instruction in the first hunk of prologue code just before
1781 the first instruction that the scheduler has moved from
1782 the body to the prologue.
1783
1784 In order to make sure that we scan all of the prologue
1785 instructions, we use a slightly less accurate mechanism which
1786 may scan more than necessary. To help compensate for this
1787 lack of accuracy, the prologue scanning loop below contains
1788 several clauses which will cause the loop to terminate early if
1789 an implausible prologue instruction is encountered.
1790
1791 The expression
1792
1793 prologue_start + 64
1794
1795 is a suitable endpoint since it accounts for the largest
1796 possible prologue plus up to five instructions inserted by
1797 the scheduler. */
1798
1799 if (prologue_end > prologue_start + 64)
1800 {
1801 prologue_end = prologue_start + 64; /* See above. */
1802 }
1803 }
1804 else
1805 {
1806 /* We have no symbol information. Our only option is to assume this
1807 function has a standard stack frame and the normal frame register.
1808 Then, we can find the value of our frame pointer on entrance to
1809 the callee (or at the present moment if this is the innermost frame).
1810 The value stored there should be the address of the stmfd + 8. */
1811 CORE_ADDR frame_loc;
1812 ULONGEST return_value;
1813
1814 /* AAPCS does not use a frame register, so we can abort here. */
1815 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1816 return;
1817
1818 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1819 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1820 &return_value))
1821 return;
1822 else
1823 {
1824 prologue_start = gdbarch_addr_bits_remove
1825 (gdbarch, return_value) - 8;
1826 prologue_end = prologue_start + 64; /* See above. */
1827 }
1828 }
1829
1830 if (prev_pc < prologue_end)
1831 prologue_end = prev_pc;
1832
1833 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1834 }
1835
1836 static struct arm_prologue_cache *
1837 arm_make_prologue_cache (struct frame_info *this_frame)
1838 {
1839 int reg;
1840 struct arm_prologue_cache *cache;
1841 CORE_ADDR unwound_fp;
1842
1843 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1844 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1845
1846 arm_scan_prologue (this_frame, cache);
1847
1848 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1849 if (unwound_fp == 0)
1850 return cache;
1851
1852 cache->prev_sp = unwound_fp + cache->framesize;
1853
1854 /* Calculate actual addresses of saved registers using offsets
1855 determined by arm_scan_prologue. */
1856 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1857 if (trad_frame_addr_p (cache->saved_regs, reg))
1858 cache->saved_regs[reg].addr += cache->prev_sp;
1859
1860 return cache;
1861 }
1862
1863 /* Implementation of the stop_reason hook for arm_prologue frames. */
1864
1865 static enum unwind_stop_reason
1866 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1867 void **this_cache)
1868 {
1869 struct arm_prologue_cache *cache;
1870 CORE_ADDR pc;
1871
1872 if (*this_cache == NULL)
1873 *this_cache = arm_make_prologue_cache (this_frame);
1874 cache = (struct arm_prologue_cache *) *this_cache;
1875
1876 /* This is meant to halt the backtrace at "_start". */
1877 pc = get_frame_pc (this_frame);
1878 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1879 return UNWIND_OUTERMOST;
1880
1881 /* If we've hit a wall, stop. */
1882 if (cache->prev_sp == 0)
1883 return UNWIND_OUTERMOST;
1884
1885 return UNWIND_NO_REASON;
1886 }
1887
1888 /* Our frame ID for a normal frame is the current function's starting PC
1889 and the caller's SP when we were called. */
1890
1891 static void
1892 arm_prologue_this_id (struct frame_info *this_frame,
1893 void **this_cache,
1894 struct frame_id *this_id)
1895 {
1896 struct arm_prologue_cache *cache;
1897 struct frame_id id;
1898 CORE_ADDR pc, func;
1899
1900 if (*this_cache == NULL)
1901 *this_cache = arm_make_prologue_cache (this_frame);
1902 cache = (struct arm_prologue_cache *) *this_cache;
1903
1904 /* Use function start address as part of the frame ID. If we cannot
1905 identify the start address (due to missing symbol information),
1906 fall back to just using the current PC. */
1907 pc = get_frame_pc (this_frame);
1908 func = get_frame_func (this_frame);
1909 if (!func)
1910 func = pc;
1911
1912 id = frame_id_build (cache->prev_sp, func);
1913 *this_id = id;
1914 }
1915
1916 static struct value *
1917 arm_prologue_prev_register (struct frame_info *this_frame,
1918 void **this_cache,
1919 int prev_regnum)
1920 {
1921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1922 struct arm_prologue_cache *cache;
1923
1924 if (*this_cache == NULL)
1925 *this_cache = arm_make_prologue_cache (this_frame);
1926 cache = (struct arm_prologue_cache *) *this_cache;
1927
1928 /* If we are asked to unwind the PC, then we need to return the LR
1929 instead. The prologue may save PC, but it will point into this
1930 frame's prologue, not the next frame's resume location. Also
1931 strip the saved T bit. A valid LR may have the low bit set, but
1932 a valid PC never does. */
1933 if (prev_regnum == ARM_PC_REGNUM)
1934 {
1935 CORE_ADDR lr;
1936
1937 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1938 return frame_unwind_got_constant (this_frame, prev_regnum,
1939 arm_addr_bits_remove (gdbarch, lr));
1940 }
1941
1942 /* SP is generally not saved to the stack, but this frame is
1943 identified by the next frame's stack pointer at the time of the call.
1944 The value was already reconstructed into PREV_SP. */
1945 if (prev_regnum == ARM_SP_REGNUM)
1946 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1947
1948 /* The CPSR may have been changed by the call instruction and by the
1949 called function. The only bit we can reconstruct is the T bit,
1950 by checking the low bit of LR as of the call. This is a reliable
1951 indicator of Thumb-ness except for some ARM v4T pre-interworking
1952 Thumb code, which could get away with a clear low bit as long as
1953 the called function did not use bx. Guess that all other
1954 bits are unchanged; the condition flags are presumably lost,
1955 but the processor status is likely valid. */
1956 if (prev_regnum == ARM_PS_REGNUM)
1957 {
1958 CORE_ADDR lr, cpsr;
1959 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1960
1961 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1962 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1963 if (IS_THUMB_ADDR (lr))
1964 cpsr |= t_bit;
1965 else
1966 cpsr &= ~t_bit;
1967 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1968 }
1969
1970 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1971 prev_regnum);
1972 }
1973
1974 struct frame_unwind arm_prologue_unwind = {
1975 NORMAL_FRAME,
1976 arm_prologue_unwind_stop_reason,
1977 arm_prologue_this_id,
1978 arm_prologue_prev_register,
1979 NULL,
1980 default_frame_sniffer
1981 };
1982
1983 /* Maintain a list of ARM exception table entries per objfile, similar to the
1984 list of mapping symbols. We only cache entries for standard ARM-defined
1985 personality routines; the cache will contain only the frame unwinding
1986 instructions associated with the entry (not the descriptors). */
1987
1988 struct arm_exidx_entry
1989 {
1990 bfd_vma addr;
1991 gdb_byte *entry;
1992
1993 bool operator< (const arm_exidx_entry &other) const
1994 {
1995 return addr < other.addr;
1996 }
1997 };
1998
1999 struct arm_exidx_data
2000 {
2001 std::vector<std::vector<arm_exidx_entry>> section_maps;
2002 };
2003
2004 static const struct objfile_key<arm_exidx_data> arm_exidx_data_key;
2005
2006 static struct obj_section *
2007 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2008 {
2009 struct obj_section *osect;
2010
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_get_section_flags (objfile->obfd,
2013 osect->the_bfd_section) & SEC_ALLOC)
2014 {
2015 bfd_vma start, size;
2016 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2017 size = bfd_get_section_size (osect->the_bfd_section);
2018
2019 if (start <= vma && vma < start + size)
2020 return osect;
2021 }
2022
2023 return NULL;
2024 }
2025
2026 /* Parse contents of exception table and exception index sections
2027 of OBJFILE, and fill in the exception table entry cache.
2028
2029 For each entry that refers to a standard ARM-defined personality
2030 routine, extract the frame unwinding instructions (from either
2031 the index or the table section). The unwinding instructions
2032 are normalized by:
2033 - extracting them from the rest of the table data
2034 - converting to host endianness
2035 - appending the implicit 0xb0 ("Finish") code
2036
2037 The extracted and normalized instructions are stored for later
2038 retrieval by the arm_find_exidx_entry routine. */
2039
2040 static void
2041 arm_exidx_new_objfile (struct objfile *objfile)
2042 {
2043 struct arm_exidx_data *data;
2044 asection *exidx, *extab;
2045 bfd_vma exidx_vma = 0, extab_vma = 0;
2046 LONGEST i;
2047
2048 /* If we've already touched this file, do nothing. */
2049 if (!objfile || arm_exidx_data_key.get (objfile) != NULL)
2050 return;
2051
2052 /* Read contents of exception table and index. */
2053 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2054 gdb::byte_vector exidx_data;
2055 if (exidx)
2056 {
2057 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2058 exidx_data.resize (bfd_get_section_size (exidx));
2059
2060 if (!bfd_get_section_contents (objfile->obfd, exidx,
2061 exidx_data.data (), 0,
2062 exidx_data.size ()))
2063 return;
2064 }
2065
2066 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2067 gdb::byte_vector extab_data;
2068 if (extab)
2069 {
2070 extab_vma = bfd_section_vma (objfile->obfd, extab);
2071 extab_data.resize (bfd_get_section_size (extab));
2072
2073 if (!bfd_get_section_contents (objfile->obfd, extab,
2074 extab_data.data (), 0,
2075 extab_data.size ()))
2076 return;
2077 }
2078
2079 /* Allocate exception table data structure. */
2080 data = arm_exidx_data_key.emplace (objfile);
2081 data->section_maps.resize (objfile->obfd->section_count);
2082
2083 /* Fill in exception table. */
2084 for (i = 0; i < exidx_data.size () / 8; i++)
2085 {
2086 struct arm_exidx_entry new_exidx_entry;
2087 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2088 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2089 exidx_data.data () + i * 8 + 4);
2090 bfd_vma addr = 0, word = 0;
2091 int n_bytes = 0, n_words = 0;
2092 struct obj_section *sec;
2093 gdb_byte *entry = NULL;
2094
2095 /* Extract address of start of function. */
2096 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2097 idx += exidx_vma + i * 8;
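/* The two statements above sign-extend a 31-bit place-relative
   ("prel31") offset (bit 30 is the sign bit, so for example
   0x7ffffff8 decodes to -8 and 0x00000010 to +16) and then add the
   address of the index entry itself, yielding the absolute address
   of the function.  */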
2098
2099 /* Find section containing function and compute section offset. */
2100 sec = arm_obj_section_from_vma (objfile, idx);
2101 if (sec == NULL)
2102 continue;
2103 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2104
2105 /* Determine address of exception table entry. */
2106 if (val == 1)
2107 {
2108 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2109 }
2110 else if ((val & 0xff000000) == 0x80000000)
2111 {
2112 /* Exception table entry embedded in .ARM.exidx
2113 -- must be short form. */
2114 word = val;
2115 n_bytes = 3;
2116 }
2117 else if (!(val & 0x80000000))
2118 {
2119 /* Exception table entry in .ARM.extab. */
2120 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2121 addr += exidx_vma + i * 8 + 4;
2122
2123 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2124 {
2125 word = bfd_h_get_32 (objfile->obfd,
2126 extab_data.data () + addr - extab_vma);
2127 addr += 4;
2128
2129 if ((word & 0xff000000) == 0x80000000)
2130 {
2131 /* Short form. */
2132 n_bytes = 3;
2133 }
2134 else if ((word & 0xff000000) == 0x81000000
2135 || (word & 0xff000000) == 0x82000000)
2136 {
2137 /* Long form. */
2138 n_bytes = 2;
2139 n_words = ((word >> 16) & 0xff);
2140 }
2141 else if (!(word & 0x80000000))
2142 {
2143 bfd_vma pers;
2144 struct obj_section *pers_sec;
2145 int gnu_personality = 0;
2146
2147 /* Custom personality routine. */
2148 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2149 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2150
2151 /* Check whether we've got one of the variants of the
2152 GNU personality routines. */
2153 pers_sec = arm_obj_section_from_vma (objfile, pers);
2154 if (pers_sec)
2155 {
2156 static const char *personality[] =
2157 {
2158 "__gcc_personality_v0",
2159 "__gxx_personality_v0",
2160 "__gcj_personality_v0",
2161 "__gnu_objc_personality_v0",
2162 NULL
2163 };
2164
2165 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2166 int k;
2167
2168 for (k = 0; personality[k]; k++)
2169 if (lookup_minimal_symbol_by_pc_name
2170 (pc, personality[k], objfile))
2171 {
2172 gnu_personality = 1;
2173 break;
2174 }
2175 }
2176
2177 /* If so, the next word contains a word count in the high
2178 byte, followed by the same unwind instructions as the
2179 pre-defined forms. */
2180 if (gnu_personality
2181 && addr + 4 <= extab_vma + extab_data.size ())
2182 {
2183 word = bfd_h_get_32 (objfile->obfd,
2184 (extab_data.data ()
2185 + addr - extab_vma));
2186 addr += 4;
2187 n_bytes = 3;
2188 n_words = ((word >> 24) & 0xff);
2189 }
2190 }
2191 }
2192 }
2193
2194 /* Sanity check address. */
2195 if (n_words)
2196 if (addr < extab_vma
2197 || addr + 4 * n_words > extab_vma + extab_data.size ())
2198 n_words = n_bytes = 0;
2199
2200 /* The unwind instructions reside in WORD (only the N_BYTES least
2201 significant bytes are valid), followed by N_WORDS words in the
2202 extab section starting at ADDR. */
2203 if (n_bytes || n_words)
2204 {
2205 gdb_byte *p = entry
2206 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2207 n_bytes + n_words * 4 + 1);
2208
2209 while (n_bytes--)
2210 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2211
2212 while (n_words--)
2213 {
2214 word = bfd_h_get_32 (objfile->obfd,
2215 extab_data.data () + addr - extab_vma);
2216 addr += 4;
2217
2218 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2219 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2220 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2221 *p++ = (gdb_byte) (word & 0xff);
2222 }
2223
2224 /* Implied "Finish" to terminate the list. */
2225 *p++ = 0xb0;
2226 }
2227
2228 /* Push the entry onto the vector. Entries are guaranteed to
2229 appear in order of increasing address. */
2230 new_exidx_entry.addr = idx;
2231 new_exidx_entry.entry = entry;
2232 data->section_maps[sec->the_bfd_section->index].push_back
2233 (new_exidx_entry);
2234 }
2235 }
2236
2237 /* Search for the exception table entry covering MEMADDR. If one is found,
2238 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2239 set *START to the start of the region covered by this entry. */
2240
2241 static gdb_byte *
2242 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2243 {
2244 struct obj_section *sec;
2245
2246 sec = find_pc_section (memaddr);
2247 if (sec != NULL)
2248 {
2249 struct arm_exidx_data *data;
2250 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2251
2252 data = arm_exidx_data_key.get (sec->objfile);
2253 if (data != NULL)
2254 {
2255 std::vector<arm_exidx_entry> &map
2256 = data->section_maps[sec->the_bfd_section->index];
2257 if (!map.empty ())
2258 {
2259 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2260
2261 /* std::lower_bound finds the earliest ordered insertion
2262 point. If the following entry starts at this exact
2263 address, we use that; otherwise, the preceding
2264 exception table entry covers this address. */
2265 if (idx < map.end ())
2266 {
2267 if (idx->addr == map_key.addr)
2268 {
2269 if (start)
2270 *start = idx->addr + obj_section_addr (sec);
2271 return idx->entry;
2272 }
2273 }
2274
2275 if (idx > map.begin ())
2276 {
2277 idx = idx - 1;
2278 if (start)
2279 *start = idx->addr + obj_section_addr (sec);
2280 return idx->entry;
2281 }
2282 }
2283 }
2284 }
2285
2286 return NULL;
2287 }
2288
2289 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2290 instruction list from the ARM exception table entry ENTRY, allocate and
2291 return a prologue cache structure describing how to unwind this frame.
2292
2293 Return NULL if the unwinding instruction list contains a "spare",
2294 "reserved" or "refuse to unwind" instruction as defined in section
2295 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2296 for the ARM Architecture" document. */
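
/* As an illustration (hypothetical, but typical of GCC output), a
   function whose prologue is simply

       push  {r4, lr}

   is described by the unwind byte sequence

       0xa8     pop {r4, r14}: r4 at vsp, lr at vsp + 4, vsp += 8
       0xb0     finish: PC is taken from the saved LR

   which the loop below converts into saved-register addresses
   relative to the caller's stack pointer.  */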
2297
2298 static struct arm_prologue_cache *
2299 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2300 {
2301 CORE_ADDR vsp = 0;
2302 int vsp_valid = 0;
2303
2304 struct arm_prologue_cache *cache;
2305 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2306 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2307
2308 for (;;)
2309 {
2310 gdb_byte insn;
2311
2312 /* Whenever we reload SP, we have to retrieve its
2313 actual value in the current frame. */
2314 if (!vsp_valid)
2315 {
2316 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2317 {
2318 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2319 vsp = get_frame_register_unsigned (this_frame, reg);
2320 }
2321 else
2322 {
2323 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2324 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2325 }
2326
2327 vsp_valid = 1;
2328 }
2329
2330 /* Decode next unwind instruction. */
2331 insn = *entry++;
2332
2333 if ((insn & 0xc0) == 0)
2334 {
2335 int offset = insn & 0x3f;
2336 vsp += (offset << 2) + 4;
2337 }
2338 else if ((insn & 0xc0) == 0x40)
2339 {
2340 int offset = insn & 0x3f;
2341 vsp -= (offset << 2) + 4;
2342 }
2343 else if ((insn & 0xf0) == 0x80)
2344 {
2345 int mask = ((insn & 0xf) << 8) | *entry++;
2346 int i;
2347
2348 /* The special case of an all-zero mask identifies
2349 "Refuse to unwind". We return NULL to fall back
2350 to the prologue analyzer. */
2351 if (mask == 0)
2352 return NULL;
2353
2354 /* Pop registers r4..r15 under mask. */
2355 for (i = 0; i < 12; i++)
2356 if (mask & (1 << i))
2357 {
2358 cache->saved_regs[4 + i].addr = vsp;
2359 vsp += 4;
2360 }
2361
2362 /* Special-case popping SP -- we need to reload vsp. */
2363 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2364 vsp_valid = 0;
2365 }
2366 else if ((insn & 0xf0) == 0x90)
2367 {
2368 int reg = insn & 0xf;
2369
2370 /* Reserved cases. */
2371 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2372 return NULL;
2373
2374 /* Set SP from another register and mark VSP for reload. */
2375 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2376 vsp_valid = 0;
2377 }
2378 else if ((insn & 0xf0) == 0xa0)
2379 {
2380 int count = insn & 0x7;
2381 int pop_lr = (insn & 0x8) != 0;
2382 int i;
2383
2384 /* Pop r4..r[4+count]. */
2385 for (i = 0; i <= count; i++)
2386 {
2387 cache->saved_regs[4 + i].addr = vsp;
2388 vsp += 4;
2389 }
2390
2391 /* If indicated by flag, pop LR as well. */
2392 if (pop_lr)
2393 {
2394 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2395 vsp += 4;
2396 }
2397 }
2398 else if (insn == 0xb0)
2399 {
2400 /* We could only have updated PC by popping into it; if so, it
2401 will show up as an address. Otherwise, copy LR into PC. */
2402 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2403 cache->saved_regs[ARM_PC_REGNUM]
2404 = cache->saved_regs[ARM_LR_REGNUM];
2405
2406 /* We're done. */
2407 break;
2408 }
2409 else if (insn == 0xb1)
2410 {
2411 int mask = *entry++;
2412 int i;
2413
2414 /* An all-zero mask, or any mask >= 16, is "spare". */
2415 if (mask == 0 || mask >= 16)
2416 return NULL;
2417
2418 /* Pop r0..r3 under mask. */
2419 for (i = 0; i < 4; i++)
2420 if (mask & (1 << i))
2421 {
2422 cache->saved_regs[i].addr = vsp;
2423 vsp += 4;
2424 }
2425 }
2426 else if (insn == 0xb2)
2427 {
2428 ULONGEST offset = 0;
2429 unsigned shift = 0;
2430
2431 do
2432 {
2433 offset |= (*entry & 0x7f) << shift;
2434 shift += 7;
2435 }
2436 while (*entry++ & 0x80);
2437
2438 vsp += 0x204 + (offset << 2);
2439 }
2440 else if (insn == 0xb3)
2441 {
2442 int start = *entry >> 4;
2443 int count = (*entry++) & 0xf;
2444 int i;
2445
2446 /* Only registers D0..D15 are valid here. */
2447 if (start + count >= 16)
2448 return NULL;
2449
2450 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2451 for (i = 0; i <= count; i++)
2452 {
2453 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2454 vsp += 8;
2455 }
2456
2457 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2458 vsp += 4;
2459 }
2460 else if ((insn & 0xf8) == 0xb8)
2461 {
2462 int count = insn & 0x7;
2463 int i;
2464
2465 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2466 for (i = 0; i <= count; i++)
2467 {
2468 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2469 vsp += 8;
2470 }
2471
2472 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2473 vsp += 4;
2474 }
2475 else if (insn == 0xc6)
2476 {
2477 int start = *entry >> 4;
2478 int count = (*entry++) & 0xf;
2479 int i;
2480
2481 /* Only registers WR0..WR15 are valid. */
2482 if (start + count >= 16)
2483 return NULL;
2484
2485 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2486 for (i = 0; i <= count; i++)
2487 {
2488 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2489 vsp += 8;
2490 }
2491 }
2492 else if (insn == 0xc7)
2493 {
2494 int mask = *entry++;
2495 int i;
2496
2497 /* An all-zero mask, or any mask >= 16, is "spare". */
2498 if (mask == 0 || mask >= 16)
2499 return NULL;
2500
2501 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2502 for (i = 0; i < 4; i++)
2503 if (mask & (1 << i))
2504 {
2505 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2506 vsp += 4;
2507 }
2508 }
2509 else if ((insn & 0xf8) == 0xc0)
2510 {
2511 int count = insn & 0x7;
2512 int i;
2513
2514 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2515 for (i = 0; i <= count; i++)
2516 {
2517 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2518 vsp += 8;
2519 }
2520 }
2521 else if (insn == 0xc8)
2522 {
2523 int start = *entry >> 4;
2524 int count = (*entry++) & 0xf;
2525 int i;
2526
2527 /* Only registers D16..D31 are valid here (START is relative to D16). */
2528 if (start + count >= 16)
2529 return NULL;
2530
2531 /* Pop VFP double-precision registers
2532 D[16+start]..D[16+start+count]. */
2533 for (i = 0; i <= count; i++)
2534 {
2535 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2536 vsp += 8;
2537 }
2538 }
2539 else if (insn == 0xc9)
2540 {
2541 int start = *entry >> 4;
2542 int count = (*entry++) & 0xf;
2543 int i;
2544
2545 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2546 for (i = 0; i <= count; i++)
2547 {
2548 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2549 vsp += 8;
2550 }
2551 }
2552 else if ((insn & 0xf8) == 0xd0)
2553 {
2554 int count = insn & 0x7;
2555 int i;
2556
2557 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2558 for (i = 0; i <= count; i++)
2559 {
2560 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2561 vsp += 8;
2562 }
2563 }
2564 else
2565 {
2566 /* Everything else is "spare". */
2567 return NULL;
2568 }
2569 }
2570
2571 /* If we restore SP from a register, assume this was the frame register.
2572 Otherwise just fall back to SP as the frame register. */
2573 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2574 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2575 else
2576 cache->framereg = ARM_SP_REGNUM;
2577
2578 /* Determine offset to previous frame. */
2579 cache->framesize
2580 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2581
2582 /* We already got the previous SP. */
2583 cache->prev_sp = vsp;
2584
2585 return cache;
2586 }
2587
2588 /* Unwinding via ARM exception table entries. Note that the sniffer
2589 already computes a filled-in prologue cache, which is then used
2590 with the same arm_prologue_this_id and arm_prologue_prev_register
2591 routines also used for prologue-parsing based unwinding. */
2592
2593 static int
2594 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2595 struct frame_info *this_frame,
2596 void **this_prologue_cache)
2597 {
2598 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2599 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2600 CORE_ADDR addr_in_block, exidx_region, func_start;
2601 struct arm_prologue_cache *cache;
2602 gdb_byte *entry;
2603
2604 /* See if we have an ARM exception table entry covering this address. */
2605 addr_in_block = get_frame_address_in_block (this_frame);
2606 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2607 if (!entry)
2608 return 0;
2609
2610 /* The ARM exception table does not describe unwind information
2611 for arbitrary PC values, but is guaranteed to be correct only
2612 at call sites. We have to decide here whether we want to use
2613 ARM exception table information for this frame, or fall back
2614 to using prologue parsing. (Note that if we have DWARF CFI,
2615 this sniffer isn't even called -- CFI is always preferred.)
2616
2617 Before we make this decision, however, we check whether we
2618 actually have *symbol* information for the current frame.
2619 If not, prologue parsing would not work anyway, so we might
2620 as well use the exception table and hope for the best. */
2621 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2622 {
2623 int exc_valid = 0;
2624
2625 /* If the next frame is "normal", we are at a call site in this
2626 frame, so exception information is guaranteed to be valid. */
2627 if (get_next_frame (this_frame)
2628 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2629 exc_valid = 1;
2630
2631 /* We also assume exception information is valid if we're currently
2632 blocked in a system call. The system library is supposed to
2633 ensure this, so that e.g. pthread cancellation works. */
2634 if (arm_frame_is_thumb (this_frame))
2635 {
2636 ULONGEST insn;
2637
2638 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2639 2, byte_order_for_code, &insn)
2640 && (insn & 0xff00) == 0xdf00 /* svc */)
2641 exc_valid = 1;
2642 }
2643 else
2644 {
2645 ULONGEST insn;
2646
2647 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2648 4, byte_order_for_code, &insn)
2649 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2650 exc_valid = 1;
2651 }
2652
2653 /* Bail out if we don't know that exception information is valid. */
2654 if (!exc_valid)
2655 return 0;
2656
2657 /* The ARM exception index does not mark the *end* of the region
2658 covered by the entry, and some functions will not have any entry.
2659 To correctly recognize the end of the covered region, the linker
2660 should have inserted dummy records with a CANTUNWIND marker.
2661
2662 Unfortunately, current versions of GNU ld do not reliably do
2663 this, and thus we may have found an incorrect entry above.
2664 As a (temporary) sanity check, we only use the entry if it
2665 lies *within* the bounds of the function. Note that this check
2666 might reject perfectly valid entries that just happen to cover
2667 multiple functions; therefore this check ought to be removed
2668 once the linker is fixed. */
2669 if (func_start > exidx_region)
2670 return 0;
2671 }
2672
2673 /* Decode the list of unwinding instructions into a prologue cache.
2674 Note that this may fail due to e.g. a "refuse to unwind" code. */
2675 cache = arm_exidx_fill_cache (this_frame, entry);
2676 if (!cache)
2677 return 0;
2678
2679 *this_prologue_cache = cache;
2680 return 1;
2681 }
2682
2683 struct frame_unwind arm_exidx_unwind = {
2684 NORMAL_FRAME,
2685 default_frame_unwind_stop_reason,
2686 arm_prologue_this_id,
2687 arm_prologue_prev_register,
2688 NULL,
2689 arm_exidx_unwind_sniffer
2690 };
2691
2692 static struct arm_prologue_cache *
2693 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2694 {
2695 struct arm_prologue_cache *cache;
2696 int reg;
2697
2698 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2699 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2700
2701 /* Still rely on the offset calculated from prologue. */
2702 arm_scan_prologue (this_frame, cache);
2703
2704 /* Since we are in epilogue, the SP has been restored. */
2705 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2706
2707 /* Calculate actual addresses of saved registers using offsets
2708 determined by arm_scan_prologue. */
2709 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2710 if (trad_frame_addr_p (cache->saved_regs, reg))
2711 cache->saved_regs[reg].addr += cache->prev_sp;
2712
2713 return cache;
2714 }
2715
2716 /* Implementation of function hook 'this_id' in
2717 'struct frame_unwind' for the epilogue unwinder. */
2718
2719 static void
2720 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2721 void **this_cache,
2722 struct frame_id *this_id)
2723 {
2724 struct arm_prologue_cache *cache;
2725 CORE_ADDR pc, func;
2726
2727 if (*this_cache == NULL)
2728 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2729 cache = (struct arm_prologue_cache *) *this_cache;
2730
2731 /* Use function start address as part of the frame ID. If we cannot
2732 identify the start address (due to missing symbol information),
2733 fall back to just using the current PC. */
2734 pc = get_frame_pc (this_frame);
2735 func = get_frame_func (this_frame);
2736 if (func == 0)
2737 func = pc;
2738
2739 (*this_id) = frame_id_build (cache->prev_sp, pc);
2740 }
2741
2742 /* Implementation of function hook 'prev_register' in
2743 'struct frame_unwind' for the epilogue unwinder. */
2744
2745 static struct value *
2746 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2747 void **this_cache, int regnum)
2748 {
2749 if (*this_cache == NULL)
2750 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2751
2752 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2753 }
2754
2755 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2756 CORE_ADDR pc);
2757 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2758 CORE_ADDR pc);
2759
2760 /* Implementation of function hook 'sniffer' in
2761 'struct frame_unwind' for the epilogue unwinder. */
2762
2763 static int
2764 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2765 struct frame_info *this_frame,
2766 void **this_prologue_cache)
2767 {
2768 if (frame_relative_level (this_frame) == 0)
2769 {
2770 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2771 CORE_ADDR pc = get_frame_pc (this_frame);
2772
2773 if (arm_frame_is_thumb (this_frame))
2774 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2775 else
2776 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2777 }
2778 else
2779 return 0;
2780 }
2781
2782 /* Frame unwinder from epilogue. */
2783
2784 static const struct frame_unwind arm_epilogue_frame_unwind =
2785 {
2786 NORMAL_FRAME,
2787 default_frame_unwind_stop_reason,
2788 arm_epilogue_frame_this_id,
2789 arm_epilogue_frame_prev_register,
2790 NULL,
2791 arm_epilogue_frame_sniffer,
2792 };
2793
2794 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2795 trampoline, return the target PC. Otherwise return 0.
2796
2797 void call0a (char c, short s, int i, long l) {}
2798
2799 int main (void)
2800 {
2801 (*pointer_to_call0a) (c, s, i, l);
2802 }
2803
2804 Instead of calling a stub library function _call_via_xx (xx is
2805 the register name), GCC may inline the trampoline in the object
2806 file as below (register r2 has the address of call0a).
2807
2808 .global main
2809 .type main, %function
2810 ...
2811 bl .L1
2812 ...
2813 .size main, .-main
2814
2815 .L1:
2816 bx r2
2817
2818 The trampoline 'bx r2' doesn't belong to main. */
2819
2820 static CORE_ADDR
2821 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2822 {
2823 /* The heuristic for recognizing such a trampoline is that FRAME is
2824 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2825 if (arm_frame_is_thumb (frame))
2826 {
2827 gdb_byte buf[2];
2828
2829 if (target_read_memory (pc, buf, 2) == 0)
2830 {
2831 struct gdbarch *gdbarch = get_frame_arch (frame);
2832 enum bfd_endian byte_order_for_code
2833 = gdbarch_byte_order_for_code (gdbarch);
2834 uint16_t insn
2835 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2836
2837 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2838 {
2839 CORE_ADDR dest
2840 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2841
2842 /* Clear the LSB so that gdb core sets step-resume
2843 breakpoint at the right address. */
2844 return UNMAKE_THUMB_ADDR (dest);
2845 }
2846 }
2847 }
2848
2849 return 0;
2850 }
2851
2852 static struct arm_prologue_cache *
2853 arm_make_stub_cache (struct frame_info *this_frame)
2854 {
2855 struct arm_prologue_cache *cache;
2856
2857 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2858 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2859
2860 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2861
2862 return cache;
2863 }
2864
2865 /* Our frame ID for a stub frame is the current SP and LR. */
2866
2867 static void
2868 arm_stub_this_id (struct frame_info *this_frame,
2869 void **this_cache,
2870 struct frame_id *this_id)
2871 {
2872 struct arm_prologue_cache *cache;
2873
2874 if (*this_cache == NULL)
2875 *this_cache = arm_make_stub_cache (this_frame);
2876 cache = (struct arm_prologue_cache *) *this_cache;
2877
2878 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2879 }
2880
2881 static int
2882 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2883 struct frame_info *this_frame,
2884 void **this_prologue_cache)
2885 {
2886 CORE_ADDR addr_in_block;
2887 gdb_byte dummy[4];
2888 CORE_ADDR pc, start_addr;
2889 const char *name;
2890
2891 addr_in_block = get_frame_address_in_block (this_frame);
2892 pc = get_frame_pc (this_frame);
2893 if (in_plt_section (addr_in_block)
2894 /* We also use the stub unwinder if the target memory is unreadable,
2895 to avoid having the prologue unwinder try to read it. */
2896 || target_read_memory (pc, dummy, 4) != 0)
2897 return 1;
2898
2899 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2900 && arm_skip_bx_reg (this_frame, pc) != 0)
2901 return 1;
2902
2903 return 0;
2904 }
2905
2906 struct frame_unwind arm_stub_unwind = {
2907 NORMAL_FRAME,
2908 default_frame_unwind_stop_reason,
2909 arm_stub_this_id,
2910 arm_prologue_prev_register,
2911 NULL,
2912 arm_stub_unwind_sniffer
2913 };
2914
2915 /* Store, into CACHE->saved_regs, the addresses of the registers
2916 saved by the frame described by THIS_FRAME. CACHE is
2917 returned. */
2918
2919 static struct arm_prologue_cache *
2920 arm_m_exception_cache (struct frame_info *this_frame)
2921 {
2922 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2923 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2924 struct arm_prologue_cache *cache;
2925 CORE_ADDR unwound_sp;
2926 LONGEST xpsr;
2927
2928 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2929 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2930
2931 unwound_sp = get_frame_register_unsigned (this_frame,
2932 ARM_SP_REGNUM);
2933
2934 /* The hardware saves eight 32-bit words, comprising xPSR,
2935 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2936 "B1.5.6 Exception entry behavior" in
2937 "ARMv7-M Architecture Reference Manual". */
2938 cache->saved_regs[0].addr = unwound_sp;
2939 cache->saved_regs[1].addr = unwound_sp + 4;
2940 cache->saved_regs[2].addr = unwound_sp + 8;
2941 cache->saved_regs[3].addr = unwound_sp + 12;
2942 cache->saved_regs[12].addr = unwound_sp + 16;
2943 cache->saved_regs[14].addr = unwound_sp + 20;
2944 cache->saved_regs[15].addr = unwound_sp + 24;
2945 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2946
2947 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2948 aligner between the top of the 32-byte stack frame and the
2949 previous context's stack pointer. */
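/* For example, with UNWOUND_SP equal to 0x2000fd00, the previous
   SP computed below is 0x2000fd20 if bit 9 is clear and 0x2000fd24
   if it is set.  */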
2950 cache->prev_sp = unwound_sp + 32;
2951 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2952 && (xpsr & (1 << 9)) != 0)
2953 cache->prev_sp += 4;
2954
2955 return cache;
2956 }
2957
2958 /* Implementation of function hook 'this_id' in
2959 'struct frame_unwind'. */
2960
2961 static void
2962 arm_m_exception_this_id (struct frame_info *this_frame,
2963 void **this_cache,
2964 struct frame_id *this_id)
2965 {
2966 struct arm_prologue_cache *cache;
2967
2968 if (*this_cache == NULL)
2969 *this_cache = arm_m_exception_cache (this_frame);
2970 cache = (struct arm_prologue_cache *) *this_cache;
2971
2972 /* Our frame ID for a stub frame is the current SP and LR. */
2973 *this_id = frame_id_build (cache->prev_sp,
2974 get_frame_pc (this_frame));
2975 }
2976
2977 /* Implementation of function hook 'prev_register' in
2978 'struct frame_unwind'. */
2979
2980 static struct value *
2981 arm_m_exception_prev_register (struct frame_info *this_frame,
2982 void **this_cache,
2983 int prev_regnum)
2984 {
2985 struct arm_prologue_cache *cache;
2986
2987 if (*this_cache == NULL)
2988 *this_cache = arm_m_exception_cache (this_frame);
2989 cache = (struct arm_prologue_cache *) *this_cache;
2990
2991 /* The value was already reconstructed into PREV_SP. */
2992 if (prev_regnum == ARM_SP_REGNUM)
2993 return frame_unwind_got_constant (this_frame, prev_regnum,
2994 cache->prev_sp);
2995
2996 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2997 prev_regnum);
2998 }
2999
3000 /* Implementation of function hook 'sniffer' in
3001 'struct frame_unwind'. */
3002
3003 static int
3004 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3005 struct frame_info *this_frame,
3006 void **this_prologue_cache)
3007 {
3008 CORE_ADDR this_pc = get_frame_pc (this_frame);
3009
3010 /* No need to check is_m; this sniffer is only registered for
3011 M-profile architectures. */
3012
3013 /* Check if exception frame returns to a magic PC value. */
3014 return arm_m_addr_is_magic (this_pc);
3015 }
3016
3017 /* Frame unwinder for M-profile exceptions. */
3018
3019 struct frame_unwind arm_m_exception_unwind =
3020 {
3021 SIGTRAMP_FRAME,
3022 default_frame_unwind_stop_reason,
3023 arm_m_exception_this_id,
3024 arm_m_exception_prev_register,
3025 NULL,
3026 arm_m_exception_unwind_sniffer
3027 };
3028
3029 static CORE_ADDR
3030 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3031 {
3032 struct arm_prologue_cache *cache;
3033
3034 if (*this_cache == NULL)
3035 *this_cache = arm_make_prologue_cache (this_frame);
3036 cache = (struct arm_prologue_cache *) *this_cache;
3037
3038 return cache->prev_sp - cache->framesize;
3039 }
3040
3041 struct frame_base arm_normal_base = {
3042 &arm_prologue_unwind,
3043 arm_normal_frame_base,
3044 arm_normal_frame_base,
3045 arm_normal_frame_base
3046 };
3047
3048 static struct value *
3049 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3050 int regnum)
3051 {
3052 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3053 CORE_ADDR lr, cpsr;
3054 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3055
3056 switch (regnum)
3057 {
3058 case ARM_PC_REGNUM:
3059 /* The PC is normally copied from the return column, which
3060 describes saves of LR. However, that version may have an
3061 extra bit set to indicate Thumb state. The bit is not
3062 part of the PC. */
3063 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3064 return frame_unwind_got_constant (this_frame, regnum,
3065 arm_addr_bits_remove (gdbarch, lr));
3066
3067 case ARM_PS_REGNUM:
3068 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3069 cpsr = get_frame_register_unsigned (this_frame, regnum);
3070 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3071 if (IS_THUMB_ADDR (lr))
3072 cpsr |= t_bit;
3073 else
3074 cpsr &= ~t_bit;
3075 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3076
3077 default:
3078 internal_error (__FILE__, __LINE__,
3079 _("Unexpected register %d"), regnum);
3080 }
3081 }
3082
3083 static void
3084 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3085 struct dwarf2_frame_state_reg *reg,
3086 struct frame_info *this_frame)
3087 {
3088 switch (regnum)
3089 {
3090 case ARM_PC_REGNUM:
3091 case ARM_PS_REGNUM:
3092 reg->how = DWARF2_FRAME_REG_FN;
3093 reg->loc.fn = arm_dwarf2_prev_register;
3094 break;
3095 case ARM_SP_REGNUM:
3096 reg->how = DWARF2_FRAME_REG_CFA;
3097 break;
3098 }
3099 }
3100
3101 /* Implement the stack_frame_destroyed_p gdbarch method. */
3102
3103 static int
3104 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3105 {
3106 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3107 unsigned int insn, insn2;
3108 int found_return = 0, found_stack_adjust = 0;
3109 CORE_ADDR func_start, func_end;
3110 CORE_ADDR scan_pc;
3111 gdb_byte buf[4];
3112
3113 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3114 return 0;
3115
3116 /* The epilogue is a sequence of instructions along the following lines:
3117
3118 - add stack frame size to SP or FP
3119 - [if frame pointer used] restore SP from FP
3120 - restore registers from SP [may include PC]
3121 - a return-type instruction [if PC wasn't already restored]
3122
3123 In a first pass, we scan forward from the current PC and verify the
3124 instructions we find as compatible with this sequence, ending in a
3125 return instruction.
3126
3127 However, this is not sufficient to distinguish indirect function calls
3128 within a function from indirect tail calls in the epilogue in some cases.
3129 Therefore, if we didn't already find any SP-changing instruction during
3130 forward scan, we add a backward scanning heuristic to ensure we actually
3131 are in the epilogue. */
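
/* For instance, a hypothetical Thumb epilogue of the form

       add   sp, sp, #8
       pop   {r4, r5, pc}

   matches this pattern: scanning forward from the pop finds a
   return (PC is in the register list), and the backward scan below,
   started at the pop, finds the SP adjustment in the preceding
   halfword.  */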
3132
3133 scan_pc = pc;
3134 while (scan_pc < func_end && !found_return)
3135 {
3136 if (target_read_memory (scan_pc, buf, 2))
3137 break;
3138
3139 scan_pc += 2;
3140 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3141
3142 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3143 found_return = 1;
3144 else if (insn == 0x46f7) /* mov pc, lr */
3145 found_return = 1;
3146 else if (thumb_instruction_restores_sp (insn))
3147 {
3148 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3149 found_return = 1;
3150 }
3151 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3152 {
3153 if (target_read_memory (scan_pc, buf, 2))
3154 break;
3155
3156 scan_pc += 2;
3157 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3158
3159 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3160 {
3161 if (insn2 & 0x8000) /* <registers> include PC. */
3162 found_return = 1;
3163 }
3164 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3165 && (insn2 & 0x0fff) == 0x0b04)
3166 {
3167 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3168 found_return = 1;
3169 }
3170 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3171 && (insn2 & 0x0e00) == 0x0a00)
3172 ;
3173 else
3174 break;
3175 }
3176 else
3177 break;
3178 }
3179
3180 if (!found_return)
3181 return 0;
3182
3183 /* Since any instruction in the epilogue sequence, with the possible
3184 exception of return itself, updates the stack pointer, we need to
3185 scan backwards for at most one instruction. Try either a 16-bit or
3186 a 32-bit instruction. This is just a heuristic, so we do not worry
3187 too much about false positives. */
3188
3189 if (pc - 4 < func_start)
3190 return 0;
3191 if (target_read_memory (pc - 4, buf, 4))
3192 return 0;
3193
3194 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3195 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3196
3197 if (thumb_instruction_restores_sp (insn2))
3198 found_stack_adjust = 1;
3199 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3200 found_stack_adjust = 1;
3201 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3202 && (insn2 & 0x0fff) == 0x0b04)
3203 found_stack_adjust = 1;
3204 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3205 && (insn2 & 0x0e00) == 0x0a00)
3206 found_stack_adjust = 1;
3207
3208 return found_stack_adjust;
3209 }
3210
3211 static int
3212 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3213 {
3214 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3215 unsigned int insn;
3216 int found_return;
3217 CORE_ADDR func_start, func_end;
3218
3219 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3220 return 0;
3221
3222 /* We are in the epilogue if the previous instruction was a stack
3223 adjustment and the next instruction is a possible return (bx, mov
3224 pc, or pop). We could have to scan backwards to find the stack
3225 adjustment, or forwards to find the return, but this is a decent
3226 approximation. First scan forwards. */
3227
3228 found_return = 0;
3229 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3230 if (bits (insn, 28, 31) != INST_NV)
3231 {
3232 if ((insn & 0x0ffffff0) == 0x012fff10)
3233 /* BX. */
3234 found_return = 1;
3235 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3236 /* MOV PC. */
3237 found_return = 1;
3238 else if ((insn & 0x0fff0000) == 0x08bd0000
3239 && (insn & 0x0000c000) != 0)
3240 /* POP (LDMIA), including PC or LR. */
3241 found_return = 1;
3242 }
3243
3244 if (!found_return)
3245 return 0;
3246
3247 /* Scan backwards. This is just a heuristic, so do not worry about
3248 false positives from mode changes. */
3249
3250 if (pc < func_start + 4)
3251 return 0;
3252
3253 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3254 if (arm_instruction_restores_sp (insn))
3255 return 1;
3256
3257 return 0;
3258 }
3259
3260 /* Implement the stack_frame_destroyed_p gdbarch method. */
3261
3262 static int
3263 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3264 {
3265 if (arm_pc_is_thumb (gdbarch, pc))
3266 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3267 else
3268 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3269 }
3270
3271 /* When arguments must be pushed onto the stack, they go on in reverse
3272 order. The code below implements a FILO (stack) to do this. */
3273
3274 struct stack_item
3275 {
3276 int len;
3277 struct stack_item *prev;
3278 gdb_byte *data;
3279 };
3280
3281 static struct stack_item *
3282 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3283 {
3284 struct stack_item *si;
3285 si = XNEW (struct stack_item);
3286 si->data = (gdb_byte *) xmalloc (len);
3287 si->len = len;
3288 si->prev = prev;
3289 memcpy (si->data, contents, len);
3290 return si;
3291 }
3292
3293 static struct stack_item *
3294 pop_stack_item (struct stack_item *si)
3295 {
3296 struct stack_item *dead = si;
3297 si = si->prev;
3298 xfree (dead->data);
3299 xfree (dead);
3300 return si;
3301 }
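
/* A sketch of how these helpers are typically used (the exact
   argument-writing code lives further below in arm_push_dummy_call
   and may differ slightly): items are pushed while the argument
   list is walked left to right, and popping them while writing
   below SP reverses the order again, so the first stack argument
   ends up at the lowest address.

       si = push_stack_item (si, contents, len);
       ...
       while (si)
         {
           sp -= si->len;
           write_memory (sp, si->data, si->len);
           si = pop_stack_item (si);
         }
*/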
3302
3303 /* Implement the gdbarch type alignment method, overrides the generic
3304 alignment algorithm for anything that is arm specific. */
3305
3306 static ULONGEST
3307 arm_type_align (gdbarch *gdbarch, struct type *t)
3308 {
3309 t = check_typedef (t);
3310 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3311 {
3312 /* Use the natural alignment for vector types (the same as for
3313 scalar types), but cap the alignment at 64 bits (8 bytes). */
3314 if (TYPE_LENGTH (t) > 8)
3315 return 8;
3316 else
3317 return TYPE_LENGTH (t);
3318 }
3319
3320 /* Allow the common code to calculate the alignment. */
3321 return 0;
3322 }
3323
3324 /* Possible base types for a candidate for passing and returning in
3325 VFP registers. */
3326
3327 enum arm_vfp_cprc_base_type
3328 {
3329 VFP_CPRC_UNKNOWN,
3330 VFP_CPRC_SINGLE,
3331 VFP_CPRC_DOUBLE,
3332 VFP_CPRC_VEC64,
3333 VFP_CPRC_VEC128
3334 };
3335
3336 /* The length of one element of base type B. */
3337
3338 static unsigned
3339 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3340 {
3341 switch (b)
3342 {
3343 case VFP_CPRC_SINGLE:
3344 return 4;
3345 case VFP_CPRC_DOUBLE:
3346 return 8;
3347 case VFP_CPRC_VEC64:
3348 return 8;
3349 case VFP_CPRC_VEC128:
3350 return 16;
3351 default:
3352 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3353 (int) b);
3354 }
3355 }
3356
3357 /* The character ('s', 'd' or 'q') for the type of VFP register used
3358 for passing base type B. */
3359
3360 static int
3361 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3362 {
3363 switch (b)
3364 {
3365 case VFP_CPRC_SINGLE:
3366 return 's';
3367 case VFP_CPRC_DOUBLE:
3368 return 'd';
3369 case VFP_CPRC_VEC64:
3370 return 'd';
3371 case VFP_CPRC_VEC128:
3372 return 'q';
3373 default:
3374 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3375 (int) b);
3376 }
3377 }
3378
3379 /* Determine whether T may be part of a candidate for passing and
3380 returning in VFP registers, ignoring the limit on the total number
3381 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3382 classification of the first valid component found; if it is not
3383 VFP_CPRC_UNKNOWN, all components must have the same classification
3384 as *BASE_TYPE. If it is found that T contains a type not permitted
3385 for passing and returning in VFP registers, a type differently
3386 classified from *BASE_TYPE, or two types differently classified
3387 from each other, return -1, otherwise return the total number of
3388 base-type elements found (possibly 0 in an empty structure or
3389 array). Vector types are not currently supported, matching the
3390 generic AAPCS support. */
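
/* A few illustrative classifications (examples only, not an
   exhaustive list):

     struct { float x, y, z; }      3 elements of VFP_CPRC_SINGLE
     struct { double re, im; }      2 elements of VFP_CPRC_DOUBLE
     _Complex double                2 elements of VFP_CPRC_DOUBLE
     struct { float f; double d; }  -1 (differently classified members)
     struct { float f[5]; }         5 elements, later rejected by
                                    arm_vfp_call_candidate because it
                                    exceeds four elements.  */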
3391
3392 static int
3393 arm_vfp_cprc_sub_candidate (struct type *t,
3394 enum arm_vfp_cprc_base_type *base_type)
3395 {
3396 t = check_typedef (t);
3397 switch (TYPE_CODE (t))
3398 {
3399 case TYPE_CODE_FLT:
3400 switch (TYPE_LENGTH (t))
3401 {
3402 case 4:
3403 if (*base_type == VFP_CPRC_UNKNOWN)
3404 *base_type = VFP_CPRC_SINGLE;
3405 else if (*base_type != VFP_CPRC_SINGLE)
3406 return -1;
3407 return 1;
3408
3409 case 8:
3410 if (*base_type == VFP_CPRC_UNKNOWN)
3411 *base_type = VFP_CPRC_DOUBLE;
3412 else if (*base_type != VFP_CPRC_DOUBLE)
3413 return -1;
3414 return 1;
3415
3416 default:
3417 return -1;
3418 }
3419 break;
3420
3421 case TYPE_CODE_COMPLEX:
3422 /* Arguments of complex T where T is one of the types float or
3423 double get treated as if they are implemented as:
3424
3425 struct complexT
3426 {
3427 T real;
3428 T imag;
3429 };
3430
3431 */
3432 switch (TYPE_LENGTH (t))
3433 {
3434 case 8:
3435 if (*base_type == VFP_CPRC_UNKNOWN)
3436 *base_type = VFP_CPRC_SINGLE;
3437 else if (*base_type != VFP_CPRC_SINGLE)
3438 return -1;
3439 return 2;
3440
3441 case 16:
3442 if (*base_type == VFP_CPRC_UNKNOWN)
3443 *base_type = VFP_CPRC_DOUBLE;
3444 else if (*base_type != VFP_CPRC_DOUBLE)
3445 return -1;
3446 return 2;
3447
3448 default:
3449 return -1;
3450 }
3451 break;
3452
3453 case TYPE_CODE_ARRAY:
3454 {
3455 if (TYPE_VECTOR (t))
3456 {
3457 /* A 64-bit or 128-bit containerized vector type is a VFP
3458 CPRC. */
3459 switch (TYPE_LENGTH (t))
3460 {
3461 case 8:
3462 if (*base_type == VFP_CPRC_UNKNOWN)
3463 *base_type = VFP_CPRC_VEC64;
3464 return 1;
3465 case 16:
3466 if (*base_type == VFP_CPRC_UNKNOWN)
3467 *base_type = VFP_CPRC_VEC128;
3468 return 1;
3469 default:
3470 return -1;
3471 }
3472 }
3473 else
3474 {
3475 int count;
3476 unsigned unitlen;
3477
3478 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3479 base_type);
3480 if (count == -1)
3481 return -1;
3482 if (TYPE_LENGTH (t) == 0)
3483 {
3484 gdb_assert (count == 0);
3485 return 0;
3486 }
3487 else if (count == 0)
3488 return -1;
3489 unitlen = arm_vfp_cprc_unit_length (*base_type);
3490 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3491 return TYPE_LENGTH (t) / unitlen;
3492 }
3493 }
3494 break;
3495
3496 case TYPE_CODE_STRUCT:
3497 {
3498 int count = 0;
3499 unsigned unitlen;
3500 int i;
3501 for (i = 0; i < TYPE_NFIELDS (t); i++)
3502 {
3503 int sub_count = 0;
3504
3505 if (!field_is_static (&TYPE_FIELD (t, i)))
3506 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3507 base_type);
3508 if (sub_count == -1)
3509 return -1;
3510 count += sub_count;
3511 }
3512 if (TYPE_LENGTH (t) == 0)
3513 {
3514 gdb_assert (count == 0);
3515 return 0;
3516 }
3517 else if (count == 0)
3518 return -1;
3519 unitlen = arm_vfp_cprc_unit_length (*base_type);
3520 if (TYPE_LENGTH (t) != unitlen * count)
3521 return -1;
3522 return count;
3523 }
3524
3525 case TYPE_CODE_UNION:
3526 {
3527 int count = 0;
3528 unsigned unitlen;
3529 int i;
3530 for (i = 0; i < TYPE_NFIELDS (t); i++)
3531 {
3532 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3533 base_type);
3534 if (sub_count == -1)
3535 return -1;
3536 count = (count > sub_count ? count : sub_count);
3537 }
3538 if (TYPE_LENGTH (t) == 0)
3539 {
3540 gdb_assert (count == 0);
3541 return 0;
3542 }
3543 else if (count == 0)
3544 return -1;
3545 unitlen = arm_vfp_cprc_unit_length (*base_type);
3546 if (TYPE_LENGTH (t) != unitlen * count)
3547 return -1;
3548 return count;
3549 }
3550
3551 default:
3552 break;
3553 }
3554
3555 return -1;
3556 }
3557
3558 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3559 if passed to or returned from a non-variadic function with the VFP
3560 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3561 *BASE_TYPE to the base type for T and *COUNT to the number of
3562 elements of that base type before returning. */
3563
3564 static int
3565 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3566 int *count)
3567 {
3568 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3569 int c = arm_vfp_cprc_sub_candidate (t, &b);
3570 if (c <= 0 || c > 4)
3571 return 0;
3572 *base_type = b;
3573 *count = c;
3574 return 1;
3575 }
3576
3577 /* Return 1 if the VFP ABI should be used for passing arguments to and
3578 returning values from a function of type FUNC_TYPE, 0
3579 otherwise. */
3580
3581 static int
3582 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3583 {
3584 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3585 /* Variadic functions always use the base ABI. Assume that functions
3586 without debug info are not variadic. */
3587 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3588 return 0;
3589 /* The VFP ABI is only supported as a variant of AAPCS. */
3590 if (tdep->arm_abi != ARM_ABI_AAPCS)
3591 return 0;
3592 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3593 }
3594
3595 /* We currently only support passing parameters in integer registers, which
3596 conforms with GCC's default model, and VFP argument passing following
3597 the VFP variant of AAPCS. Several other variants exist and
3598 we should probably support some of them based on the selected ABI. */
3599
3600 static CORE_ADDR
3601 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3602 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3603 struct value **args, CORE_ADDR sp,
3604 function_call_return_method return_method,
3605 CORE_ADDR struct_addr)
3606 {
3607 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3608 int argnum;
3609 int argreg;
3610 int nstack;
3611 struct stack_item *si = NULL;
3612 int use_vfp_abi;
3613 struct type *ftype;
3614 unsigned vfp_regs_free = (1 << 16) - 1;
3615
3616 /* Determine the type of this function and whether the VFP ABI
3617 applies. */
3618 ftype = check_typedef (value_type (function));
3619 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3620 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3621 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3622
3623 /* Set the return address. For the ARM, the return breakpoint is
3624 always at BP_ADDR. */
3625 if (arm_pc_is_thumb (gdbarch, bp_addr))
3626 bp_addr |= 1;
3627 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3628
3629 /* Walk through the list of args and determine how large a temporary
3630 stack is required. Need to take care here as structs may be
3631 passed on the stack, and we have to push them. */
3632 nstack = 0;
3633
3634 argreg = ARM_A1_REGNUM;
3635 nstack = 0;
3636
3637 /* The struct_return pointer occupies the first parameter
3638 passing register. */
3639 if (return_method == return_method_struct)
3640 {
3641 if (arm_debug)
3642 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3643 gdbarch_register_name (gdbarch, argreg),
3644 paddress (gdbarch, struct_addr));
3645 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3646 argreg++;
3647 }
3648
3649 for (argnum = 0; argnum < nargs; argnum++)
3650 {
3651 int len;
3652 struct type *arg_type;
3653 struct type *target_type;
3654 enum type_code typecode;
3655 const bfd_byte *val;
3656 int align;
3657 enum arm_vfp_cprc_base_type vfp_base_type;
3658 int vfp_base_count;
3659 int may_use_core_reg = 1;
3660
3661 arg_type = check_typedef (value_type (args[argnum]));
3662 len = TYPE_LENGTH (arg_type);
3663 target_type = TYPE_TARGET_TYPE (arg_type);
3664 typecode = TYPE_CODE (arg_type);
3665 val = value_contents (args[argnum]);
3666
3667 align = type_align (arg_type);
3668 /* Round alignment up to a whole number of words. */
3669 align = (align + ARM_INT_REGISTER_SIZE - 1)
3670 & ~(ARM_INT_REGISTER_SIZE - 1);
3671 /* Different ABIs have different maximum alignments. */
3672 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3673 {
3674 /* The APCS ABI only requires word alignment. */
3675 align = ARM_INT_REGISTER_SIZE;
3676 }
3677 else
3678 {
3679 /* The AAPCS requires at most doubleword alignment. */
3680 if (align > ARM_INT_REGISTER_SIZE * 2)
3681 align = ARM_INT_REGISTER_SIZE * 2;
3682 }
3683
3684 if (use_vfp_abi
3685 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3686 &vfp_base_count))
3687 {
3688 int regno;
3689 int unit_length;
3690 int shift;
3691 unsigned mask;
3692
3693 /* Because this is a CPRC it cannot go in a core register or
3694 cause a core register to be skipped for alignment.
3695 Either it goes in VFP registers and the rest of this loop
3696 iteration is skipped for this argument, or it goes on the
3697 stack (and the stack alignment code is correct for this
3698 case). */
3699 may_use_core_reg = 0;
3700
3701 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3702 shift = unit_length / 4;
3703 mask = (1 << (shift * vfp_base_count)) - 1;
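/* Look for the lowest-numbered contiguous run of free allocation units
   large enough for this argument: SHIFT units per base-type element,
   VFP_BASE_COUNT elements in total.  */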
3704 for (regno = 0; regno < 16; regno += shift)
3705 if (((vfp_regs_free >> regno) & mask) == mask)
3706 break;
3707
3708 if (regno < 16)
3709 {
3710 int reg_char;
3711 int reg_scaled;
3712 int i;
3713
3714 vfp_regs_free &= ~(mask << regno);
3715 reg_scaled = regno / shift;
3716 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3717 for (i = 0; i < vfp_base_count; i++)
3718 {
3719 char name_buf[4];
3720 int regnum;
3721 if (reg_char == 'q')
3722 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3723 val + i * unit_length);
3724 else
3725 {
3726 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3727 reg_char, reg_scaled + i);
3728 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3729 strlen (name_buf));
3730 regcache->cooked_write (regnum, val + i * unit_length);
3731 }
3732 }
3733 continue;
3734 }
3735 else
3736 {
3737 /* This CPRC could not go in VFP registers, so all VFP
3738 registers are now marked as used. */
3739 vfp_regs_free = 0;
3740 }
3741 }
3742
3743 /* Push stack padding for doubleword alignment. */
3744 if (nstack & (align - 1))
3745 {
3746 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3747 nstack += ARM_INT_REGISTER_SIZE;
3748 }
3749
3750 /* Doubleword aligned quantities must go in even register pairs. */
3751 if (may_use_core_reg
3752 && argreg <= ARM_LAST_ARG_REGNUM
3753 && align > ARM_INT_REGISTER_SIZE
3754 && argreg & 1)
3755 argreg++;
3756
3757 /* If the argument is a pointer to a function, and it is a
3758 Thumb function, create a LOCAL copy of the value and set
3759 the THUMB bit in it. */
3760 if (TYPE_CODE_PTR == typecode
3761 && target_type != NULL
3762 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3763 {
3764 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3765 if (arm_pc_is_thumb (gdbarch, regval))
3766 {
3767 bfd_byte *copy = (bfd_byte *) alloca (len);
3768 store_unsigned_integer (copy, len, byte_order,
3769 MAKE_THUMB_ADDR (regval));
3770 val = copy;
3771 }
3772 }
3773
3774 /* Copy the argument to general registers or the stack in
3775 register-sized pieces. Large arguments are split between
3776 registers and stack. */
3777 while (len > 0)
3778 {
3779 int partial_len = len < ARM_INT_REGISTER_SIZE
3780 ? len : ARM_INT_REGISTER_SIZE;
3781 CORE_ADDR regval
3782 = extract_unsigned_integer (val, partial_len, byte_order);
3783
3784 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3785 {
3786 /* The argument is being passed in a general purpose
3787 register. */
3788 if (byte_order == BFD_ENDIAN_BIG)
3789 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3790 if (arm_debug)
3791 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3792 argnum,
3793 gdbarch_register_name
3794 (gdbarch, argreg),
3795 phex (regval, ARM_INT_REGISTER_SIZE));
3796 regcache_cooked_write_unsigned (regcache, argreg, regval);
3797 argreg++;
3798 }
3799 else
3800 {
3801 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3802
3803 memset (buf, 0, sizeof (buf));
3804 store_unsigned_integer (buf, partial_len, byte_order, regval);
3805
3806 /* Push the arguments onto the stack. */
3807 if (arm_debug)
3808 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3809 argnum, nstack);
3810 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3811 nstack += ARM_INT_REGISTER_SIZE;
3812 }
3813
3814 len -= partial_len;
3815 val += partial_len;
3816 }
3817 }
3818 /* If we have an odd number of words to push, then decrement the stack
3819 by one word now, so the first stack argument will be doubleword aligned. */
3820 if (nstack & 4)
3821 sp -= 4;
3822
3823 while (si)
3824 {
3825 sp -= si->len;
3826 write_memory (sp, si->data, si->len);
3827 si = pop_stack_item (si);
3828 }
3829
3830 /* Finally, update the SP register. */
3831 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3832
3833 return sp;
3834 }
3835
3836
3837 /* Always align the frame to an 8-byte boundary. This is required on
3838 some platforms and harmless on the rest. */
3839
3840 static CORE_ADDR
3841 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3842 {
3843 /* Align the stack to eight bytes. */
3844 return sp & ~ (CORE_ADDR) 7;
3845 }
3846
3847 static void
3848 print_fpu_flags (struct ui_file *file, int flags)
3849 {
3850 if (flags & (1 << 0))
3851 fputs_filtered ("IVO ", file);
3852 if (flags & (1 << 1))
3853 fputs_filtered ("DVZ ", file);
3854 if (flags & (1 << 2))
3855 fputs_filtered ("OFL ", file);
3856 if (flags & (1 << 3))
3857 fputs_filtered ("UFL ", file);
3858 if (flags & (1 << 4))
3859 fputs_filtered ("INX ", file);
3860 fputc_filtered ('\n', file);
3861 }
3862
3863 /* Print interesting information about the floating point processor
3864 (if present) or emulator. */
3865 static void
3866 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3867 struct frame_info *frame, const char *args)
3868 {
3869 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3870 int type;
3871
3872 type = (status >> 24) & 127;
3873 if (status & (1 << 31))
3874 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3875 else
3876 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3877 /* i18n: [floating point unit] mask */
3878 fputs_filtered (_("mask: "), file);
3879 print_fpu_flags (file, status >> 16);
3880 /* i18n: [floating point unit] flags */
3881 fputs_filtered (_("flags: "), file);
3882 print_fpu_flags (file, status);
3883 }
3884
3885 /* Construct the ARM extended floating point type. */
3886 static struct type *
3887 arm_ext_type (struct gdbarch *gdbarch)
3888 {
3889 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3890
3891 if (!tdep->arm_ext_type)
3892 tdep->arm_ext_type
3893 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3894 floatformats_arm_ext);
3895
3896 return tdep->arm_ext_type;
3897 }
3898
3899 static struct type *
3900 arm_neon_double_type (struct gdbarch *gdbarch)
3901 {
3902 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3903
3904 if (tdep->neon_double_type == NULL)
3905 {
3906 struct type *t, *elem;
3907
3908 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3909 TYPE_CODE_UNION);
3910 elem = builtin_type (gdbarch)->builtin_uint8;
3911 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3912 elem = builtin_type (gdbarch)->builtin_uint16;
3913 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3914 elem = builtin_type (gdbarch)->builtin_uint32;
3915 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3916 elem = builtin_type (gdbarch)->builtin_uint64;
3917 append_composite_type_field (t, "u64", elem);
3918 elem = builtin_type (gdbarch)->builtin_float;
3919 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3920 elem = builtin_type (gdbarch)->builtin_double;
3921 append_composite_type_field (t, "f64", elem);
3922
3923 TYPE_VECTOR (t) = 1;
3924 TYPE_NAME (t) = "neon_d";
3925 tdep->neon_double_type = t;
3926 }
3927
3928 return tdep->neon_double_type;
3929 }
3930
3931 /* FIXME: The vector types are not correctly ordered on big-endian
3932 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3933 bits of d0 - regardless of what unit size is being held in d0. So
3934 the offset of the first uint8 in d0 is 7, but the offset of the
3935 first float is 4. This code works as-is for little-endian
3936 targets. */
3937
3938 static struct type *
3939 arm_neon_quad_type (struct gdbarch *gdbarch)
3940 {
3941 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3942
3943 if (tdep->neon_quad_type == NULL)
3944 {
3945 struct type *t, *elem;
3946
3947 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3948 TYPE_CODE_UNION);
3949 elem = builtin_type (gdbarch)->builtin_uint8;
3950 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3951 elem = builtin_type (gdbarch)->builtin_uint16;
3952 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3953 elem = builtin_type (gdbarch)->builtin_uint32;
3954 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3955 elem = builtin_type (gdbarch)->builtin_uint64;
3956 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3957 elem = builtin_type (gdbarch)->builtin_float;
3958 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3959 elem = builtin_type (gdbarch)->builtin_double;
3960 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3961
3962 TYPE_VECTOR (t) = 1;
3963 TYPE_NAME (t) = "neon_q";
3964 tdep->neon_quad_type = t;
3965 }
3966
3967 return tdep->neon_quad_type;
3968 }
3969
3970 /* Return the GDB type object for the "standard" data type of data in
3971 register N. */
3972
3973 static struct type *
3974 arm_register_type (struct gdbarch *gdbarch, int regnum)
3975 {
3976 int num_regs = gdbarch_num_regs (gdbarch);
3977
3978 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3979 && regnum >= num_regs && regnum < num_regs + 32)
3980 return builtin_type (gdbarch)->builtin_float;
3981
3982 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3983 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3984 return arm_neon_quad_type (gdbarch);
3985
3986 /* If the target description has register information, we are only
3987 in this function so that we can override the types of
3988 double-precision registers for NEON. */
3989 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3990 {
3991 struct type *t = tdesc_register_type (gdbarch, regnum);
3992
3993 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3994 && TYPE_CODE (t) == TYPE_CODE_FLT
3995 && gdbarch_tdep (gdbarch)->have_neon)
3996 return arm_neon_double_type (gdbarch);
3997 else
3998 return t;
3999 }
4000
4001 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4002 {
4003 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4004 return builtin_type (gdbarch)->builtin_void;
4005
4006 return arm_ext_type (gdbarch);
4007 }
4008 else if (regnum == ARM_SP_REGNUM)
4009 return builtin_type (gdbarch)->builtin_data_ptr;
4010 else if (regnum == ARM_PC_REGNUM)
4011 return builtin_type (gdbarch)->builtin_func_ptr;
4012 else if (regnum >= ARRAY_SIZE (arm_register_names))
4013 /* These registers are only supported on targets which supply
4014 an XML description. */
4015 return builtin_type (gdbarch)->builtin_int0;
4016 else
4017 return builtin_type (gdbarch)->builtin_uint32;
4018 }
4019
4020 /* Map a DWARF register REGNUM onto the appropriate GDB register
4021 number. */
4022
4023 static int
4024 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4025 {
4026 /* Core integer regs. */
4027 if (reg >= 0 && reg <= 15)
4028 return reg;
4029
4030 /* Legacy FPA encoding. These were once used in a way which
4031 overlapped with VFP register numbering, so their use is
4032 discouraged, but GDB doesn't support the ARM toolchain
4033 which used them for VFP. */
4034 if (reg >= 16 && reg <= 23)
4035 return ARM_F0_REGNUM + reg - 16;
4036
4037 /* New assignments for the FPA registers. */
4038 if (reg >= 96 && reg <= 103)
4039 return ARM_F0_REGNUM + reg - 96;
4040
4041 /* WMMX register assignments. */
4042 if (reg >= 104 && reg <= 111)
4043 return ARM_WCGR0_REGNUM + reg - 104;
4044
4045 if (reg >= 112 && reg <= 127)
4046 return ARM_WR0_REGNUM + reg - 112;
4047
4048 if (reg >= 192 && reg <= 199)
4049 return ARM_WC0_REGNUM + reg - 192;
4050
4051 /* VFP v2 registers. A double precision value is actually
4052 in d1 rather than s2, but the ABI only defines numbering
4053 for the single precision registers. This will "just work"
4054 in GDB for little endian targets (we'll read eight bytes,
4055 starting in s0 and then progressing to s1), but will be
4056 reversed on big endian targets with VFP. This won't
4057 be a problem for the new Neon quad registers; you're supposed
4058 to use DW_OP_piece for those. */
4059 if (reg >= 64 && reg <= 95)
4060 {
4061 char name_buf[4];
4062
4063 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4064 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4065 strlen (name_buf));
4066 }
4067
4068 /* VFP v3 / Neon registers. This range is also used for VFP v2
4069 registers, except that it now describes d0 instead of s0. */
4070 if (reg >= 256 && reg <= 287)
4071 {
4072 char name_buf[4];
4073
4074 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4075 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4076 strlen (name_buf));
4077 }
4078
4079 return -1;
4080 }
4081
4082 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4083 static int
4084 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4085 {
4086 int reg = regnum;
4087 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4088
4089 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4090 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4091
4092 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4093 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4094
4095 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4096 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4097
4098 if (reg < NUM_GREGS)
4099 return SIM_ARM_R0_REGNUM + reg;
4100 reg -= NUM_GREGS;
4101
4102 if (reg < NUM_FREGS)
4103 return SIM_ARM_FP0_REGNUM + reg;
4104 reg -= NUM_FREGS;
4105
4106 if (reg < NUM_SREGS)
4107 return SIM_ARM_FPS_REGNUM + reg;
4108 reg -= NUM_SREGS;
4109
4110 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4111 }
4112
4113 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4114 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4115 NULL if an error occurs. BUF is freed. */
4116
4117 static gdb_byte *
4118 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4119 int old_len, int new_len)
4120 {
4121 gdb_byte *new_buf;
4122 int bytes_to_read = new_len - old_len;
4123
4124 new_buf = (gdb_byte *) xmalloc (new_len);
4125 memcpy (new_buf + bytes_to_read, buf, old_len);
4126 xfree (buf);
4127 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4128 {
4129 xfree (new_buf);
4130 return NULL;
4131 }
4132 return new_buf;
4133 }
4134
4135 /* An IT block is at most the 2-byte IT instruction followed by
4136 four 4-byte instructions. The furthest back we must search to
4137 find an IT block that affects the current instruction is thus
4138 2 + 3 * 4 == 14 bytes. */
4139 #define MAX_IT_BLOCK_PREFIX 14
4140
4141 /* Use a quick scan if there are more than this many bytes of
4142 code. */
4143 #define IT_SCAN_THRESHOLD 32
4144
4145 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4146 A breakpoint in an IT block may not be hit, depending on the
4147 condition flags. */
4148 static CORE_ADDR
4149 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4150 {
4151 gdb_byte *buf;
4152 char map_type;
4153 CORE_ADDR boundary, func_start;
4154 int buf_len;
4155 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4156 int i, any, last_it, last_it_count;
4157
4158 /* If we are using BKPT breakpoints, none of this is necessary. */
4159 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4160 return bpaddr;
4161
4162 /* ARM mode does not have this problem. */
4163 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4164 return bpaddr;
4165
4166 /* We are setting a breakpoint in Thumb code that could potentially
4167 contain an IT block. The first step is to find how much Thumb
4168 code there is; we do not need to read outside of known Thumb
4169 sequences. */
4170 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4171 if (map_type == 0)
4172 /* Thumb-2 code must have mapping symbols to have a chance. */
4173 return bpaddr;
4174
4175 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4176
4177 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4178 && func_start > boundary)
4179 boundary = func_start;
4180
4181 /* Search for a candidate IT instruction. We have to do some fancy
4182 footwork to distinguish a real IT instruction from the second
4183 half of a 32-bit instruction, but there is no need for that if
4184 there's no candidate. */
4185 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4186 if (buf_len == 0)
4187 /* No room for an IT instruction. */
4188 return bpaddr;
4189
4190 buf = (gdb_byte *) xmalloc (buf_len);
4191 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4192 return bpaddr;
4193 any = 0;
4194 for (i = 0; i < buf_len; i += 2)
4195 {
4196 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4197 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4198 {
4199 any = 1;
4200 break;
4201 }
4202 }
4203
4204 if (any == 0)
4205 {
4206 xfree (buf);
4207 return bpaddr;
4208 }
4209
4210 /* OK, the code bytes before this instruction contain at least one
4211 halfword which resembles an IT instruction. We know that it's
4212 Thumb code, but there are still two possibilities. Either the
4213 halfword really is an IT instruction, or it is the second half of
4214 a 32-bit Thumb instruction. The only way we can tell is to
4215 scan forwards from a known instruction boundary. */
4216 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4217 {
4218 int definite;
4219
4220 /* There's a lot of code before this instruction. Start with an
4221 optimistic search; it's easy to recognize halfwords that can
4222 not be the start of a 32-bit instruction, and use that to
4223 lock on to the instruction boundaries. */
4224 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4225 if (buf == NULL)
4226 return bpaddr;
4227 buf_len = IT_SCAN_THRESHOLD;
4228
4229 definite = 0;
4230 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4231 {
4232 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4233 if (thumb_insn_size (inst1) == 2)
4234 {
4235 definite = 1;
4236 break;
4237 }
4238 }
4239
4240 /* At this point, if DEFINITE, BUF[I] is the first place we
4241 are sure that we know the instruction boundaries, and it is far
4242 enough from BPADDR that we could not miss an IT instruction
4243 affecting BPADDR. If ! DEFINITE, give up - start from a
4244 known boundary. */
4245 if (! definite)
4246 {
4247 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4248 bpaddr - boundary);
4249 if (buf == NULL)
4250 return bpaddr;
4251 buf_len = bpaddr - boundary;
4252 i = 0;
4253 }
4254 }
4255 else
4256 {
4257 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4258 if (buf == NULL)
4259 return bpaddr;
4260 buf_len = bpaddr - boundary;
4261 i = 0;
4262 }
4263
4264 /* Scan forwards. Find the last IT instruction before BPADDR. */
4265 last_it = -1;
4266 last_it_count = 0;
4267 while (i < buf_len)
4268 {
4269 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4270 last_it_count--;
4271 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4272 {
4273 last_it = i;
4274 if (inst1 & 0x0001)
4275 last_it_count = 4;
4276 else if (inst1 & 0x0002)
4277 last_it_count = 3;
4278 else if (inst1 & 0x0004)
4279 last_it_count = 2;
4280 else
4281 last_it_count = 1;
4282 }
4283 i += thumb_insn_size (inst1);
4284 }
4285
4286 xfree (buf);
4287
4288 if (last_it == -1)
4289 /* There wasn't really an IT instruction after all. */
4290 return bpaddr;
4291
4292 if (last_it_count < 1)
4293 /* It was too far away. */
4294 return bpaddr;
4295
4296 /* This really is a trouble spot. Move the breakpoint to the IT
4297 instruction. */
4298 return bpaddr - buf_len + last_it;
4299 }
4300
4301 /* ARM displaced stepping support.
4302
4303 Generally ARM displaced stepping works as follows:
4304
4305 1. When an instruction is to be single-stepped, it is first decoded by
4306 arm_process_displaced_insn. Depending on the type of instruction, it is
4307 then copied to a scratch location, possibly in a modified form. The
4308 copy_* set of functions performs such modification, as necessary. A
4309 breakpoint is placed after the modified instruction in the scratch space
4310 to return control to GDB. Note in particular that instructions which
4311 modify the PC will no longer do so after modification.
4312
4313 2. The instruction is single-stepped, by setting the PC to the scratch
4314 location address, and resuming. Control returns to GDB when the
4315 breakpoint is hit.
4316
4317 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4318 function used for the current instruction. This function's job is to
4319 put the CPU/memory state back to what it would have been if the
4320 instruction had been executed unmodified in its original location. */
4321
4322 /* NOP instruction (mov r0, r0). */
4323 #define ARM_NOP 0xe1a00000
4324 #define THUMB_NOP 0x4600
4325
4326 /* Helper for register reads for displaced stepping. In particular, this
4327 returns the PC as it would be seen by the instruction at its original
4328 location. */
4329
4330 ULONGEST
4331 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4332 int regno)
4333 {
4334 ULONGEST ret;
4335 CORE_ADDR from = dsc->insn_addr;
4336
4337 if (regno == ARM_PC_REGNUM)
4338 {
4339 /* Compute pipeline offset:
4340 - When executing an ARM instruction, PC reads as the address of the
4341 current instruction plus 8.
4342 - When executing a Thumb instruction, PC reads as the address of the
4343 current instruction plus 4. */
4344
4345 if (!dsc->is_thumb)
4346 from += 8;
4347 else
4348 from += 4;
4349
4350 if (debug_displaced)
4351 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4352 (unsigned long) from);
4353 return (ULONGEST) from;
4354 }
4355 else
4356 {
4357 regcache_cooked_read_unsigned (regs, regno, &ret);
4358 if (debug_displaced)
4359 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4360 regno, (unsigned long) ret);
4361 return ret;
4362 }
4363 }
4364
4365 static int
4366 displaced_in_arm_mode (struct regcache *regs)
4367 {
4368 ULONGEST ps;
4369 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4370
4371 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4372
4373 return (ps & t_bit) == 0;
4374 }
4375
4376 /* Write to the PC as from a branch instruction. */
4377
4378 static void
4379 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4380 ULONGEST val)
4381 {
4382 if (!dsc->is_thumb)
4383 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4384 architecture versions < 6. */
4385 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4386 val & ~(ULONGEST) 0x3);
4387 else
4388 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4389 val & ~(ULONGEST) 0x1);
4390 }
4391
4392 /* Write to the PC as from a branch-exchange instruction. */
4393
4394 static void
4395 bx_write_pc (struct regcache *regs, ULONGEST val)
4396 {
4397 ULONGEST ps;
4398 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4399
4400 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4401
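/* Bit 0 of the destination selects the instruction set: 1 selects Thumb;
   0 selects ARM, in which case bit 1 must also be clear for the branch to
   be predictable.  */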
4402 if ((val & 1) == 1)
4403 {
4404 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4405 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4406 }
4407 else if ((val & 2) == 0)
4408 {
4409 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4410 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4411 }
4412 else
4413 {
4414 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4415 mode, align dest to 4 bytes). */
4416 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4417 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4418 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4419 }
4420 }
4421
4422 /* Write to the PC as if from a load instruction. */
4423
4424 static void
4425 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4426 ULONGEST val)
4427 {
4428 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4429 bx_write_pc (regs, val);
4430 else
4431 branch_write_pc (regs, dsc, val);
4432 }
4433
4434 /* Write to the PC as if from an ALU instruction. */
4435
4436 static void
4437 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4438 ULONGEST val)
4439 {
4440 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4441 bx_write_pc (regs, val);
4442 else
4443 branch_write_pc (regs, dsc, val);
4444 }
4445
4446 /* Helper for writing to registers for displaced stepping. Writing to the PC
4447 has varying effects depending on the instruction which does the write:
4448 this is controlled by the WRITE_PC argument. */
4449
4450 void
4451 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4452 int regno, ULONGEST val, enum pc_write_style write_pc)
4453 {
4454 if (regno == ARM_PC_REGNUM)
4455 {
4456 if (debug_displaced)
4457 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4458 (unsigned long) val);
4459 switch (write_pc)
4460 {
4461 case BRANCH_WRITE_PC:
4462 branch_write_pc (regs, dsc, val);
4463 break;
4464
4465 case BX_WRITE_PC:
4466 bx_write_pc (regs, val);
4467 break;
4468
4469 case LOAD_WRITE_PC:
4470 load_write_pc (regs, dsc, val);
4471 break;
4472
4473 case ALU_WRITE_PC:
4474 alu_write_pc (regs, dsc, val);
4475 break;
4476
4477 case CANNOT_WRITE_PC:
4478 warning (_("Instruction wrote to PC in an unexpected way when "
4479 "single-stepping"));
4480 break;
4481
4482 default:
4483 internal_error (__FILE__, __LINE__,
4484 _("Invalid argument to displaced_write_reg"));
4485 }
4486
4487 dsc->wrote_to_pc = 1;
4488 }
4489 else
4490 {
4491 if (debug_displaced)
4492 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4493 regno, (unsigned long) val);
4494 regcache_cooked_write_unsigned (regs, regno, val);
4495 }
4496 }
4497
4498 /* This function is used to concisely determine if an instruction INSN
4499 references PC. Register fields of interest in INSN should have the
4500 corresponding fields of BITMASK set to 0b1111. The function
4501 returns 1 if any of these fields in INSN reference the PC
4502 (also 0b1111, r15), else it returns 0. */
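/* For example, insn_references_pc (insn, 0x000f0000ul) is nonzero only when
   the register field in bits 16-19 of INSN (typically Rn) is r15.  */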
4503
4504 static int
4505 insn_references_pc (uint32_t insn, uint32_t bitmask)
4506 {
4507 uint32_t lowbit = 1;
4508
4509 while (bitmask != 0)
4510 {
4511 uint32_t mask;
4512
4513 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4514 ;
4515
4516 if (!lowbit)
4517 break;
4518
4519 mask = lowbit * 0xf;
4520
4521 if ((insn & mask) == mask)
4522 return 1;
4523
4524 bitmask &= ~mask;
4525 }
4526
4527 return 0;
4528 }
4529
4530 /* The simplest copy function. Many instructions have the same effect no
4531 matter what address they are executed at: in those cases, use this. */
4532
4533 static int
4534 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4535 const char *iname, arm_displaced_step_closure *dsc)
4536 {
4537 if (debug_displaced)
4538 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4539 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4540 iname);
4541
4542 dsc->modinsn[0] = insn;
4543
4544 return 0;
4545 }
4546
4547 static int
4548 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4549 uint16_t insn2, const char *iname,
4550 arm_displaced_step_closure *dsc)
4551 {
4552 if (debug_displaced)
4553 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4554 "opcode/class '%s' unmodified\n", insn1, insn2,
4555 iname);
4556
4557 dsc->modinsn[0] = insn1;
4558 dsc->modinsn[1] = insn2;
4559 dsc->numinsns = 2;
4560
4561 return 0;
4562 }
4563
4564 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4565 modification. */
4566 static int
4567 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4568 const char *iname,
4569 arm_displaced_step_closure *dsc)
4570 {
4571 if (debug_displaced)
4572 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4573 "opcode/class '%s' unmodified\n", insn,
4574 iname);
4575
4576 dsc->modinsn[0] = insn;
4577
4578 return 0;
4579 }
4580
4581 /* Preload instructions with immediate offset. */
4582
4583 static void
4584 cleanup_preload (struct gdbarch *gdbarch,
4585 struct regcache *regs, arm_displaced_step_closure *dsc)
4586 {
4587 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4588 if (!dsc->u.preload.immed)
4589 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4590 }
4591
4592 static void
4593 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4594 arm_displaced_step_closure *dsc, unsigned int rn)
4595 {
4596 ULONGEST rn_val;
4597 /* Preload instructions:
4598
4599 {pli/pld} [rn, #+/-imm]
4600 ->
4601 {pli/pld} [r0, #+/-imm]. */
4602
4603 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4604 rn_val = displaced_read_reg (regs, dsc, rn);
4605 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4606 dsc->u.preload.immed = 1;
4607
4608 dsc->cleanup = &cleanup_preload;
4609 }
4610
4611 static int
4612 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4613 arm_displaced_step_closure *dsc)
4614 {
4615 unsigned int rn = bits (insn, 16, 19);
4616
4617 if (!insn_references_pc (insn, 0x000f0000ul))
4618 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4619
4620 if (debug_displaced)
4621 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4622 (unsigned long) insn);
4623
4624 dsc->modinsn[0] = insn & 0xfff0ffff;
4625
4626 install_preload (gdbarch, regs, dsc, rn);
4627
4628 return 0;
4629 }
4630
4631 static int
4632 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4633 struct regcache *regs, arm_displaced_step_closure *dsc)
4634 {
4635 unsigned int rn = bits (insn1, 0, 3);
4636 unsigned int u_bit = bit (insn1, 7);
4637 int imm12 = bits (insn2, 0, 11);
4638 ULONGEST pc_val;
4639
4640 if (rn != ARM_PC_REGNUM)
4641 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4642
4643 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4644 PLD (literal) Encoding T1. */
4645 if (debug_displaced)
4646 fprintf_unfiltered (gdb_stdlog,
4647 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4648 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4649 imm12);
4650
4651 if (!u_bit)
4652 imm12 = -1 * imm12;
4653
4654 /* Rewrite instruction {pli/pld} PC imm12 into:
4655 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4656
4657 {pli/pld} [r0, r1]
4658
4659 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4660
4661 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4662 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4663
4664 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4665
4666 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4667 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4668 dsc->u.preload.immed = 0;
4669
4670 /* {pli/pld} [r0, r1] */
4671 dsc->modinsn[0] = insn1 & 0xfff0;
4672 dsc->modinsn[1] = 0xf001;
4673 dsc->numinsns = 2;
4674
4675 dsc->cleanup = &cleanup_preload;
4676 return 0;
4677 }
4678
4679 /* Preload instructions with register offset. */
4680
4681 static void
4682 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4683 arm_displaced_step_closure *dsc, unsigned int rn,
4684 unsigned int rm)
4685 {
4686 ULONGEST rn_val, rm_val;
4687
4688 /* Preload register-offset instructions:
4689
4690 {pli/pld} [rn, rm {, shift}]
4691 ->
4692 {pli/pld} [r0, r1 {, shift}]. */
4693
4694 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4695 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4696 rn_val = displaced_read_reg (regs, dsc, rn);
4697 rm_val = displaced_read_reg (regs, dsc, rm);
4698 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4699 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4700 dsc->u.preload.immed = 0;
4701
4702 dsc->cleanup = &cleanup_preload;
4703 }
4704
4705 static int
4706 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4707 struct regcache *regs,
4708 arm_displaced_step_closure *dsc)
4709 {
4710 unsigned int rn = bits (insn, 16, 19);
4711 unsigned int rm = bits (insn, 0, 3);
4712
4713
4714 if (!insn_references_pc (insn, 0x000f000ful))
4715 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4716
4717 if (debug_displaced)
4718 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4719 (unsigned long) insn);
4720
4721 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4722
4723 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4724 return 0;
4725 }
4726
4727 /* Copy/cleanup coprocessor load and store instructions. */
4728
4729 static void
4730 cleanup_copro_load_store (struct gdbarch *gdbarch,
4731 struct regcache *regs,
4732 arm_displaced_step_closure *dsc)
4733 {
4734 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4735
4736 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4737
4738 if (dsc->u.ldst.writeback)
4739 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4740 }
4741
4742 static void
4743 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4744 arm_displaced_step_closure *dsc,
4745 int writeback, unsigned int rn)
4746 {
4747 ULONGEST rn_val;
4748
4749 /* Coprocessor load/store instructions:
4750
4751 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4752 ->
4753 {stc/stc2} [r0, #+/-imm].
4754
4755 ldc/ldc2 are handled identically. */
4756
4757 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4758 rn_val = displaced_read_reg (regs, dsc, rn);
4759 /* PC should be 4-byte aligned. */
4760 rn_val = rn_val & 0xfffffffc;
4761 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4762
4763 dsc->u.ldst.writeback = writeback;
4764 dsc->u.ldst.rn = rn;
4765
4766 dsc->cleanup = &cleanup_copro_load_store;
4767 }
4768
4769 static int
4770 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4771 struct regcache *regs,
4772 arm_displaced_step_closure *dsc)
4773 {
4774 unsigned int rn = bits (insn, 16, 19);
4775
4776 if (!insn_references_pc (insn, 0x000f0000ul))
4777 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4778
4779 if (debug_displaced)
4780 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4781 "load/store insn %.8lx\n", (unsigned long) insn);
4782
4783 dsc->modinsn[0] = insn & 0xfff0ffff;
4784
4785 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4786
4787 return 0;
4788 }
4789
4790 static int
4791 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4792 uint16_t insn2, struct regcache *regs,
4793 arm_displaced_step_closure *dsc)
4794 {
4795 unsigned int rn = bits (insn1, 0, 3);
4796
4797 if (rn != ARM_PC_REGNUM)
4798 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4799 "copro load/store", dsc);
4800
4801 if (debug_displaced)
4802 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4803 "load/store insn %.4x%.4x\n", insn1, insn2);
4804
4805 dsc->modinsn[0] = insn1 & 0xfff0;
4806 dsc->modinsn[1] = insn2;
4807 dsc->numinsns = 2;
4808
4809 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4810 don't support writeback, so pass 0. */
4811 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4812
4813 return 0;
4814 }
4815
4816 /* Clean up branch instructions (actually perform the branch, by setting
4817 PC). */
4818
4819 static void
4820 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4821 arm_displaced_step_closure *dsc)
4822 {
4823 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4824 int branch_taken = condition_true (dsc->u.branch.cond, status);
4825 enum pc_write_style write_pc = dsc->u.branch.exchange
4826 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4827
4828 if (!branch_taken)
4829 return;
4830
4831 if (dsc->u.branch.link)
4832 {
4833 /* The value of LR should be the address of the next insn after the current
4834 one. In order not to confuse the logic handling a later `bx lr' insn, if the
4835 current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
4836 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4837
4838 if (dsc->is_thumb)
4839 next_insn_addr |= 0x1;
4840
4841 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4842 CANNOT_WRITE_PC);
4843 }
4844
4845 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4846 }
4847
4848 /* Copy B/BL/BLX instructions with immediate destinations. */
4849
4850 static void
4851 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4852 arm_displaced_step_closure *dsc,
4853 unsigned int cond, int exchange, int link, long offset)
4854 {
4855 /* Implement "BL<cond> <label>" as:
4856
4857 Preparation: cond <- instruction condition
4858 Insn: mov r0, r0 (nop)
4859 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4860
4861 B<cond> similar, but don't set r14 in cleanup. */
4862
4863 dsc->u.branch.cond = cond;
4864 dsc->u.branch.link = link;
4865 dsc->u.branch.exchange = exchange;
4866
4867 dsc->u.branch.dest = dsc->insn_addr;
4868 if (link && exchange)
4869 /* For BLX, the offset is computed from Align (PC, 4). */
4870 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4871
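/* Bits 24..30 of the FPS register identify the FPU type; bit 31 is set
   when a hardware FPU is present.  */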
4872 if (dsc->is_thumb)
4873 dsc->u.branch.dest += 4 + offset;
4874 else
4875 dsc->u.branch.dest += 8 + offset;
4876
4877 dsc->cleanup = &cleanup_branch;
4878 }
4879 static int
4880 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4881 struct regcache *regs, arm_displaced_step_closure *dsc)
4882 {
4883 unsigned int cond = bits (insn, 28, 31);
4884 int exchange = (cond == 0xf);
4885 int link = exchange || bit (insn, 24);
4886 long offset;
4887
4888 if (debug_displaced)
4889 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4890 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4891 (unsigned long) insn);
4892 if (exchange)
4893 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4894 then arrange the switch into Thumb mode. */
4895 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4896 else
4897 offset = bits (insn, 0, 23) << 2;
4898
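/* The 26-bit offset (imm24 shifted left by two) is signed; sign-extend
   from bit 25.  */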
4899 if (bit (offset, 25))
4900 offset = offset | ~0x3ffffff;
4901
4902 dsc->modinsn[0] = ARM_NOP;
4903
4904 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4905 return 0;
4906 }
4907
4908 static int
4909 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4910 uint16_t insn2, struct regcache *regs,
4911 arm_displaced_step_closure *dsc)
4912 {
4913 int link = bit (insn2, 14);
4914 int exchange = link && !bit (insn2, 12);
4915 int cond = INST_AL;
4916 long offset = 0;
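/* In the Thumb-2 B/BL/BLX encodings, bit 10 of INSN1 is S, J1/J2 come from
   INSN2, and I1 = NOT(J1 EOR S), I2 = NOT(J2 EOR S).  */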
4917 int j1 = bit (insn2, 13);
4918 int j2 = bit (insn2, 11);
4919 int s = sbits (insn1, 10, 10);
4920 int i1 = !(j1 ^ bit (insn1, 10));
4921 int i2 = !(j2 ^ bit (insn1, 10));
4922
4923 if (!link && !exchange) /* B */
4924 {
4925 offset = (bits (insn2, 0, 10) << 1);
4926 if (bit (insn2, 12)) /* Encoding T4 */
4927 {
4928 offset |= (bits (insn1, 0, 9) << 12)
4929 | (i2 << 22)
4930 | (i1 << 23)
4931 | (s << 24);
4932 cond = INST_AL;
4933 }
4934 else /* Encoding T3 */
4935 {
4936 offset |= (bits (insn1, 0, 5) << 12)
4937 | (j1 << 18)
4938 | (j2 << 19)
4939 | (s << 20);
4940 cond = bits (insn1, 6, 9);
4941 }
4942 }
4943 else
4944 {
4945 offset = (bits (insn1, 0, 9) << 12);
4946 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4947 offset |= exchange ?
4948 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4949 }
4950
4951 if (debug_displaced)
4952 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4953 "%.4x %.4x with offset %.8lx\n",
4954 link ? (exchange) ? "blx" : "bl" : "b",
4955 insn1, insn2, offset);
4956
4957 dsc->modinsn[0] = THUMB_NOP;
4958
4959 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4960 return 0;
4961 }
4962
4963 /* Copy B Thumb instructions. */
4964 static int
4965 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4966 arm_displaced_step_closure *dsc)
4967 {
4968 unsigned int cond = 0;
4969 int offset = 0;
4970 unsigned short bit_12_15 = bits (insn, 12, 15);
4971 CORE_ADDR from = dsc->insn_addr;
4972
4973 if (bit_12_15 == 0xd)
4974 {
4975 /* offset = SignExtend (imm8:0, 32) */
4976 offset = sbits ((insn << 1), 0, 8);
4977 cond = bits (insn, 8, 11);
4978 }
4979 else if (bit_12_15 == 0xe) /* Encoding T2 */
4980 {
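/* offset = SignExtend (imm11:0, 32) */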
4981 offset = sbits ((insn << 1), 0, 11);
4982 cond = INST_AL;
4983 }
4984
4985 if (debug_displaced)
4986 fprintf_unfiltered (gdb_stdlog,
4987 "displaced: copying b immediate insn %.4x "
4988 "with offset %d\n", insn, offset);
4989
4990 dsc->u.branch.cond = cond;
4991 dsc->u.branch.link = 0;
4992 dsc->u.branch.exchange = 0;
4993 dsc->u.branch.dest = from + 4 + offset;
4994
4995 dsc->modinsn[0] = THUMB_NOP;
4996
4997 dsc->cleanup = &cleanup_branch;
4998
4999 return 0;
5000 }
5001
5002 /* Copy BX/BLX with register-specified destinations. */
5003
5004 static void
5005 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5006 arm_displaced_step_closure *dsc, int link,
5007 unsigned int cond, unsigned int rm)
5008 {
5009 /* Implement "{BX,BLX}<cond> <reg>" as:
5010
5011 Preparation: cond <- instruction condition
5012 Insn: mov r0, r0 (nop)
5013 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5014
5015 Don't set r14 in cleanup for BX. */
5016
5017 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5018
5019 dsc->u.branch.cond = cond;
5020 dsc->u.branch.link = link;
5021
5022 dsc->u.branch.exchange = 1;
5023
5024 dsc->cleanup = &cleanup_branch;
5025 }
5026
5027 static int
5028 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5029 struct regcache *regs, arm_displaced_step_closure *dsc)
5030 {
5031 unsigned int cond = bits (insn, 28, 31);
5032 /* BX: x12xxx1x
5033 BLX: x12xxx3x. */
5034 int link = bit (insn, 5);
5035 unsigned int rm = bits (insn, 0, 3);
5036
5037 if (debug_displaced)
5038 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5039 (unsigned long) insn);
5040
5041 dsc->modinsn[0] = ARM_NOP;
5042
5043 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5044 return 0;
5045 }
5046
5047 static int
5048 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5049 struct regcache *regs,
5050 arm_displaced_step_closure *dsc)
5051 {
5052 int link = bit (insn, 7);
5053 unsigned int rm = bits (insn, 3, 6);
5054
5055 if (debug_displaced)
5056 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5057 (unsigned short) insn);
5058
5059 dsc->modinsn[0] = THUMB_NOP;
5060
5061 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5062
5063 return 0;
5064 }
5065
5066
5067 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5068
5069 static void
5070 cleanup_alu_imm (struct gdbarch *gdbarch,
5071 struct regcache *regs, arm_displaced_step_closure *dsc)
5072 {
5073 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5074 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5075 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5076 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5077 }
5078
5079 static int
5080 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5081 arm_displaced_step_closure *dsc)
5082 {
5083 unsigned int rn = bits (insn, 16, 19);
5084 unsigned int rd = bits (insn, 12, 15);
5085 unsigned int op = bits (insn, 21, 24);
5086 int is_mov = (op == 0xd);
5087 ULONGEST rd_val, rn_val;
5088
5089 if (!insn_references_pc (insn, 0x000ff000ul))
5090 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5091
5092 if (debug_displaced)
5093 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5094 "%.8lx\n", is_mov ? "move" : "ALU",
5095 (unsigned long) insn);
5096
5097 /* Instruction is of form:
5098
5099 <op><cond> rd, [rn,] #imm
5100
5101 Rewrite as:
5102
5103 Preparation: tmp1, tmp2 <- r0, r1;
5104 r0, r1 <- rd, rn
5105 Insn: <op><cond> r0, r1, #imm
5106 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5107 */
5108
5109 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5110 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5111 rn_val = displaced_read_reg (regs, dsc, rn);
5112 rd_val = displaced_read_reg (regs, dsc, rd);
5113 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5114 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5115 dsc->rd = rd;
5116
5117 if (is_mov)
5118 dsc->modinsn[0] = insn & 0xfff00fff;
5119 else
5120 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5121
5122 dsc->cleanup = &cleanup_alu_imm;
5123
5124 return 0;
5125 }
5126
5127 static int
5128 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5129 uint16_t insn2, struct regcache *regs,
5130 arm_displaced_step_closure *dsc)
5131 {
5132 unsigned int op = bits (insn1, 5, 8);
5133 unsigned int rn, rm, rd;
5134 ULONGEST rd_val, rn_val;
5135
5136 rn = bits (insn1, 0, 3); /* Rn */
5137 rm = bits (insn2, 0, 3); /* Rm */
5138 rd = bits (insn2, 8, 11); /* Rd */
5139
5140 /* This routine is only called for the MOV instruction. */
5141 gdb_assert (op == 0x2 && rn == 0xf);
5142
5143 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5144 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5145
5146 if (debug_displaced)
5147 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5148 "ALU", insn1, insn2);
5149
5150 /* Instruction is of form:
5151
5152 <op><cond> rd, [rn,] #imm
5153
5154 Rewrite as:
5155
5156 Preparation: tmp1, tmp2 <- r0, r1;
5157 r0, r1 <- rd, rn
5158 Insn: <op><cond> r0, r1, #imm
5159 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5160 */
5161
5162 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5163 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5164 rn_val = displaced_read_reg (regs, dsc, rn);
5165 rd_val = displaced_read_reg (regs, dsc, rd);
5166 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5167 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5168 dsc->rd = rd;
5169
5170 dsc->modinsn[0] = insn1;
5171 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5172 dsc->numinsns = 2;
5173
5174 dsc->cleanup = &cleanup_alu_imm;
5175
5176 return 0;
5177 }
5178
5179 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5180
5181 static void
5182 cleanup_alu_reg (struct gdbarch *gdbarch,
5183 struct regcache *regs, arm_displaced_step_closure *dsc)
5184 {
5185 ULONGEST rd_val;
5186 int i;
5187
5188 rd_val = displaced_read_reg (regs, dsc, 0);
5189
5190 for (i = 0; i < 3; i++)
5191 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5192
5193 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5194 }
5195
5196 static void
5197 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5198 arm_displaced_step_closure *dsc,
5199 unsigned int rd, unsigned int rn, unsigned int rm)
5200 {
5201 ULONGEST rd_val, rn_val, rm_val;
5202
5203 /* Instruction is of form:
5204
5205 <op><cond> rd, [rn,] rm [, <shift>]
5206
5207 Rewrite as:
5208
5209 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5210 r0, r1, r2 <- rd, rn, rm
5211 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5212 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5213 */
5214
5215 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5216 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5217 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5218 rd_val = displaced_read_reg (regs, dsc, rd);
5219 rn_val = displaced_read_reg (regs, dsc, rn);
5220 rm_val = displaced_read_reg (regs, dsc, rm);
5221 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5222 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5223 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5224 dsc->rd = rd;
5225
5226 dsc->cleanup = &cleanup_alu_reg;
5227 }
5228
5229 static int
5230 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5231 arm_displaced_step_closure *dsc)
5232 {
5233 unsigned int op = bits (insn, 21, 24);
5234 int is_mov = (op == 0xd);
5235
5236 if (!insn_references_pc (insn, 0x000ff00ful))
5237 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5238
5239 if (debug_displaced)
5240 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5241 is_mov ? "move" : "ALU", (unsigned long) insn);
5242
5243 if (is_mov)
5244 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5245 else
5246 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5247
5248 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5249 bits (insn, 0, 3));
5250 return 0;
5251 }
5252
5253 static int
5254 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5255 struct regcache *regs,
5256 arm_displaced_step_closure *dsc)
5257 {
5258 unsigned rm, rd;
5259
5260 rm = bits (insn, 3, 6);
5261 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5262
5263 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5264 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5265
5266 if (debug_displaced)
5267 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5268 (unsigned short) insn);
5269
5270 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5271
5272 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5273
5274 return 0;
5275 }
5276
5277 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5278
5279 static void
5280 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5281 struct regcache *regs,
5282 arm_displaced_step_closure *dsc)
5283 {
5284 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5285 int i;
5286
5287 for (i = 0; i < 4; i++)
5288 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5289
5290 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5291 }
5292
5293 static void
5294 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5295 arm_displaced_step_closure *dsc,
5296 unsigned int rd, unsigned int rn, unsigned int rm,
5297 unsigned rs)
5298 {
5299 int i;
5300 ULONGEST rd_val, rn_val, rm_val, rs_val;
5301
5302 /* Instruction is of form:
5303
5304 <op><cond> rd, [rn,] rm, <shift> rs
5305
5306 Rewrite as:
5307
5308 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5309 r0, r1, r2, r3 <- rd, rn, rm, rs
5310 Insn: <op><cond> r0, r1, r2, <shift> r3
5311 Cleanup: tmp5 <- r0
5312 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5313 rd <- tmp5
5314 */
5315
5316 for (i = 0; i < 4; i++)
5317 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5318
5319 rd_val = displaced_read_reg (regs, dsc, rd);
5320 rn_val = displaced_read_reg (regs, dsc, rn);
5321 rm_val = displaced_read_reg (regs, dsc, rm);
5322 rs_val = displaced_read_reg (regs, dsc, rs);
5323 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5326 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5327 dsc->rd = rd;
5328 dsc->cleanup = &cleanup_alu_shifted_reg;
5329 }
5330
5331 static int
5332 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5333 struct regcache *regs,
5334 arm_displaced_step_closure *dsc)
5335 {
5336 unsigned int op = bits (insn, 21, 24);
5337 int is_mov = (op == 0xd);
5338 unsigned int rd, rn, rm, rs;
5339
5340 if (!insn_references_pc (insn, 0x000fff0ful))
5341 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5342
5343 if (debug_displaced)
5344 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5345 "%.8lx\n", is_mov ? "move" : "ALU",
5346 (unsigned long) insn);
5347
5348 rn = bits (insn, 16, 19);
5349 rm = bits (insn, 0, 3);
5350 rs = bits (insn, 8, 11);
5351 rd = bits (insn, 12, 15);
5352
5353 if (is_mov)
5354 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5355 else
5356 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5357
5358 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5359
5360 return 0;
5361 }
5362
5363 /* Clean up load instructions. */
5364
5365 static void
5366 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5367 arm_displaced_step_closure *dsc)
5368 {
5369 ULONGEST rt_val, rt_val2 = 0, rn_val;
5370
5371 rt_val = displaced_read_reg (regs, dsc, 0);
5372 if (dsc->u.ldst.xfersize == 8)
5373 rt_val2 = displaced_read_reg (regs, dsc, 1);
5374 rn_val = displaced_read_reg (regs, dsc, 2);
5375
5376 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5377 if (dsc->u.ldst.xfersize > 4)
5378 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5379 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5380 if (!dsc->u.ldst.immed)
5381 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5382
5383 /* Handle register writeback. */
5384 if (dsc->u.ldst.writeback)
5385 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5386 /* Put result in right place. */
5387 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5388 if (dsc->u.ldst.xfersize == 8)
5389 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5390 }
5391
5392 /* Clean up store instructions. */
5393
5394 static void
5395 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5396 arm_displaced_step_closure *dsc)
5397 {
5398 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5399
5400 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5401 if (dsc->u.ldst.xfersize > 4)
5402 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5403 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5404 if (!dsc->u.ldst.immed)
5405 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5406 if (!dsc->u.ldst.restore_r4)
5407 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5408
5409 /* Writeback. */
5410 if (dsc->u.ldst.writeback)
5411 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5412 }
5413
5414 /* Copy "extra" load/store instructions. These are halfword/doubleword
5415 transfers, which have a different encoding to byte/word transfers. */
5416
5417 static int
5418 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5419 struct regcache *regs, arm_displaced_step_closure *dsc)
5420 {
5421 unsigned int op1 = bits (insn, 20, 24);
5422 unsigned int op2 = bits (insn, 5, 6);
5423 unsigned int rt = bits (insn, 12, 15);
5424 unsigned int rn = bits (insn, 16, 19);
5425 unsigned int rm = bits (insn, 0, 3);
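/* Whether each decoded OPCODE (computed below from op1/op2) is a load, and
   its transfer size in bytes.  */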
5426 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5427 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5428 int immed = (op1 & 0x4) != 0;
5429 int opcode;
5430 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5431
5432 if (!insn_references_pc (insn, 0x000ff00ful))
5433 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5434
5435 if (debug_displaced)
5436 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5437 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5438 (unsigned long) insn);
5439
5440 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5441
5442 if (opcode < 0)
5443 internal_error (__FILE__, __LINE__,
5444 _("copy_extra_ld_st: instruction decode error"));
5445
5446 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5447 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5448 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5449 if (!immed)
5450 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5451
5452 rt_val = displaced_read_reg (regs, dsc, rt);
5453 if (bytesize[opcode] == 8)
5454 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5455 rn_val = displaced_read_reg (regs, dsc, rn);
5456 if (!immed)
5457 rm_val = displaced_read_reg (regs, dsc, rm);
5458
5459 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5460 if (bytesize[opcode] == 8)
5461 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5462 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5463 if (!immed)
5464 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5465
5466 dsc->rd = rt;
5467 dsc->u.ldst.xfersize = bytesize[opcode];
5468 dsc->u.ldst.rn = rn;
5469 dsc->u.ldst.immed = immed;
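  /* Base register writeback happens for post-indexed addressing (P, bit 24,
     clear) or for pre-indexed addressing with the W bit (bit 21) set.  */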
5470 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5471 dsc->u.ldst.restore_r4 = 0;
5472
5473 if (immed)
5474 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5475 ->
5476 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5477 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5478 else
5479 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5480 ->
5481 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5482 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5483
5484 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5485
5486 return 0;
5487 }
5488
5489 /* Copy byte/half word/word loads and stores. */
5490
5491 static void
5492 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5493 arm_displaced_step_closure *dsc, int load,
5494 int immed, int writeback, int size, int usermode,
5495 int rt, int rm, int rn)
5496 {
5497 ULONGEST rt_val, rn_val, rm_val = 0;
5498
5499 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5500 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5501 if (!immed)
5502 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5503 if (!load)
5504 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5505
5506 rt_val = displaced_read_reg (regs, dsc, rt);
5507 rn_val = displaced_read_reg (regs, dsc, rn);
5508 if (!immed)
5509 rm_val = displaced_read_reg (regs, dsc, rm);
5510
5511 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5512 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5513 if (!immed)
5514 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5515 dsc->rd = rt;
5516 dsc->u.ldst.xfersize = size;
5517 dsc->u.ldst.rn = rn;
5518 dsc->u.ldst.immed = immed;
5519 dsc->u.ldst.writeback = writeback;
5520
5521 /* To write PC we can do:
5522
5523 Before this sequence of instructions:
5524 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5525 r2 is the Rn value obtained from displaced_read_reg.
5526
5527 Insn1: push {pc} Write address of STR instruction + offset on stack
5528 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5529 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5530 = addr(Insn1) + offset - addr(Insn3) - 8
5531 = offset - 16
5532 Insn4: add r4, r4, #8 r4 = offset - 8
5533 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5534 = from + offset
5535 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5536
5537 Otherwise we don't know what value to write for PC, since the offset is
5538 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5539 of this can be found in Section "Saving from r15" in
5540 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
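  /* Worked check of the sequence above, assuming a core whose STR of the PC
     stores PC + 8 (i.e. offset = 8): after Insn3 r4 = 8 - 16 = -8, after
     Insn4 r4 = 0, and Insn5 leaves r0 = (from + 8) + 0 = from + 8, which is
     exactly the value the original instruction at FROM would have stored.  */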
5541
5542 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5543 }
5544
5545
5546 static int
5547 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5548 uint16_t insn2, struct regcache *regs,
5549 arm_displaced_step_closure *dsc, int size)
5550 {
5551 unsigned int u_bit = bit (insn1, 7);
5552 unsigned int rt = bits (insn2, 12, 15);
5553 int imm12 = bits (insn2, 0, 11);
5554 ULONGEST pc_val;
5555
5556 if (debug_displaced)
5557 fprintf_unfiltered (gdb_stdlog,
5558 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5559 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5560 imm12);
5561
5562 if (!u_bit)
5563 imm12 = -1 * imm12;
5564
5565 /* Rewrite instruction LDR Rt imm12 into:
5566
5567 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5568
5569 LDR R0, [R2, R3]
5570
5571 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
5572
5573
5574 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5575 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5576 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5577
5578 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5579
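  /* LDR (literal) uses Align(PC, 4) as the base address, so clear the low
     two bits of the PC value read above.  */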
5580 pc_val = pc_val & 0xfffffffc;
5581
5582 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5583 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5584
5585 dsc->rd = rt;
5586
5587 dsc->u.ldst.xfersize = size;
5588 dsc->u.ldst.immed = 0;
5589 dsc->u.ldst.writeback = 0;
5590 dsc->u.ldst.restore_r4 = 0;
5591
5592 /* LDR.W R0, [R2, R3] */
5593 dsc->modinsn[0] = 0xf852;
5594 dsc->modinsn[1] = 0x3;
5595 dsc->numinsns = 2;
5596
5597 dsc->cleanup = &cleanup_load;
5598
5599 return 0;
5600 }
5601
5602 static int
5603 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5604 uint16_t insn2, struct regcache *regs,
5605 arm_displaced_step_closure *dsc,
5606 int writeback, int immed)
5607 {
5608 unsigned int rt = bits (insn2, 12, 15);
5609 unsigned int rn = bits (insn1, 0, 3);
5610 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5611 /* In LDR (register), there is also a register Rm, which is not allowed to
5612 be PC, so we don't have to check it. */
5613
5614 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5615 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5616 dsc);
5617
5618 if (debug_displaced)
5619 fprintf_unfiltered (gdb_stdlog,
5620 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5621 rt, rn, insn1, insn2);
5622
5623 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5624 0, rt, rm, rn);
5625
5626 dsc->u.ldst.restore_r4 = 0;
5627
5628 if (immed)
5629 /* ldr[b]<cond> rt, [rn, #imm], etc.
5630 ->
5631 ldr[b]<cond> r0, [r2, #imm]. */
5632 {
5633 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5634 dsc->modinsn[1] = insn2 & 0x0fff;
5635 }
5636 else
5637 /* ldr[b]<cond> rt, [rn, rm], etc.
5638 ->
5639 ldr[b]<cond> r0, [r2, r3]. */
5640 {
5641 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5642 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5643 }
5644
5645 dsc->numinsns = 2;
5646
5647 return 0;
5648 }
5649
5650
5651 static int
5652 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5653 struct regcache *regs,
5654 arm_displaced_step_closure *dsc,
5655 int load, int size, int usermode)
5656 {
5657 int immed = !bit (insn, 25);
5658 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5659 unsigned int rt = bits (insn, 12, 15);
5660 unsigned int rn = bits (insn, 16, 19);
5661 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5662
5663 if (!insn_references_pc (insn, 0x000ff00ful))
5664 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5665
5666 if (debug_displaced)
5667 fprintf_unfiltered (gdb_stdlog,
5668 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5669 load ? (size == 1 ? "ldrb" : "ldr")
5670 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5671 rt, rn,
5672 (unsigned long) insn);
5673
5674 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5675 usermode, rt, rm, rn);
5676
5677 if (load || rt != ARM_PC_REGNUM)
5678 {
5679 dsc->u.ldst.restore_r4 = 0;
5680
5681 if (immed)
5682 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5683 ->
5684 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5685 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5686 else
5687 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5688 ->
5689 {ldr,str}[b]<cond> r0, [r2, r3]. */
5690 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5691 }
5692 else
5693 {
5694 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5695 dsc->u.ldst.restore_r4 = 1;
5696 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5697 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5698 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5699 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5700 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5701
5702 /* As above. */
5703 if (immed)
5704 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5705 else
5706 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5707
5708 dsc->numinsns = 6;
5709 }
5710
5711 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5712
5713 return 0;
5714 }
5715
5716 /* Cleanup LDM instructions with fully-populated register list. This is an
5717 unfortunate corner case: it's impossible to implement correctly by modifying
5718 the instruction. The issue is as follows: we have an instruction,
5719
5720 ldm rN, {r0-r15}
5721
5722 which we must rewrite to avoid loading PC. A possible solution would be to
5723 do the load in two halves, something like (with suitable cleanup
5724 afterwards):
5725
5726 mov r8, rN
5727 ldm[id][ab] r8!, {r0-r7}
5728 str r7, <temp>
5729 ldm[id][ab] r8, {r7-r14}
5730 <bkpt>
5731
5732 but at present there's no suitable place for <temp>, since the scratch space
5733 is overwritten before the cleanup routine is called. For now, we simply
5734 emulate the instruction. */
5735
5736 static void
5737 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5738 arm_displaced_step_closure *dsc)
5739 {
5740 int inc = dsc->u.block.increment;
5741 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5742 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5743 uint32_t regmask = dsc->u.block.regmask;
5744 int regno = inc ? 0 : 15;
5745 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5746 int exception_return = dsc->u.block.load && dsc->u.block.user
5747 && (regmask & 0x8000) != 0;
5748 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5749 int do_transfer = condition_true (dsc->u.block.cond, status);
5750 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5751
5752 if (!do_transfer)
5753 return;
5754
5755 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5756 sensible we can do here. Complain loudly. */
5757 if (exception_return)
5758 error (_("Cannot single-step exception return"));
5759
5760 /* We don't handle any stores here for now. */
5761 gdb_assert (dsc->u.block.load != 0);
5762
5763 if (debug_displaced)
5764 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5765 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5766 dsc->u.block.increment ? "inc" : "dec",
5767 dsc->u.block.before ? "before" : "after");
5768
5769 while (regmask)
5770 {
5771 uint32_t memword;
5772
5773 if (inc)
5774 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5775 regno++;
5776 else
5777 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5778 regno--;
5779
5780 xfer_addr += bump_before;
5781
5782 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5783 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5784
5785 xfer_addr += bump_after;
5786
5787 regmask &= ~(1 << regno);
5788 }
5789
5790 if (dsc->u.block.writeback)
5791 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5792 CANNOT_WRITE_PC);
5793 }
5794
5795 /* Clean up an STM which included the PC in the register list. */
5796
5797 static void
5798 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5799 arm_displaced_step_closure *dsc)
5800 {
5801 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5802 int store_executed = condition_true (dsc->u.block.cond, status);
5803 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5804 CORE_ADDR stm_insn_addr;
5805 uint32_t pc_val;
5806 long offset;
5807 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5808
5809 /* If condition code fails, there's nothing else to do. */
5810 if (!store_executed)
5811 return;
5812
5813 if (dsc->u.block.increment)
5814 {
5815 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5816
5817 if (dsc->u.block.before)
5818 pc_stored_at += 4;
5819 }
5820 else
5821 {
5822 pc_stored_at = dsc->u.block.xfer_addr;
5823
5824 if (dsc->u.block.before)
5825 pc_stored_at -= 4;
5826 }
5827
5828 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5829 stm_insn_addr = dsc->scratch_base;
5830 offset = pc_val - stm_insn_addr;
5831
5832 if (debug_displaced)
5833 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5834 "STM instruction\n", offset);
5835
5836 /* Rewrite the stored PC to the proper value for the non-displaced original
5837 instruction. */
5838 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5839 dsc->insn_addr + offset);
5840 }
5841
5842 /* Clean up an LDM which includes the PC in the register list. We clumped all
5843 the registers in the transferred list into a contiguous range r0...rX (to
5844 avoid loading PC directly and losing control of the debugged program), so we
5845 must undo that here. */
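/* For illustration (an assumed example, not taken from a real program):
   ldm r0, {r4, r7, pc} is executed out of line as ldm r0, {r0, r1, r2}.
   This cleanup then moves r2 into the PC, r1 into r7 and r0 into r4, and
   finally restores r0, r1 and r2 from dsc->tmp[], since none of them appear
   in the original register list.  */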
5846
5847 static void
5848 cleanup_block_load_pc (struct gdbarch *gdbarch,
5849 struct regcache *regs,
5850 arm_displaced_step_closure *dsc)
5851 {
5852 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5853 int load_executed = condition_true (dsc->u.block.cond, status);
5854 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5855 unsigned int regs_loaded = bitcount (mask);
5856 unsigned int num_to_shuffle = regs_loaded, clobbered;
5857
5858 /* The method employed here will fail if the register list is fully populated
5859 (we need to avoid loading PC directly). */
5860 gdb_assert (num_to_shuffle < 16);
5861
5862 if (!load_executed)
5863 return;
5864
5865 clobbered = (1 << num_to_shuffle) - 1;
5866
5867 while (num_to_shuffle > 0)
5868 {
5869 if ((mask & (1 << write_reg)) != 0)
5870 {
5871 unsigned int read_reg = num_to_shuffle - 1;
5872
5873 if (read_reg != write_reg)
5874 {
5875 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5876 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5877 if (debug_displaced)
5878 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5879 "loaded register r%d to r%d\n"), read_reg,
5880 write_reg);
5881 }
5882 else if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5884 "r%d already in the right place\n"),
5885 write_reg);
5886
5887 clobbered &= ~(1 << write_reg);
5888
5889 num_to_shuffle--;
5890 }
5891
5892 write_reg--;
5893 }
5894
5895 /* Restore any registers we scribbled over. */
5896 for (write_reg = 0; clobbered != 0; write_reg++)
5897 {
5898 if ((clobbered & (1 << write_reg)) != 0)
5899 {
5900 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5901 CANNOT_WRITE_PC);
5902 if (debug_displaced)
5903 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5904 "clobbered register r%d\n"), write_reg);
5905 clobbered &= ~(1 << write_reg);
5906 }
5907 }
5908
5909 /* Perform register writeback manually. */
5910 if (dsc->u.block.writeback)
5911 {
5912 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5913
5914 if (dsc->u.block.increment)
5915 new_rn_val += regs_loaded * 4;
5916 else
5917 new_rn_val -= regs_loaded * 4;
5918
5919 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5920 CANNOT_WRITE_PC);
5921 }
5922 }
5923
5924 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5925 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5926
5927 static int
5928 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5929 struct regcache *regs,
5930 arm_displaced_step_closure *dsc)
5931 {
5932 int load = bit (insn, 20);
5933 int user = bit (insn, 22);
5934 int increment = bit (insn, 23);
5935 int before = bit (insn, 24);
5936 int writeback = bit (insn, 21);
5937 int rn = bits (insn, 16, 19);
5938
5939 /* Block transfers which don't mention PC can be run directly
5940 out-of-line. */
5941 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5942 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5943
5944 if (rn == ARM_PC_REGNUM)
5945 {
5946 warning (_("displaced: Unpredictable LDM or STM with "
5947 "base register r15"));
5948 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5949 }
5950
5951 if (debug_displaced)
5952 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5953 "%.8lx\n", (unsigned long) insn);
5954
5955 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5956 dsc->u.block.rn = rn;
5957
5958 dsc->u.block.load = load;
5959 dsc->u.block.user = user;
5960 dsc->u.block.increment = increment;
5961 dsc->u.block.before = before;
5962 dsc->u.block.writeback = writeback;
5963 dsc->u.block.cond = bits (insn, 28, 31);
5964
5965 dsc->u.block.regmask = insn & 0xffff;
5966
5967 if (load)
5968 {
5969 if ((insn & 0xffff) == 0xffff)
5970 {
5971 /* LDM with a fully-populated register list. This case is
5972 particularly tricky. Implement for now by fully emulating the
5973 instruction (which might not behave perfectly in all cases, but
5974 these instructions should be rare enough for that not to matter
5975 too much). */
5976 dsc->modinsn[0] = ARM_NOP;
5977
5978 dsc->cleanup = &cleanup_block_load_all;
5979 }
5980 else
5981 {
5982 /* LDM of a list of registers which includes PC. Implement by
5983 rewriting the list of registers to be transferred into a
5984 contiguous chunk r0...rX before doing the transfer, then shuffling
5985 registers into the correct places in the cleanup routine. */
5986 unsigned int regmask = insn & 0xffff;
5987 unsigned int num_in_list = bitcount (regmask), new_regmask;
5988 unsigned int i;
5989
5990 for (i = 0; i < num_in_list; i++)
5991 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5992
5993 /* Writeback makes things complicated. We need to avoid clobbering
5994 the base register with one of the registers in our modified
5995 register list, but just using a different register can't work in
5996 all cases, e.g.:
5997
5998 ldm r14!, {r0-r13,pc}
5999
6000 which would need to be rewritten as:
6001
6002 ldm rN!, {r0-r14}
6003
6004 but that can't work, because there's no free register for N.
6005
6006 Solve this by turning off the writeback bit, and emulating
6007 writeback manually in the cleanup routine. */
6008
6009 if (writeback)
6010 insn &= ~(1 << 21);
6011
6012 new_regmask = (1 << num_in_list) - 1;
6013
6014 if (debug_displaced)
6015 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6016 "{..., pc}: original reg list %.4x, modified "
6017 "list %.4x\n"), rn, writeback ? "!" : "",
6018 (int) insn & 0xffff, new_regmask);
6019
6020 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6021
6022 dsc->cleanup = &cleanup_block_load_pc;
6023 }
6024 }
6025 else
6026 {
6027 /* STM of a list of registers which includes PC. Run the instruction
6028 as-is, but out of line: this will store the wrong value for the PC,
6029 so we must manually fix up the memory in the cleanup routine.
6030 Doing things this way has the advantage that we can auto-detect
6031 the offset of the PC write (which is architecture-dependent) in
6032 the cleanup routine. */
6033 dsc->modinsn[0] = insn;
6034
6035 dsc->cleanup = &cleanup_block_store_pc;
6036 }
6037
6038 return 0;
6039 }
6040
6041 static int
6042 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6043 struct regcache *regs,
6044 arm_displaced_step_closure *dsc)
6045 {
6046 int rn = bits (insn1, 0, 3);
6047 int load = bit (insn1, 4);
6048 int writeback = bit (insn1, 5);
6049
6050 /* Block transfers which don't mention PC can be run directly
6051 out-of-line. */
6052 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6053 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6054
6055 if (rn == ARM_PC_REGNUM)
6056 {
6057 warning (_("displaced: Unpredictable LDM or STM with "
6058 "base register r15"));
6059 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6060 "unpredictable ldm/stm", dsc);
6061 }
6062
6063 if (debug_displaced)
6064 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6065 "%.4x%.4x\n", insn1, insn2);
6066
6067 /* Clear bit 13, since it should always be zero. */
6068 dsc->u.block.regmask = (insn2 & 0xdfff);
6069 dsc->u.block.rn = rn;
6070
6071 dsc->u.block.load = load;
6072 dsc->u.block.user = 0;
6073 dsc->u.block.increment = bit (insn1, 7);
6074 dsc->u.block.before = bit (insn1, 8);
6075 dsc->u.block.writeback = writeback;
6076 dsc->u.block.cond = INST_AL;
6077 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6078
6079 if (load)
6080 {
6081 if (dsc->u.block.regmask == 0xffff)
6082 {
6083 /* This case cannot happen: bit 13 was cleared above, so the register mask can never be 0xffff. */
6084 gdb_assert (0);
6085 }
6086 else
6087 {
6088 unsigned int regmask = dsc->u.block.regmask;
6089 unsigned int num_in_list = bitcount (regmask), new_regmask;
6090 unsigned int i;
6091
6092 for (i = 0; i < num_in_list; i++)
6093 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6094
6095 if (writeback)
6096 insn1 &= ~(1 << 5);
6097
6098 new_regmask = (1 << num_in_list) - 1;
6099
6100 if (debug_displaced)
6101 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6102 "{..., pc}: original reg list %.4x, modified "
6103 "list %.4x\n"), rn, writeback ? "!" : "",
6104 (int) dsc->u.block.regmask, new_regmask);
6105
6106 dsc->modinsn[0] = insn1;
6107 dsc->modinsn[1] = (new_regmask & 0xffff);
6108 dsc->numinsns = 2;
6109
6110 dsc->cleanup = &cleanup_block_load_pc;
6111 }
6112 }
6113 else
6114 {
6115 dsc->modinsn[0] = insn1;
6116 dsc->modinsn[1] = insn2;
6117 dsc->numinsns = 2;
6118 dsc->cleanup = &cleanup_block_store_pc;
6119 }
6120 return 0;
6121 }
6122
6123 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6124 This is used to avoid a dependency on BFD's bfd_endian enum. */
6125
6126 ULONGEST
6127 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6128 int byte_order)
6129 {
6130 return read_memory_unsigned_integer (memaddr, len,
6131 (enum bfd_endian) byte_order);
6132 }
6133
6134 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6135
6136 CORE_ADDR
6137 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6138 CORE_ADDR val)
6139 {
6140 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6141 }
6142
6143 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6144
6145 static CORE_ADDR
6146 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6147 {
6148 return 0;
6149 }
6150
6151 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6152
6153 int
6154 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6155 {
6156 return arm_is_thumb (self->regcache);
6157 }
6158
6159 /* single_step() is called just before we want to resume the inferior,
6160 if we want to single-step it but there is no hardware or kernel
6161 single-step support. We find the targets of the coming instructions
6162 and set breakpoints on them. */
6163
6164 std::vector<CORE_ADDR>
6165 arm_software_single_step (struct regcache *regcache)
6166 {
6167 struct gdbarch *gdbarch = regcache->arch ();
6168 struct arm_get_next_pcs next_pcs_ctx;
6169
6170 arm_get_next_pcs_ctor (&next_pcs_ctx,
6171 &arm_get_next_pcs_ops,
6172 gdbarch_byte_order (gdbarch),
6173 gdbarch_byte_order_for_code (gdbarch),
6174 0,
6175 regcache);
6176
6177 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6178
6179 for (CORE_ADDR &pc_ref : next_pcs)
6180 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6181
6182 return next_pcs;
6183 }
6184
6185 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6186 for Linux, where some SVC instructions must be treated specially. */
6187
6188 static void
6189 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6190 arm_displaced_step_closure *dsc)
6191 {
6192 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6193
6194 if (debug_displaced)
6195 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6196 "%.8lx\n", (unsigned long) resume_addr);
6197
6198 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6199 }
6200
6201
6202 /* Common copy routine for svc instruction. */
6203
6204 static int
6205 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6206 arm_displaced_step_closure *dsc)
6207 {
6208 /* Preparation: none.
6209 Insn: unmodified svc.
6210 Cleanup: pc <- insn_addr + insn_size. */
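  /* For example, for a 32-bit ARM-mode SVC at address FROM the cleanup
     writes FROM + 4 to the PC; for a 16-bit Thumb SVC it writes FROM + 2.  */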
6211
6212 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6213 instruction. */
6214 dsc->wrote_to_pc = 1;
6215
6216 /* Allow OS-specific code to override SVC handling. */
6217 if (dsc->u.svc.copy_svc_os)
6218 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6219 else
6220 {
6221 dsc->cleanup = &cleanup_svc;
6222 return 0;
6223 }
6224 }
6225
6226 static int
6227 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6228 struct regcache *regs, arm_displaced_step_closure *dsc)
6229 {
6230
6231 if (debug_displaced)
6232 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6233 (unsigned long) insn);
6234
6235 dsc->modinsn[0] = insn;
6236
6237 return install_svc (gdbarch, regs, dsc);
6238 }
6239
6240 static int
6241 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6242 struct regcache *regs, arm_displaced_step_closure *dsc)
6243 {
6244
6245 if (debug_displaced)
6246 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6247 insn);
6248
6249 dsc->modinsn[0] = insn;
6250
6251 return install_svc (gdbarch, regs, dsc);
6252 }
6253
6254 /* Copy undefined instructions. */
6255
6256 static int
6257 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6258 arm_displaced_step_closure *dsc)
6259 {
6260 if (debug_displaced)
6261 fprintf_unfiltered (gdb_stdlog,
6262 "displaced: copying undefined insn %.8lx\n",
6263 (unsigned long) insn);
6264
6265 dsc->modinsn[0] = insn;
6266
6267 return 0;
6268 }
6269
6270 static int
6271 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6272 arm_displaced_step_closure *dsc)
6273 {
6274
6275 if (debug_displaced)
6276 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6277 "%.4x %.4x\n", (unsigned short) insn1,
6278 (unsigned short) insn2);
6279
6280 dsc->modinsn[0] = insn1;
6281 dsc->modinsn[1] = insn2;
6282 dsc->numinsns = 2;
6283
6284 return 0;
6285 }
6286
6287 /* Copy unpredictable instructions. */
6288
6289 static int
6290 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6291 arm_displaced_step_closure *dsc)
6292 {
6293 if (debug_displaced)
6294 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6295 "%.8lx\n", (unsigned long) insn);
6296
6297 dsc->modinsn[0] = insn;
6298
6299 return 0;
6300 }
6301
6302 /* The decode_* functions are instruction decoding helpers. They mostly follow
6303 the presentation in the ARM ARM. */
6304
6305 static int
6306 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6307 struct regcache *regs,
6308 arm_displaced_step_closure *dsc)
6309 {
6310 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6311 unsigned int rn = bits (insn, 16, 19);
6312
6313 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6314 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6315 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6316 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6317 else if ((op1 & 0x60) == 0x20)
6318 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6319 else if ((op1 & 0x71) == 0x40)
6320 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6321 dsc);
6322 else if ((op1 & 0x77) == 0x41)
6323 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6324 else if ((op1 & 0x77) == 0x45)
6325 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6326 else if ((op1 & 0x77) == 0x51)
6327 {
6328 if (rn != 0xf)
6329 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6330 else
6331 return arm_copy_unpred (gdbarch, insn, dsc);
6332 }
6333 else if ((op1 & 0x77) == 0x55)
6334 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6335 else if (op1 == 0x57)
6336 switch (op2)
6337 {
6338 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6339 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6340 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6341 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6342 default: return arm_copy_unpred (gdbarch, insn, dsc);
6343 }
6344 else if ((op1 & 0x63) == 0x43)
6345 return arm_copy_unpred (gdbarch, insn, dsc);
6346 else if ((op2 & 0x1) == 0x0)
6347 switch (op1 & ~0x80)
6348 {
6349 case 0x61:
6350 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6351 case 0x65:
6352 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6353 case 0x71: case 0x75:
6354 /* pld/pldw reg. */
6355 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6356 case 0x63: case 0x67: case 0x73: case 0x77:
6357 return arm_copy_unpred (gdbarch, insn, dsc);
6358 default:
6359 return arm_copy_undef (gdbarch, insn, dsc);
6360 }
6361 else
6362 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6363 }
6364
6365 static int
6366 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6367 struct regcache *regs,
6368 arm_displaced_step_closure *dsc)
6369 {
6370 if (bit (insn, 27) == 0)
6371 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6372 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
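  /* For example, BLX (immediate) in the unconditional space has bits 26-24
     equal to 0b01x, so the switch value falls in the range 0x4-0x7 (bit 20
     is just part of the branch offset), selecting arm_copy_b_bl_blx below.  */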
6373 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6374 {
6375 case 0x0: case 0x2:
6376 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6377
6378 case 0x1: case 0x3:
6379 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6380
6381 case 0x4: case 0x5: case 0x6: case 0x7:
6382 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6383
6384 case 0x8:
6385 switch ((insn & 0xe00000) >> 21)
6386 {
6387 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6388 /* stc/stc2. */
6389 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6390
6391 case 0x2:
6392 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6393
6394 default:
6395 return arm_copy_undef (gdbarch, insn, dsc);
6396 }
6397
6398 case 0x9:
6399 {
6400 int rn_f = (bits (insn, 16, 19) == 0xf);
6401 switch ((insn & 0xe00000) >> 21)
6402 {
6403 case 0x1: case 0x3:
6404 /* ldc/ldc2 imm (undefined for rn == pc). */
6405 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6406 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6407
6408 case 0x2:
6409 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6410
6411 case 0x4: case 0x5: case 0x6: case 0x7:
6412 /* ldc/ldc2 lit (undefined for rn != pc). */
6413 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6414 : arm_copy_undef (gdbarch, insn, dsc);
6415
6416 default:
6417 return arm_copy_undef (gdbarch, insn, dsc);
6418 }
6419 }
6420
6421 case 0xa:
6422 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6423
6424 case 0xb:
6425 if (bits (insn, 16, 19) == 0xf)
6426 /* ldc/ldc2 lit. */
6427 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6428 else
6429 return arm_copy_undef (gdbarch, insn, dsc);
6430
6431 case 0xc:
6432 if (bit (insn, 4))
6433 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6434 else
6435 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6436
6437 case 0xd:
6438 if (bit (insn, 4))
6439 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6440 else
6441 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6442
6443 default:
6444 return arm_copy_undef (gdbarch, insn, dsc);
6445 }
6446 }
6447
6448 /* Decode miscellaneous instructions in dp/misc encoding space. */
6449
6450 static int
6451 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6452 struct regcache *regs,
6453 arm_displaced_step_closure *dsc)
6454 {
6455 unsigned int op2 = bits (insn, 4, 6);
6456 unsigned int op = bits (insn, 21, 22);
6457
6458 switch (op2)
6459 {
6460 case 0x0:
6461 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6462
6463 case 0x1:
6464 if (op == 0x1) /* bx. */
6465 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6466 else if (op == 0x3)
6467 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6468 else
6469 return arm_copy_undef (gdbarch, insn, dsc);
6470
6471 case 0x2:
6472 if (op == 0x1)
6473 /* Not really supported. */
6474 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6475 else
6476 return arm_copy_undef (gdbarch, insn, dsc);
6477
6478 case 0x3:
6479 if (op == 0x1)
6480 return arm_copy_bx_blx_reg (gdbarch, insn,
6481 regs, dsc); /* blx register. */
6482 else
6483 return arm_copy_undef (gdbarch, insn, dsc);
6484
6485 case 0x5:
6486 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6487
6488 case 0x7:
6489 if (op == 0x1)
6490 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6491 else if (op == 0x3)
6492 /* Not really supported. */
6493 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6494 /* Fall through. */
6495
6496 default:
6497 return arm_copy_undef (gdbarch, insn, dsc);
6498 }
6499 }
6500
6501 static int
6502 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6503 struct regcache *regs,
6504 arm_displaced_step_closure *dsc)
6505 {
6506 if (bit (insn, 25))
6507 switch (bits (insn, 20, 24))
6508 {
6509 case 0x10:
6510 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6511
6512 case 0x14:
6513 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6514
6515 case 0x12: case 0x16:
6516 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6517
6518 default:
6519 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6520 }
6521 else
6522 {
6523 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6524
6525 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6526 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6527 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6528 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6529 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6530 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6531 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6532 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6533 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6534 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6535 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6536 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6537 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6538 /* 2nd arg means "unprivileged". */
6539 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6540 dsc);
6541 }
6542
6543 /* Should be unreachable. */
6544 return 1;
6545 }
6546
6547 static int
6548 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6549 struct regcache *regs,
6550 arm_displaced_step_closure *dsc)
6551 {
6552 int a = bit (insn, 25), b = bit (insn, 4);
6553 uint32_t op1 = bits (insn, 20, 24);
6554
6555 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6556 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6557 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6558 else if ((!a && (op1 & 0x17) == 0x02)
6559 || (a && (op1 & 0x17) == 0x02 && !b))
6560 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6561 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6562 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6563 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6564 else if ((!a && (op1 & 0x17) == 0x03)
6565 || (a && (op1 & 0x17) == 0x03 && !b))
6566 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6567 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6568 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6569 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6570 else if ((!a && (op1 & 0x17) == 0x06)
6571 || (a && (op1 & 0x17) == 0x06 && !b))
6572 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6573 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6574 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6575 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6576 else if ((!a && (op1 & 0x17) == 0x07)
6577 || (a && (op1 & 0x17) == 0x07 && !b))
6578 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6579
6580 /* Should be unreachable. */
6581 return 1;
6582 }
6583
6584 static int
6585 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6586 arm_displaced_step_closure *dsc)
6587 {
6588 switch (bits (insn, 20, 24))
6589 {
6590 case 0x00: case 0x01: case 0x02: case 0x03:
6591 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6592
6593 case 0x04: case 0x05: case 0x06: case 0x07:
6594 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6595
6596 case 0x08: case 0x09: case 0x0a: case 0x0b:
6597 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6598 return arm_copy_unmodified (gdbarch, insn,
6599 "decode/pack/unpack/saturate/reverse", dsc);
6600
6601 case 0x18:
6602 if (bits (insn, 5, 7) == 0) /* op2. */
6603 {
6604 if (bits (insn, 12, 15) == 0xf)
6605 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6606 else
6607 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6608 }
6609 else
6610 return arm_copy_undef (gdbarch, insn, dsc);
6611
6612 case 0x1a: case 0x1b:
6613 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6614 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6615 else
6616 return arm_copy_undef (gdbarch, insn, dsc);
6617
6618 case 0x1c: case 0x1d:
6619 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6620 {
6621 if (bits (insn, 0, 3) == 0xf)
6622 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6623 else
6624 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6625 }
6626 else
6627 return arm_copy_undef (gdbarch, insn, dsc);
6628
6629 case 0x1e: case 0x1f:
6630 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6631 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6632 else
6633 return arm_copy_undef (gdbarch, insn, dsc);
6634 }
6635
6636 /* Should be unreachable. */
6637 return 1;
6638 }
6639
6640 static int
6641 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6642 struct regcache *regs,
6643 arm_displaced_step_closure *dsc)
6644 {
6645 if (bit (insn, 25))
6646 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6647 else
6648 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6649 }
6650
6651 static int
6652 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6653 struct regcache *regs,
6654 arm_displaced_step_closure *dsc)
6655 {
6656 unsigned int opcode = bits (insn, 20, 24);
6657
6658 switch (opcode)
6659 {
6660 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6661 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6662
6663 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6664 case 0x12: case 0x16:
6665 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6666
6667 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6668 case 0x13: case 0x17:
6669 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6670
6671 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6672 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6673 /* Note: no writeback for these instructions. Bit 25 will always be
6674 zero though (via caller), so the following works OK. */
6675 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6676 }
6677
6678 /* Should be unreachable. */
6679 return 1;
6680 }
6681
6682 /* Decode shifted register instructions. */
6683
6684 static int
6685 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6686 uint16_t insn2, struct regcache *regs,
6687 arm_displaced_step_closure *dsc)
6688 {
6689 /* The PC is only allowed to be used in the MOV instruction. */
6690
6691 unsigned int op = bits (insn1, 5, 8);
6692 unsigned int rn = bits (insn1, 0, 3);
6693
6694 if (op == 0x2 && rn == 0xf) /* MOV */
6695 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6696 else
6697 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6698 "dp (shift reg)", dsc);
6699 }
6700
6701
6702 /* Decode extension register load/store. Exactly the same as
6703 arm_decode_ext_reg_ld_st. */
6704
6705 static int
6706 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6707 uint16_t insn2, struct regcache *regs,
6708 arm_displaced_step_closure *dsc)
6709 {
6710 unsigned int opcode = bits (insn1, 4, 8);
6711
6712 switch (opcode)
6713 {
6714 case 0x04: case 0x05:
6715 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6716 "vfp/neon vmov", dsc);
6717
6718 case 0x08: case 0x0c: /* 01x00 */
6719 case 0x0a: case 0x0e: /* 01x10 */
6720 case 0x12: case 0x16: /* 10x10 */
6721 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6722 "vfp/neon vstm/vpush", dsc);
6723
6724 case 0x09: case 0x0d: /* 01x01 */
6725 case 0x0b: case 0x0f: /* 01x11 */
6726 case 0x13: case 0x17: /* 10x11 */
6727 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6728 "vfp/neon vldm/vpop", dsc);
6729
6730 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6731 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6732 "vstr", dsc);
6733 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6734 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6735 }
6736
6737 /* Should be unreachable. */
6738 return 1;
6739 }
6740
6741 static int
6742 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6743 struct regcache *regs, arm_displaced_step_closure *dsc)
6744 {
6745 unsigned int op1 = bits (insn, 20, 25);
6746 int op = bit (insn, 4);
6747 unsigned int coproc = bits (insn, 8, 11);
6748
6749 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6750 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6751 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6752 && (coproc & 0xe) != 0xa)
6753 /* stc/stc2. */
6754 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6755 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6756 && (coproc & 0xe) != 0xa)
6757 /* ldc/ldc2 imm/lit. */
6758 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6759 else if ((op1 & 0x3e) == 0x00)
6760 return arm_copy_undef (gdbarch, insn, dsc);
6761 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6762 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6763 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6764 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6765 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6766 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6767 else if ((op1 & 0x30) == 0x20 && !op)
6768 {
6769 if ((coproc & 0xe) == 0xa)
6770 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6771 else
6772 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6773 }
6774 else if ((op1 & 0x30) == 0x20 && op)
6775 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6776 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6777 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6778 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6779 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6780 else if ((op1 & 0x30) == 0x30)
6781 return arm_copy_svc (gdbarch, insn, regs, dsc);
6782 else
6783 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6784 }
6785
6786 static int
6787 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6788 uint16_t insn2, struct regcache *regs,
6789 arm_displaced_step_closure *dsc)
6790 {
6791 unsigned int coproc = bits (insn2, 8, 11);
6792 unsigned int bit_5_8 = bits (insn1, 5, 8);
6793 unsigned int bit_9 = bit (insn1, 9);
6794 unsigned int bit_4 = bit (insn1, 4);
6795
6796 if (bit_9 == 0)
6797 {
6798 if (bit_5_8 == 2)
6799 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6800 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6801 dsc);
6802 else if (bit_5_8 == 0) /* UNDEFINED. */
6803 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6804 else
6805 {
6806 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6807 if ((coproc & 0xe) == 0xa)
6808 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6809 dsc);
6810 else /* coproc is not 101x. */
6811 {
6812 if (bit_4 == 0) /* STC/STC2. */
6813 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6814 "stc/stc2", dsc);
6815 else /* LDC/LDC2 {literal, immediate}. */
6816 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6817 regs, dsc);
6818 }
6819 }
6820 }
6821 else
6822 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6823
6824 return 0;
6825 }
6826
6827 static void
6828 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6829 arm_displaced_step_closure *dsc, int rd)
6830 {
6831 /* ADR Rd, #imm
6832
6833 Rewrite as:
6834
6835 Preparation: Rd <- PC
6836 Insn: ADD Rd, #imm
6837 Cleanup: Null.
6838 */
6839
6840 /* Rd <- PC */
6841 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6842 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6843 }
6844
6845 static int
6846 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6847 arm_displaced_step_closure *dsc,
6848 int rd, unsigned int imm)
6849 {
6850
6851 /* Encoding T2: ADDS Rd, #imm */
6852 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6853
6854 install_pc_relative (gdbarch, regs, dsc, rd);
6855
6856 return 0;
6857 }
6858
6859 static int
6860 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6861 struct regcache *regs,
6862 arm_displaced_step_closure *dsc)
6863 {
6864 unsigned int rd = bits (insn, 8, 10);
6865 unsigned int imm8 = bits (insn, 0, 7);
6866
6867 if (debug_displaced)
6868 fprintf_unfiltered (gdb_stdlog,
6869 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6870 rd, imm8, insn);
6871
6872 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6873 }
6874
6875 static int
6876 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6877 uint16_t insn2, struct regcache *regs,
6878 arm_displaced_step_closure *dsc)
6879 {
6880 unsigned int rd = bits (insn2, 8, 11);
6881 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6882 extract the raw immediate fields rather than computing the immediate value.
6883 When generating the ADD or SUB instruction, we can OR those fields
6884 directly into the new encoding. */
6885 unsigned int imm_3_8 = insn2 & 0x70ff;
6886 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
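  /* For illustration: with i:imm3:imm8 = 0:010:00000011, imm_i is 0 and
     imm_3_8 is 0x2003 (the Rd field of insn2 has been masked out), and these
     raw fields are simply ORed back into the ADD/SUB encodings built below.  */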
6887
6888 if (debug_displaced)
6889 fprintf_unfiltered (gdb_stdlog,
6890 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6891 rd, imm_i, imm_3_8, insn1, insn2);
6892
6893 if (bit (insn1, 7)) /* Encoding T2 */
6894 {
6895 /* Encoding T3: SUB Rd, Rd, #imm */
6896 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6897 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6898 }
6899 else /* Encoding T3 */
6900 {
6901 /* Encoding T3: ADD Rd, Rd, #imm */
6902 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6903 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6904 }
6905 dsc->numinsns = 2;
6906
6907 install_pc_relative (gdbarch, regs, dsc, rd);
6908
6909 return 0;
6910 }
6911
6912 static int
6913 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6914 struct regcache *regs,
6915 arm_displaced_step_closure *dsc)
6916 {
6917 unsigned int rt = bits (insn1, 8, 10);
6918 unsigned int pc;
6919 int imm8 = (bits (insn1, 0, 7) << 2);
6920
6921 /* LDR Rd, #imm8
6922
6923 Rewrite as:
6924
6925 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6926
6927 Insn: LDR R0, [R2, R3];
6928 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6929
6930 if (debug_displaced)
6931 fprintf_unfiltered (gdb_stdlog,
6932 "displaced: copying thumb ldr r%d [pc #%d]\n",
6933 rt, imm8);
6934
6935 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6936 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6937 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6938 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6939 /* The assembler calculates the required value of the offset from the
6940 Align(PC,4) value of this instruction to the label. */
6941 pc = pc & 0xfffffffc;
6942
6943 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6944 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6945
6946 dsc->rd = rt;
6947 dsc->u.ldst.xfersize = 4;
6948 dsc->u.ldst.rn = 0;
6949 dsc->u.ldst.immed = 0;
6950 dsc->u.ldst.writeback = 0;
6951 dsc->u.ldst.restore_r4 = 0;
6952
6953 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6954
6955 dsc->cleanup = &cleanup_load;
6956
6957 return 0;
6958 }
6959
6960 /* Copy Thumb cbnz/cbz instruction. */
6961
6962 static int
6963 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6964 struct regcache *regs,
6965 arm_displaced_step_closure *dsc)
6966 {
6967 int non_zero = bit (insn1, 11);
6968 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6969 CORE_ADDR from = dsc->insn_addr;
6970 int rn = bits (insn1, 0, 2);
6971 int rn_val = displaced_read_reg (regs, dsc, rn);
6972
6973 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6974 /* CBNZ and CBZ do not affect the condition flags. If the condition is true,
6975 set it to INST_AL so that cleanup_branch knows the branch is taken;
6976 otherwise leave it as-is and cleanup_branch will do nothing. */
6977 if (dsc->u.branch.cond)
6978 {
6979 dsc->u.branch.cond = INST_AL;
6980 dsc->u.branch.dest = from + 4 + imm5;
6981 }
6982 else
6983 dsc->u.branch.dest = from + 2;
6984
6985 dsc->u.branch.link = 0;
6986 dsc->u.branch.exchange = 0;
6987
6988 if (debug_displaced)
6989 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6990 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6991 rn, rn_val, insn1, dsc->u.branch.dest);
6992
6993 dsc->modinsn[0] = THUMB_NOP;
6994
6995 dsc->cleanup = &cleanup_branch;
6996 return 0;
6997 }
6998
6999 /* Copy Table Branch Byte/Halfword */
7000 static int
7001 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7002 uint16_t insn2, struct regcache *regs,
7003 arm_displaced_step_closure *dsc)
7004 {
7005 ULONGEST rn_val, rm_val;
7006 int is_tbh = bit (insn2, 4);
7007 CORE_ADDR halfwords = 0;
7008 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7009
7010 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7011 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7012
7013 if (is_tbh)
7014 {
7015 gdb_byte buf[2];
7016
7017 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7018 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7019 }
7020 else
7021 {
7022 gdb_byte buf[1];
7023
7024 target_read_memory (rn_val + rm_val, buf, 1);
7025 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7026 }
7027
7028 if (debug_displaced)
7029 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7030 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7031 (unsigned int) rn_val, (unsigned int) rm_val,
7032 (unsigned int) halfwords);
7033
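  /* The branch target is this instruction's address + 4 (the Thumb PC value)
     plus twice the table entry; e.g. a TBB entry of 5 gives a branch to
     insn_addr + 4 + 10.  */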
7034 dsc->u.branch.cond = INST_AL;
7035 dsc->u.branch.link = 0;
7036 dsc->u.branch.exchange = 0;
7037 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7038
7039 dsc->cleanup = &cleanup_branch;
7040
7041 return 0;
7042 }
7043
7044 static void
7045 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7046 arm_displaced_step_closure *dsc)
7047 {
7048 /* PC <- r7 */
7049 int val = displaced_read_reg (regs, dsc, 7);
7050 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7051
7052 /* r7 <- r8 */
7053 val = displaced_read_reg (regs, dsc, 8);
7054 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7055
7056 /* r8 <- tmp[0] */
7057 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7058
7059 }
7060
7061 static int
7062 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7063 struct regcache *regs,
7064 arm_displaced_step_closure *dsc)
7065 {
7066 dsc->u.block.regmask = insn1 & 0x00ff;
7067
7068 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
7069 to:
7070
7071 (1) register list is full, that is, r0-r7 are used.
7072 Prepare: tmp[0] <- r8
7073
7074 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7075 MOV r8, r7; Move value of r7 to r8;
7076 POP {r7}; Store PC value into r7.
7077
7078 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7079
7080 (2) register list is not full, supposing there are N registers in
7081 register list (except PC, 0 <= N <= 7).
7082 Prepare: for each i, 0 - N, tmp[i] <- ri.
7083
7084 POP {r0, r1, ...., rN};
7085
7086 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7087 from tmp[] properly.
7088 */
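  /* A worked example of case (2), assuming POP {r0, r2, pc} (regmask 0x05,
     N = 2): the instruction is executed out of line as POP {r0, r1, r2};
     cleanup_block_load_pc then moves r2 into the PC and r1 into r2, leaves
     r0 where it is, and restores the clobbered r1 from dsc->tmp[1].  */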
7089 if (debug_displaced)
7090 fprintf_unfiltered (gdb_stdlog,
7091 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7092 dsc->u.block.regmask, insn1);
7093
7094 if (dsc->u.block.regmask == 0xff)
7095 {
7096 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7097
7098 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7099 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7100 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7101
7102 dsc->numinsns = 3;
7103 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7104 }
7105 else
7106 {
7107 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7108 unsigned int i;
7109 unsigned int new_regmask;
7110
7111 for (i = 0; i < num_in_list + 1; i++)
7112 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7113
7114 new_regmask = (1 << (num_in_list + 1)) - 1;
7115
7116 if (debug_displaced)
7117 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7118 "{..., pc}: original reg list %.4x,"
7119 " modified list %.4x\n"),
7120 (int) dsc->u.block.regmask, new_regmask);
7121
7122 dsc->u.block.regmask |= 0x8000;
7123 dsc->u.block.writeback = 0;
7124 dsc->u.block.cond = INST_AL;
7125
7126 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7127
7128 dsc->cleanup = &cleanup_block_load_pc;
7129 }
7130
7131 return 0;
7132 }
7133
7134 static void
7135 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7136 struct regcache *regs,
7137 arm_displaced_step_closure *dsc)
7138 {
7139 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7140 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7141 int err = 0;
7142
7143 /* 16-bit thumb instructions. */
7144 switch (op_bit_12_15)
7145 {
7146 /* Shift (immediate), add, subtract, move and compare. */
7147 case 0: case 1: case 2: case 3:
7148 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7149 "shift/add/sub/mov/cmp",
7150 dsc);
7151 break;
7152 case 4:
7153 switch (op_bit_10_11)
7154 {
7155 case 0: /* Data-processing */
7156 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7157 "data-processing",
7158 dsc);
7159 break;
7160 case 1: /* Special data instructions and branch and exchange. */
7161 {
7162 unsigned short op = bits (insn1, 7, 9);
7163 if (op == 6 || op == 7) /* BX or BLX */
7164 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7165 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7166 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7167 else
7168 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7169 dsc);
7170 }
7171 break;
7172 default: /* LDR (literal) */
7173 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7174 }
7175 break;
7176 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7177 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7178 break;
7179 case 10:
7180 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7181 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7182 else /* Generate SP-relative address */
7183 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7184 break;
7185 case 11: /* Misc 16-bit instructions */
7186 {
7187 switch (bits (insn1, 8, 11))
7188 {
7189 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7190 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7191 break;
7192 case 12: case 13: /* POP */
7193 if (bit (insn1, 8)) /* PC is in register list. */
7194 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7195 else
7196 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7197 break;
7198 case 15: /* If-Then, and hints */
7199 if (bits (insn1, 0, 3))
7200 /* If-Then makes up to four following instructions conditional.
7201 The IT instruction itself is not conditional, so handle it as an
7202 ordinary unmodified instruction. */
7203 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7204 dsc);
7205 else
7206 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7207 break;
7208 default:
7209 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7210 }
7211 }
7212 break;
7213 case 12:
7214 if (op_bit_10_11 < 2) /* Store multiple registers */
7215 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7216 else /* Load multiple registers */
7217 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7218 break;
7219 case 13: /* Conditional branch and supervisor call */
7220 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7221 err = thumb_copy_b (gdbarch, insn1, dsc);
7222 else
7223 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7224 break;
7225 case 14: /* Unconditional branch */
7226 err = thumb_copy_b (gdbarch, insn1, dsc);
7227 break;
7228 default:
7229 err = 1;
7230 }
7231
7232 if (err)
7233 internal_error (__FILE__, __LINE__,
7234 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7235 }
7236
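/* Decode the 32-bit Thumb-2 "load byte/halfword/word and memory hint"
   group encoded by INSN1/INSN2 and prepare DSC for displaced stepping.
   The return value is passed back to the caller, which treats a nonzero
   value as a decode error.  */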
7237 static int
7238 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7239 uint16_t insn1, uint16_t insn2,
7240 struct regcache *regs,
7241 arm_displaced_step_closure *dsc)
7242 {
7243 int rt = bits (insn2, 12, 15);
7244 int rn = bits (insn1, 0, 3);
7245 int op1 = bits (insn1, 7, 8);
7246
7247 switch (bits (insn1, 5, 6))
7248 {
7249 case 0: /* Load byte and memory hints */
7250 if (rt == 0xf) /* PLD/PLI */
7251 {
7252 if (rn == 0xf)
7253 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7254 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7255 else
7256 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7257 "pli/pld", dsc);
7258 }
7259 else
7260 {
7261 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7262 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7263 1);
7264 else
7265 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7266 "ldrb{reg, immediate}/ldrbt",
7267 dsc);
7268 }
7269
7270 break;
7271 case 1: /* Load halfword and memory hints. */
7272 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7273 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7274 "pld/unalloc memhint", dsc);
7275 else
7276 {
7277 if (rn == 0xf)
7278 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7279 2);
7280 else
7281 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7282 "ldrh/ldrht", dsc);
7283 }
7284 break;
7285 case 2: /* Load word */
7286 {
7287 int insn2_bit_8_11 = bits (insn2, 8, 11);
7288
7289 if (rn == 0xf)
7290 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7291 else if (op1 == 0x1) /* Encoding T3 */
7292 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7293 0, 1);
7294 else /* op1 == 0x0 */
7295 {
7296 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7297 /* LDR (immediate) */
7298 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7299 dsc, bit (insn2, 8), 1);
7300 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7301 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7302 "ldrt", dsc);
7303 else
7304 /* LDR (register) */
7305 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7306 dsc, 0, 0);
7307 }
7308 break;
7309 }
7310 default:
7311 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7312 break;
7313 }
7314 return 0;
7315 }
7316
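/* Decode the 32-bit Thumb-2 instruction formed by INSN1 and INSN2 for
   displaced stepping, dispatching on op1 (bits 11-12 of the first
   halfword) and filling in DSC.  */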
7317 static void
7318 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7319 uint16_t insn2, struct regcache *regs,
7320 arm_displaced_step_closure *dsc)
7321 {
7322 int err = 0;
7323 unsigned short op = bit (insn2, 15);
7324 unsigned int op1 = bits (insn1, 11, 12);
7325
7326 switch (op1)
7327 {
7328 case 1:
7329 {
7330 switch (bits (insn1, 9, 10))
7331 {
7332 case 0:
7333 if (bit (insn1, 6))
7334 {
7335 /* Load/store {dual, exclusive}, table branch. */
7336 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7337 && bits (insn2, 5, 7) == 0)
7338 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7339 dsc);
7340 else
7341 /* PC is not allowed to be used in load/store {dual, exclusive}
7342 instructions. */
7343 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7344 "load/store dual/ex", dsc);
7345 }
7346 else /* load/store multiple */
7347 {
7348 switch (bits (insn1, 7, 8))
7349 {
7350 case 0: case 3: /* SRS, RFE */
7351 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7352 "srs/rfe", dsc);
7353 break;
7354 case 1: case 2: /* LDM/STM/PUSH/POP */
7355 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7356 break;
7357 }
7358 }
7359 break;
7360
7361 case 1:
7362 /* Data-processing (shift register). */
7363 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7364 dsc);
7365 break;
7366 default: /* Coprocessor instructions. */
7367 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7368 break;
7369 }
7370 break;
7371 }
7372 case 2: /* op1 = 2 */
7373 if (op) /* Branch and misc control. */
7374 {
7375 if (bit (insn2, 14) /* BLX/BL */
7376 || bit (insn2, 12) /* Unconditional branch */
7377 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7378 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7379 else
7380 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7381 "misc ctrl", dsc);
7382 }
7383 else
7384 {
7385 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7386 {
7387 int dp_op = bits (insn1, 4, 8);
7388 int rn = bits (insn1, 0, 3);
7389 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7390 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7391 regs, dsc);
7392 else
7393 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7394 "dp/pb", dsc);
7395 }
7396 else /* Data processing (modified immediate) */
7397 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7398 "dp/mi", dsc);
7399 }
7400 break;
7401 case 3: /* op1 = 3 */
7402 switch (bits (insn1, 9, 10))
7403 {
7404 case 0:
7405 if (bit (insn1, 4))
7406 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7407 regs, dsc);
7408 else /* NEON Load/Store and Store single data item */
7409 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7410 "neon elt/struct load/store",
7411 dsc);
7412 break;
7413 case 1: /* op1 = 3, bits (9, 10) == 1 */
7414 switch (bits (insn1, 7, 8))
7415 {
7416 case 0: case 1: /* Data processing (register) */
7417 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7418 "dp(reg)", dsc);
7419 break;
7420 case 2: /* Multiply and absolute difference */
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 "mul/mua/diff", dsc);
7423 break;
7424 case 3: /* Long multiply and divide */
7425 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7426 "lmul/lmua", dsc);
7427 break;
7428 }
7429 break;
7430 default: /* Coprocessor instructions */
7431 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7432 break;
7433 }
7434 break;
7435 default:
7436 err = 1;
7437 }
7438
7439 if (err)
7440 internal_error (__FILE__, __LINE__,
7441 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7442
7443 }
7444
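/* Prepare DSC for displaced stepping of the Thumb instruction at FROM:
   read the first halfword, determine from it whether this is a 16-bit
   or a 32-bit instruction, and hand it to the matching decoder above.  */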
7445 static void
7446 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7447 struct regcache *regs,
7448 arm_displaced_step_closure *dsc)
7449 {
7450 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7451 uint16_t insn1
7452 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7453
7454 if (debug_displaced)
7455 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7456 "at %.8lx\n", insn1, (unsigned long) from);
7457
7458 dsc->is_thumb = 1;
7459 dsc->insn_size = thumb_insn_size (insn1);
7460 if (thumb_insn_size (insn1) == 4)
7461 {
7462 uint16_t insn2
7463 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7464 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7465 }
7466 else
7467 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7468 }
7469
7470 void
7471 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7472 CORE_ADDR to, struct regcache *regs,
7473 arm_displaced_step_closure *dsc)
7474 {
7475 int err = 0;
7476 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7477 uint32_t insn;
7478
7479 /* Most displaced instructions use a 1-instruction scratch space, so set this
7480 here and override below if/when necessary. */
7481 dsc->numinsns = 1;
7482 dsc->insn_addr = from;
7483 dsc->scratch_base = to;
7484 dsc->cleanup = NULL;
7485 dsc->wrote_to_pc = 0;
7486
7487 if (!displaced_in_arm_mode (regs))
7488 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7489
7490 dsc->is_thumb = 0;
7491 dsc->insn_size = 4;
7492 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7493 if (debug_displaced)
7494 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7495 "at %.8lx\n", (unsigned long) insn,
7496 (unsigned long) from);
7497
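/* A condition field of 0xf selects the unconditional instruction space.
   Everything else is dispatched on a 4-bit selector: op1 (bits 27:25)
   shifted into bits 3:1 and the "op" bit (bit 4) in bit 0, which is
   enough to pick the major instruction class.  */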
7498 if ((insn & 0xf0000000) == 0xf0000000)
7499 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7500 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7501 {
7502 case 0x0: case 0x1: case 0x2: case 0x3:
7503 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7504 break;
7505
7506 case 0x4: case 0x5: case 0x6:
7507 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7508 break;
7509
7510 case 0x7:
7511 err = arm_decode_media (gdbarch, insn, dsc);
7512 break;
7513
7514 case 0x8: case 0x9: case 0xa: case 0xb:
7515 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7516 break;
7517
7518 case 0xc: case 0xd: case 0xe: case 0xf:
7519 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7520 break;
7521 }
7522
7523 if (err)
7524 internal_error (__FILE__, __LINE__,
7525 _("arm_process_displaced_insn: Instruction decode error"));
7526 }
7527
7528 /* Actually set up the scratch space for a displaced instruction. */
7529
7530 void
7531 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7532 CORE_ADDR to, arm_displaced_step_closure *dsc)
7533 {
7534 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7535 unsigned int i, len, offset;
7536 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7537 int size = dsc->is_thumb ? 2 : 4;
7538 const gdb_byte *bkp_insn;
7539
7540 offset = 0;
7541 /* Poke modified instruction(s). */
7542 for (i = 0; i < dsc->numinsns; i++)
7543 {
7544 if (debug_displaced)
7545 {
7546 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7547 if (size == 4)
7548 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7549 dsc->modinsn[i]);
7550 else if (size == 2)
7551 fprintf_unfiltered (gdb_stdlog, "%.4x",
7552 (unsigned short)dsc->modinsn[i]);
7553
7554 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7555 (unsigned long) to + offset);
7556
7557 }
7558 write_memory_unsigned_integer (to + offset, size,
7559 byte_order_for_code,
7560 dsc->modinsn[i]);
7561 offset += size;
7562 }
7563
7564 /* Choose the correct breakpoint instruction. */
7565 if (dsc->is_thumb)
7566 {
7567 bkp_insn = tdep->thumb_breakpoint;
7568 len = tdep->thumb_breakpoint_size;
7569 }
7570 else
7571 {
7572 bkp_insn = tdep->arm_breakpoint;
7573 len = tdep->arm_breakpoint_size;
7574 }
7575
7576 /* Put breakpoint afterwards. */
7577 write_memory (to + offset, bkp_insn, len);
7578
7579 if (debug_displaced)
7580 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7581 paddress (gdbarch, from), paddress (gdbarch, to));
7582 }
7583
7584 /* Entry point for cleaning things up after a displaced instruction has been
7585 single-stepped. */
7586
7587 void
7588 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7589 struct displaced_step_closure *dsc_,
7590 CORE_ADDR from, CORE_ADDR to,
7591 struct regcache *regs)
7592 {
7593 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7594
7595 if (dsc->cleanup)
7596 dsc->cleanup (gdbarch, regs, dsc);
7597
7598 if (!dsc->wrote_to_pc)
7599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7600 dsc->insn_addr + dsc->insn_size);
7601
7602 }
7603
7604 #include "bfd-in2.h"
7605 #include "libcoff.h"
7606
7607 static int
7608 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7609 {
7610 gdb_disassembler *di
7611 = static_cast<gdb_disassembler *>(info->application_data);
7612 struct gdbarch *gdbarch = di->arch ();
7613
7614 if (arm_pc_is_thumb (gdbarch, memaddr))
7615 {
7616 static asymbol *asym;
7617 static combined_entry_type ce;
7618 static struct coff_symbol_struct csym;
7619 static struct bfd fake_bfd;
7620 static bfd_target fake_target;
7621
7622 if (csym.native == NULL)
7623 {
7624 /* Create a fake symbol vector containing a Thumb symbol.
7625 This is solely so that the code in print_insn_little_arm()
7626 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7627 the presence of a Thumb symbol and switch to decoding
7628 Thumb instructions. */
7629
7630 fake_target.flavour = bfd_target_coff_flavour;
7631 fake_bfd.xvec = &fake_target;
7632 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7633 csym.native = &ce;
7634 csym.symbol.the_bfd = &fake_bfd;
7635 csym.symbol.name = "fake";
7636 asym = (asymbol *) & csym;
7637 }
7638
7639 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7640 info->symbols = &asym;
7641 }
7642 else
7643 info->symbols = NULL;
7644
7645 /* GDB gets bfd_mach from exec_bfd, so info->mach is accurate; mark
7646 the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7647 opcodes/arm-dis.c:print_insn would reset info->mach, which would
7648 trigger the assert on the mismatch of info->mach and
7649 bfd_get_mach (exec_bfd) in default_print_insn. */
7650 if (exec_bfd != NULL)
7651 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7652
7653 return default_print_insn (memaddr, info);
7654 }
7655
7656 /* The following define instruction sequences that will cause ARM
7657 CPUs to take an undefined instruction trap. These are used to
7658 signal a breakpoint to GDB.
7659
7660 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7661 modes. A different instruction is required for each mode. The ARM
7662 CPUs can also be big or little endian. Thus four different
7663 instructions are needed to support all cases.
7664
7665 Note: ARMv4 defines several new instructions that will take the
7666 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7667 not in fact add the new instructions. The new undefined
7668 instructions in ARMv4 are all instructions that had no defined
7669 behaviour in earlier chips. There is no guarantee that they will
7670 raise an exception; they may be treated as NOPs. In practice, it
7671 may only be safe to rely on instructions matching:
7672
7673 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7674 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7675 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7676
7677 Even this may only be true if the condition predicate is true. The
7678 following use a condition predicate of ALWAYS so it is always TRUE.
7679
7680 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7681 and NetBSD all use a software interrupt rather than an undefined
7682 instruction to force a trap. This can be handled by the
7683 abi-specific code during establishment of the gdbarch vector. */
7684
7685 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7686 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7687 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7688 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7689
7690 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7691 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7692 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7693 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7694
7695 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7696
7697 static int
7698 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7699 {
7700 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7701 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7702
7703 if (arm_pc_is_thumb (gdbarch, *pcptr))
7704 {
7705 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7706
7707 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7708 check whether we are replacing a 32-bit instruction. */
7709 if (tdep->thumb2_breakpoint != NULL)
7710 {
7711 gdb_byte buf[2];
7712
7713 if (target_read_memory (*pcptr, buf, 2) == 0)
7714 {
7715 unsigned short inst1;
7716
7717 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7718 if (thumb_insn_size (inst1) == 4)
7719 return ARM_BP_KIND_THUMB2;
7720 }
7721 }
7722
7723 return ARM_BP_KIND_THUMB;
7724 }
7725 else
7726 return ARM_BP_KIND_ARM;
7727
7728 }
7729
7730 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7731
7732 static const gdb_byte *
7733 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7734 {
7735 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7736
7737 switch (kind)
7738 {
7739 case ARM_BP_KIND_ARM:
7740 *size = tdep->arm_breakpoint_size;
7741 return tdep->arm_breakpoint;
7742 case ARM_BP_KIND_THUMB:
7743 *size = tdep->thumb_breakpoint_size;
7744 return tdep->thumb_breakpoint;
7745 case ARM_BP_KIND_THUMB2:
7746 *size = tdep->thumb2_breakpoint_size;
7747 return tdep->thumb2_breakpoint;
7748 default:
7749 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7750 }
7751 }
7752
7753 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7754
7755 static int
7756 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7757 struct regcache *regcache,
7758 CORE_ADDR *pcptr)
7759 {
7760 gdb_byte buf[4];
7761
7762 /* Check that the memory pointed to by PC is readable. */
7763 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7764 {
7765 struct arm_get_next_pcs next_pcs_ctx;
7766
7767 arm_get_next_pcs_ctor (&next_pcs_ctx,
7768 &arm_get_next_pcs_ops,
7769 gdbarch_byte_order (gdbarch),
7770 gdbarch_byte_order_for_code (gdbarch),
7771 0,
7772 regcache);
7773
7774 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7775
7776 /* If *PCPTR is one of the next-instruction addresses computed by
7777 the software single-step logic, use that destination address to
7778 determine whether the new location is Thumb mode. */
7779 for (CORE_ADDR pc : next_pcs)
7780 {
7781 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7782 {
7783 if (IS_THUMB_ADDR (pc))
7784 {
7785 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7786 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7787 }
7788 else
7789 return ARM_BP_KIND_ARM;
7790 }
7791 }
7792 }
7793
7794 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7795 }
7796
7797 /* Extract from an array REGBUF containing the (raw) register state a
7798 function return value of type TYPE, and copy that, in virtual
7799 format, into VALBUF. */
7800
7801 static void
7802 arm_extract_return_value (struct type *type, struct regcache *regs,
7803 gdb_byte *valbuf)
7804 {
7805 struct gdbarch *gdbarch = regs->arch ();
7806 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7807
7808 if (TYPE_CODE_FLT == TYPE_CODE (type))
7809 {
7810 switch (gdbarch_tdep (gdbarch)->fp_model)
7811 {
7812 case ARM_FLOAT_FPA:
7813 {
7814 /* The value is in register F0 in internal format. We need to
7815 extract the raw value and then convert it to the desired
7816 internal type. */
7817 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7818
7819 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7820 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7821 valbuf, type);
7822 }
7823 break;
7824
7825 case ARM_FLOAT_SOFT_FPA:
7826 case ARM_FLOAT_SOFT_VFP:
7827 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7828 not using the VFP ABI code. */
7829 case ARM_FLOAT_VFP:
7830 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7831 if (TYPE_LENGTH (type) > 4)
7832 regs->cooked_read (ARM_A1_REGNUM + 1,
7833 valbuf + ARM_INT_REGISTER_SIZE);
7834 break;
7835
7836 default:
7837 internal_error (__FILE__, __LINE__,
7838 _("arm_extract_return_value: "
7839 "Floating point model not supported"));
7840 break;
7841 }
7842 }
7843 else if (TYPE_CODE (type) == TYPE_CODE_INT
7844 || TYPE_CODE (type) == TYPE_CODE_CHAR
7845 || TYPE_CODE (type) == TYPE_CODE_BOOL
7846 || TYPE_CODE (type) == TYPE_CODE_PTR
7847 || TYPE_IS_REFERENCE (type)
7848 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7849 {
7850 /* If the type is a plain integer, then the access is
7851 straightforward. Otherwise we have to play around a bit
7852 more. */
7853 int len = TYPE_LENGTH (type);
7854 int regno = ARM_A1_REGNUM;
7855 ULONGEST tmp;
7856
7857 while (len > 0)
7858 {
7859 /* By using store_unsigned_integer we avoid having to do
7860 anything special for small big-endian values. */
7861 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7862 store_unsigned_integer (valbuf,
7863 (len > ARM_INT_REGISTER_SIZE
7864 ? ARM_INT_REGISTER_SIZE : len),
7865 byte_order, tmp);
7866 len -= ARM_INT_REGISTER_SIZE;
7867 valbuf += ARM_INT_REGISTER_SIZE;
7868 }
7869 }
7870 else
7871 {
7872 /* For a structure or union the behaviour is as if the value had
7873 been stored to word-aligned memory and then loaded into
7874 registers with 32-bit load instruction(s). */
7875 int len = TYPE_LENGTH (type);
7876 int regno = ARM_A1_REGNUM;
7877 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7878
7879 while (len > 0)
7880 {
7881 regs->cooked_read (regno++, tmpbuf);
7882 memcpy (valbuf, tmpbuf,
7883 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7884 len -= ARM_INT_REGISTER_SIZE;
7885 valbuf += ARM_INT_REGISTER_SIZE;
7886 }
7887 }
7888 }
7889
7890
7891 /* Will a function return an aggregate type in memory or in a
7892 register? Return 0 if an aggregate type can be returned in a
7893 register, 1 if it must be returned in memory. */
7894
7895 static int
7896 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7897 {
7898 enum type_code code;
7899
7900 type = check_typedef (type);
7901
7902 /* Simple, non-aggregate types (i.e. not including vectors and
7903 complex) are always returned in a register (or registers). */
7904 code = TYPE_CODE (type);
7905 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7906 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7907 return 0;
7908
7909 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7910 {
7911 /* Vector values should be returned using ARM registers if they
7912 are not over 16 bytes. */
7913 return (TYPE_LENGTH (type) > 16);
7914 }
7915
7916 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7917 {
7918 /* The AAPCS says all aggregates not larger than a word are returned
7919 in a register. */
7920 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7921 return 0;
7922
7923 return 1;
7924 }
7925 else
7926 {
7927 int nRc;
7928
7929 /* All aggregate types that won't fit in a register must be returned
7930 in memory. */
7931 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
7932 return 1;
7933
7934 /* In the ARM ABI, "integer" like aggregate types are returned in
7935 registers. For an aggregate type to be integer like, its size
7936 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7937 offset of each addressable subfield must be zero. Note that bit
7938 fields are not addressable, and all addressable subfields of
7939 unions always start at offset zero.
7940
7941 This function is based on the behaviour of GCC 2.95.1.
7942 See: gcc/arm.c: arm_return_in_memory() for details.
7943
7944 Note: Versions of GCC before GCC 2.95.2 do not set up the
7945 parameters correctly for a function returning the following
7946 structure: struct { float f; }. This should be returned in memory,
7947 not a register. Richard Earnshaw sent me a patch, but I do not
7948 know of any way to detect if a function like the above has been
7949 compiled with the correct calling convention. */
7950
7951 /* Assume all other aggregate types can be returned in a register.
7952 Run a check for structures, unions and arrays. */
7953 nRc = 0;
7954
7955 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7956 {
7957 int i;
7958 /* Need to check if this struct/union is "integer" like. For
7959 this to be true, its size must be less than or equal to
7960 ARM_INT_REGISTER_SIZE and the offset of each addressable
7961 subfield must be zero. Note that bit fields are not
7962 addressable, and unions always start at offset zero. If any
7963 of the subfields is a floating point type, the struct/union
7964 cannot be an integer type. */
7965
7966 /* For each field in the object, check:
7967 1) Is it FP? --> yes, nRc = 1;
7968 2) Is it addressable (bitpos != 0) and
7969 not packed (bitsize == 0)?
7970 --> yes, nRc = 1
7971 */
7972
7973 for (i = 0; i < TYPE_NFIELDS (type); i++)
7974 {
7975 enum type_code field_type_code;
7976
7977 field_type_code
7978 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7979 i)));
7980
7981 /* Is it a floating point type field? */
7982 if (field_type_code == TYPE_CODE_FLT)
7983 {
7984 nRc = 1;
7985 break;
7986 }
7987
7988 /* If bitpos != 0, then we have to care about it. */
7989 if (TYPE_FIELD_BITPOS (type, i) != 0)
7990 {
7991 /* Bitfields are not addressable. If the field bitsize is
7992 zero, then the field is not packed. Hence it cannot be
7993 a bitfield or any other packed type. */
7994 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7995 {
7996 nRc = 1;
7997 break;
7998 }
7999 }
8000 }
8001 }
8002
8003 return nRc;
8004 }
8005 }
8006
8007 /* Write into appropriate registers a function return value of type
8008 TYPE, given in virtual format. */
8009
8010 static void
8011 arm_store_return_value (struct type *type, struct regcache *regs,
8012 const gdb_byte *valbuf)
8013 {
8014 struct gdbarch *gdbarch = regs->arch ();
8015 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8016
8017 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8018 {
8019 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8020
8021 switch (gdbarch_tdep (gdbarch)->fp_model)
8022 {
8023 case ARM_FLOAT_FPA:
8024
8025 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8026 regs->cooked_write (ARM_F0_REGNUM, buf);
8027 break;
8028
8029 case ARM_FLOAT_SOFT_FPA:
8030 case ARM_FLOAT_SOFT_VFP:
8031 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8032 not using the VFP ABI code. */
8033 case ARM_FLOAT_VFP:
8034 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8035 if (TYPE_LENGTH (type) > 4)
8036 regs->cooked_write (ARM_A1_REGNUM + 1,
8037 valbuf + ARM_INT_REGISTER_SIZE);
8038 break;
8039
8040 default:
8041 internal_error (__FILE__, __LINE__,
8042 _("arm_store_return_value: Floating "
8043 "point model not supported"));
8044 break;
8045 }
8046 }
8047 else if (TYPE_CODE (type) == TYPE_CODE_INT
8048 || TYPE_CODE (type) == TYPE_CODE_CHAR
8049 || TYPE_CODE (type) == TYPE_CODE_BOOL
8050 || TYPE_CODE (type) == TYPE_CODE_PTR
8051 || TYPE_IS_REFERENCE (type)
8052 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8053 {
8054 if (TYPE_LENGTH (type) <= 4)
8055 {
8056 /* Values of one word or less are zero/sign-extended and
8057 returned in r0. */
8058 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8059 LONGEST val = unpack_long (type, valbuf);
8060
8061 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8062 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8063 }
8064 else
8065 {
8066 /* Integral values greater than one word are stored in consecutive
8067 registers starting with r0. This will always be a multiple of
8068 the register size. */
8069 int len = TYPE_LENGTH (type);
8070 int regno = ARM_A1_REGNUM;
8071
8072 while (len > 0)
8073 {
8074 regs->cooked_write (regno++, valbuf);
8075 len -= ARM_INT_REGISTER_SIZE;
8076 valbuf += ARM_INT_REGISTER_SIZE;
8077 }
8078 }
8079 }
8080 else
8081 {
8082 /* For a structure or union the behaviour is as if the value had
8083 been stored to word-aligned memory and then loaded into
8084 registers with 32-bit load instruction(s). */
8085 int len = TYPE_LENGTH (type);
8086 int regno = ARM_A1_REGNUM;
8087 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8088
8089 while (len > 0)
8090 {
8091 memcpy (tmpbuf, valbuf,
8092 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8093 regs->cooked_write (regno++, tmpbuf);
8094 len -= ARM_INT_REGISTER_SIZE;
8095 valbuf += ARM_INT_REGISTER_SIZE;
8096 }
8097 }
8098 }
8099
8100
8101 /* Handle function return values. */
8102
8103 static enum return_value_convention
8104 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8105 struct type *valtype, struct regcache *regcache,
8106 gdb_byte *readbuf, const gdb_byte *writebuf)
8107 {
8108 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8109 struct type *func_type = function ? value_type (function) : NULL;
8110 enum arm_vfp_cprc_base_type vfp_base_type;
8111 int vfp_base_count;
8112
8113 if (arm_vfp_abi_for_function (gdbarch, func_type)
8114 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8115 {
8116 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8117 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8118 int i;
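/* The value is a VFP co-processor register candidate: each base
   element is returned in its own s<i>, d<i> or q<i> register, in
   order.  */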
8119 for (i = 0; i < vfp_base_count; i++)
8120 {
8121 if (reg_char == 'q')
8122 {
8123 if (writebuf)
8124 arm_neon_quad_write (gdbarch, regcache, i,
8125 writebuf + i * unit_length);
8126
8127 if (readbuf)
8128 arm_neon_quad_read (gdbarch, regcache, i,
8129 readbuf + i * unit_length);
8130 }
8131 else
8132 {
8133 char name_buf[4];
8134 int regnum;
8135
8136 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8137 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8138 strlen (name_buf));
8139 if (writebuf)
8140 regcache->cooked_write (regnum, writebuf + i * unit_length);
8141 if (readbuf)
8142 regcache->cooked_read (regnum, readbuf + i * unit_length);
8143 }
8144 }
8145 return RETURN_VALUE_REGISTER_CONVENTION;
8146 }
8147
8148 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8149 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8150 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8151 {
8152 if (tdep->struct_return == pcc_struct_return
8153 || arm_return_in_memory (gdbarch, valtype))
8154 return RETURN_VALUE_STRUCT_CONVENTION;
8155 }
8156 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8157 {
8158 if (arm_return_in_memory (gdbarch, valtype))
8159 return RETURN_VALUE_STRUCT_CONVENTION;
8160 }
8161
8162 if (writebuf)
8163 arm_store_return_value (valtype, regcache, writebuf);
8164
8165 if (readbuf)
8166 arm_extract_return_value (valtype, regcache, readbuf);
8167
8168 return RETURN_VALUE_REGISTER_CONVENTION;
8169 }
8170
8171
8172 static int
8173 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8174 {
8175 struct gdbarch *gdbarch = get_frame_arch (frame);
8176 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8177 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8178 CORE_ADDR jb_addr;
8179 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8180
8181 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8182
8183 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8184 ARM_INT_REGISTER_SIZE))
8185 return 0;
8186
8187 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8188 return 1;
8189 }
8190
8191 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8192 return the target PC. Otherwise return 0. */
8193
8194 CORE_ADDR
8195 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8196 {
8197 const char *name;
8198 int namelen;
8199 CORE_ADDR start_addr;
8200
8201 /* Find the starting address and name of the function containing the PC. */
8202 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8203 {
8204 /* Trampoline 'bx reg' doesn't belong to any function. Do the
8205 check here. */
8206 start_addr = arm_skip_bx_reg (frame, pc);
8207 if (start_addr != 0)
8208 return start_addr;
8209
8210 return 0;
8211 }
8212
8213 /* If PC is in a Thumb call or return stub, return the address of the
8214 target PC, which is in a register. The thunk functions are called
8215 _call_via_xx, where xx is the register name. The possible names
8216 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8217 functions, named __ARM_call_via_r[0-7]. */
8218 if (startswith (name, "_call_via_")
8219 || startswith (name, "__ARM_call_via_"))
8220 {
8221 /* Use the name suffix to determine which register contains the
8222 target PC. */
8223 static const char *table[15] =
8224 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8225 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8226 };
8227 int regno;
8228 int offset = strlen (name) - 2;
8229
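/* Every name in the table is exactly two characters long, so OFFSET
   points at the register-name suffix of NAME.  */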
8230 for (regno = 0; regno <= 14; regno++)
8231 if (strcmp (&name[offset], table[regno]) == 0)
8232 return get_frame_register_unsigned (frame, regno);
8233 }
8234
8235 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8236 non-interworking calls to foo. We could decode the stubs
8237 to find the target but it's easier to use the symbol table. */
8238 namelen = strlen (name);
8239 if (name[0] == '_' && name[1] == '_'
8240 && ((namelen > 2 + strlen ("_from_thumb")
8241 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8242 || (namelen > 2 + strlen ("_from_arm")
8243 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8244 {
8245 char *target_name;
8246 int target_len = namelen - 2;
8247 struct bound_minimal_symbol minsym;
8248 struct objfile *objfile;
8249 struct obj_section *sec;
8250
8251 if (name[namelen - 1] == 'b')
8252 target_len -= strlen ("_from_thumb");
8253 else
8254 target_len -= strlen ("_from_arm");
8255
8256 target_name = (char *) alloca (target_len + 1);
8257 memcpy (target_name, name + 2, target_len);
8258 target_name[target_len] = '\0';
8259
8260 sec = find_pc_section (pc);
8261 objfile = (sec == NULL) ? NULL : sec->objfile;
8262 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8263 if (minsym.minsym != NULL)
8264 return BMSYMBOL_VALUE_ADDRESS (minsym);
8265 else
8266 return 0;
8267 }
8268
8269 return 0; /* not a stub */
8270 }
8271
8272 static void
8273 set_arm_command (const char *args, int from_tty)
8274 {
8275 printf_unfiltered (_("\
8276 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8277 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8278 }
8279
8280 static void
8281 show_arm_command (const char *args, int from_tty)
8282 {
8283 cmd_show_list (showarmcmdlist, from_tty, "");
8284 }
8285
8286 static void
8287 arm_update_current_architecture (void)
8288 {
8289 struct gdbarch_info info;
8290
8291 /* If the current architecture is not ARM, we have nothing to do. */
8292 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8293 return;
8294
8295 /* Update the architecture. */
8296 gdbarch_info_init (&info);
8297
8298 if (!gdbarch_update_p (info))
8299 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8300 }
8301
8302 static void
8303 set_fp_model_sfunc (const char *args, int from_tty,
8304 struct cmd_list_element *c)
8305 {
8306 int fp_model;
8307
8308 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8309 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8310 {
8311 arm_fp_model = (enum arm_float_model) fp_model;
8312 break;
8313 }
8314
8315 if (fp_model == ARM_FLOAT_LAST)
8316 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8317 current_fp_model);
8318
8319 arm_update_current_architecture ();
8320 }
8321
8322 static void
8323 show_fp_model (struct ui_file *file, int from_tty,
8324 struct cmd_list_element *c, const char *value)
8325 {
8326 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8327
8328 if (arm_fp_model == ARM_FLOAT_AUTO
8329 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8330 fprintf_filtered (file, _("\
8331 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8332 fp_model_strings[tdep->fp_model]);
8333 else
8334 fprintf_filtered (file, _("\
8335 The current ARM floating point model is \"%s\".\n"),
8336 fp_model_strings[arm_fp_model]);
8337 }
8338
8339 static void
8340 arm_set_abi (const char *args, int from_tty,
8341 struct cmd_list_element *c)
8342 {
8343 int arm_abi;
8344
8345 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8346 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8347 {
8348 arm_abi_global = (enum arm_abi_kind) arm_abi;
8349 break;
8350 }
8351
8352 if (arm_abi == ARM_ABI_LAST)
8353 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8354 arm_abi_string);
8355
8356 arm_update_current_architecture ();
8357 }
8358
8359 static void
8360 arm_show_abi (struct ui_file *file, int from_tty,
8361 struct cmd_list_element *c, const char *value)
8362 {
8363 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8364
8365 if (arm_abi_global == ARM_ABI_AUTO
8366 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8367 fprintf_filtered (file, _("\
8368 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8369 arm_abi_strings[tdep->arm_abi]);
8370 else
8371 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8372 arm_abi_string);
8373 }
8374
8375 static void
8376 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8377 struct cmd_list_element *c, const char *value)
8378 {
8379 fprintf_filtered (file,
8380 _("The current execution mode assumed "
8381 "(when symbols are unavailable) is \"%s\".\n"),
8382 arm_fallback_mode_string);
8383 }
8384
8385 static void
8386 arm_show_force_mode (struct ui_file *file, int from_tty,
8387 struct cmd_list_element *c, const char *value)
8388 {
8389 fprintf_filtered (file,
8390 _("The current execution mode assumed "
8391 "(even when symbols are available) is \"%s\".\n"),
8392 arm_force_mode_string);
8393 }
8394
8395 /* If the user changes the register disassembly style used for info
8396 register and other commands, we have to also switch the style used
8397 in opcodes for disassembly output. This function is run in the "set
8398 arm disassembly" command, and does that. */
8399
8400 static void
8401 set_disassembly_style_sfunc (const char *args, int from_tty,
8402 struct cmd_list_element *c)
8403 {
8404 /* Convert the short style name into the long style name (e.g. reg-names-*)
8405 before calling the generic set_disassembler_options() function. */
8406 std::string long_name = std::string ("reg-names-") + disassembly_style;
8407 set_disassembler_options (&long_name[0]);
8408 }
8409
8410 static void
8411 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8412 struct cmd_list_element *c, const char *value)
8413 {
8414 struct gdbarch *gdbarch = get_current_arch ();
8415 char *options = get_disassembler_options (gdbarch);
8416 const char *style = "";
8417 int len = 0;
8418 const char *opt;
8419
8420 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8421 if (CONST_STRNEQ (opt, "reg-names-"))
8422 {
8423 style = &opt[strlen ("reg-names-")];
8424 len = strcspn (style, ",");
8425 }
8426
8427 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8428 }
8429 \f
8430 /* Return the ARM register name corresponding to register I. */
8431 static const char *
8432 arm_register_name (struct gdbarch *gdbarch, int i)
8433 {
8434 const int num_regs = gdbarch_num_regs (gdbarch);
8435
8436 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8437 && i >= num_regs && i < num_regs + 32)
8438 {
8439 static const char *const vfp_pseudo_names[] = {
8440 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8441 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8442 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8443 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8444 };
8445
8446 return vfp_pseudo_names[i - num_regs];
8447 }
8448
8449 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8450 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8451 {
8452 static const char *const neon_pseudo_names[] = {
8453 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8454 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8455 };
8456
8457 return neon_pseudo_names[i - num_regs - 32];
8458 }
8459
8460 if (i >= ARRAY_SIZE (arm_register_names))
8461 /* These registers are only supported on targets which supply
8462 an XML description. */
8463 return "";
8464
8465 return arm_register_names[i];
8466 }
8467
8468 /* Test whether the coff symbol specific value corresponds to a Thumb
8469 function. */
8470
8471 static int
8472 coff_sym_is_thumb (int val)
8473 {
8474 return (val == C_THUMBEXT
8475 || val == C_THUMBSTAT
8476 || val == C_THUMBEXTFUNC
8477 || val == C_THUMBSTATFUNC
8478 || val == C_THUMBLABEL);
8479 }
8480
8481 /* arm_coff_make_msymbol_special()
8482 arm_elf_make_msymbol_special()
8483
8484 These functions test whether the COFF or ELF symbol corresponds to
8485 an address in thumb code, and set a "special" bit in a minimal
8486 symbol to indicate that it does. */
8487
8488 static void
8489 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8490 {
8491 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8492
8493 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8494 == ST_BRANCH_TO_THUMB)
8495 MSYMBOL_SET_SPECIAL (msym);
8496 }
8497
8498 static void
8499 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8500 {
8501 if (coff_sym_is_thumb (val))
8502 MSYMBOL_SET_SPECIAL (msym);
8503 }
8504
8505 static void
8506 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8507 asymbol *sym)
8508 {
8509 const char *name = bfd_asymbol_name (sym);
8510 struct arm_per_objfile *data;
8511 struct arm_mapping_symbol new_map_sym;
8512
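/* ELF mapping symbols mark the start of a run of ARM code ($a), Thumb
   code ($t) or literal data ($d); ignore any other symbol starting
   with '$'.  */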
8513 gdb_assert (name[0] == '$');
8514 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8515 return;
8516
8517 data = arm_objfile_data_key.get (objfile);
8518 if (data == NULL)
8519 data = arm_objfile_data_key.emplace (objfile,
8520 objfile->obfd->section_count);
8521 arm_mapping_symbol_vec &map
8522 = data->section_maps[bfd_get_section (sym)->index];
8523
8524 new_map_sym.value = sym->value;
8525 new_map_sym.type = name[1];
8526
8527 /* Insert at the end, the vector will be sorted on first use. */
8528 map.push_back (new_map_sym);
8529 }
8530
8531 static void
8532 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8533 {
8534 struct gdbarch *gdbarch = regcache->arch ();
8535 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8536
8537 /* If necessary, set the T bit. */
8538 if (arm_apcs_32)
8539 {
8540 ULONGEST val, t_bit;
8541 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8542 t_bit = arm_psr_thumb_bit (gdbarch);
8543 if (arm_pc_is_thumb (gdbarch, pc))
8544 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8545 val | t_bit);
8546 else
8547 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8548 val & ~t_bit);
8549 }
8550 }
8551
8552 /* Read the contents of a NEON quad register, by reading from two
8553 double registers. This is used to implement the quad pseudo
8554 registers, and for argument passing in case the quad registers are
8555 missing; vectors are passed in quad registers when using the VFP
8556 ABI, even if a NEON unit is not present. REGNUM is the index of
8557 the quad register, in [0, 15]. */
8558
8559 static enum register_status
8560 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8561 int regnum, gdb_byte *buf)
8562 {
8563 char name_buf[4];
8564 gdb_byte reg_buf[8];
8565 int offset, double_regnum;
8566 enum register_status status;
8567
8568 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8569 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8570 strlen (name_buf));
8571
8572 /* d0 is always the least significant half of q0. */
8573 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8574 offset = 8;
8575 else
8576 offset = 0;
8577
8578 status = regcache->raw_read (double_regnum, reg_buf);
8579 if (status != REG_VALID)
8580 return status;
8581 memcpy (buf + offset, reg_buf, 8);
8582
8583 offset = 8 - offset;
8584 status = regcache->raw_read (double_regnum + 1, reg_buf);
8585 if (status != REG_VALID)
8586 return status;
8587 memcpy (buf + offset, reg_buf, 8);
8588
8589 return REG_VALID;
8590 }
8591
8592 static enum register_status
8593 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8594 int regnum, gdb_byte *buf)
8595 {
8596 const int num_regs = gdbarch_num_regs (gdbarch);
8597 char name_buf[4];
8598 gdb_byte reg_buf[8];
8599 int offset, double_regnum;
8600
8601 gdb_assert (regnum >= num_regs);
8602 regnum -= num_regs;
8603
8604 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8605 /* Quad-precision register. */
8606 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8607 else
8608 {
8609 enum register_status status;
8610
8611 /* Single-precision register. */
8612 gdb_assert (regnum < 32);
8613
8614 /* s0 is always the least significant half of d0. */
8615 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8616 offset = (regnum & 1) ? 0 : 4;
8617 else
8618 offset = (regnum & 1) ? 4 : 0;
8619
8620 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8621 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8622 strlen (name_buf));
8623
8624 status = regcache->raw_read (double_regnum, reg_buf);
8625 if (status == REG_VALID)
8626 memcpy (buf, reg_buf + offset, 4);
8627 return status;
8628 }
8629 }
8630
8631 /* Store the contents of BUF to a NEON quad register, by writing to
8632 two double registers. This is used to implement the quad pseudo
8633 registers, and for argument passing in case the quad registers are
8634 missing; vectors are passed in quad registers when using the VFP
8635 ABI, even if a NEON unit is not present. REGNUM is the index
8636 of the quad register, in [0, 15]. */
8637
8638 static void
8639 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8640 int regnum, const gdb_byte *buf)
8641 {
8642 char name_buf[4];
8643 int offset, double_regnum;
8644
8645 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8646 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8647 strlen (name_buf));
8648
8649 /* d0 is always the least significant half of q0. */
8650 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8651 offset = 8;
8652 else
8653 offset = 0;
8654
8655 regcache->raw_write (double_regnum, buf + offset);
8656 offset = 8 - offset;
8657 regcache->raw_write (double_regnum + 1, buf + offset);
8658 }
8659
8660 static void
8661 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8662 int regnum, const gdb_byte *buf)
8663 {
8664 const int num_regs = gdbarch_num_regs (gdbarch);
8665 char name_buf[4];
8666 gdb_byte reg_buf[8];
8667 int offset, double_regnum;
8668
8669 gdb_assert (regnum >= num_regs);
8670 regnum -= num_regs;
8671
8672 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8673 /* Quad-precision register. */
8674 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8675 else
8676 {
8677 /* Single-precision register. */
8678 gdb_assert (regnum < 32);
8679
8680 /* s0 is always the least significant half of d0. */
8681 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8682 offset = (regnum & 1) ? 0 : 4;
8683 else
8684 offset = (regnum & 1) ? 4 : 0;
8685
8686 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8687 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8688 strlen (name_buf));
8689
8690 regcache->raw_read (double_regnum, reg_buf);
8691 memcpy (reg_buf + offset, buf, 4);
8692 regcache->raw_write (double_regnum, reg_buf);
8693 }
8694 }
8695
8696 static struct value *
8697 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8698 {
8699 const int *reg_p = (const int *) baton;
8700 return value_of_register (*reg_p, frame);
8701 }
8702 \f
8703 static enum gdb_osabi
8704 arm_elf_osabi_sniffer (bfd *abfd)
8705 {
8706 unsigned int elfosabi;
8707 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8708
8709 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8710
8711 if (elfosabi == ELFOSABI_ARM)
8712 /* GNU tools use this value. Check note sections in this case,
8713 as well. */
8714 bfd_map_over_sections (abfd,
8715 generic_elf_osabi_sniff_abi_tag_sections,
8716 &osabi);
8717
8718 /* Anything else will be handled by the generic ELF sniffer. */
8719 return osabi;
8720 }
8721
8722 static int
8723 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8724 struct reggroup *group)
8725 {
8726 /* The FPS register's type is INT, but it belongs to float_reggroup.
8727 Besides this, the FPS register belongs to save_reggroup,
8728 restore_reggroup, and all_reggroup, of course. */
8729 if (regnum == ARM_FPS_REGNUM)
8730 return (group == float_reggroup
8731 || group == save_reggroup
8732 || group == restore_reggroup
8733 || group == all_reggroup);
8734 else
8735 return default_register_reggroup_p (gdbarch, regnum, group);
8736 }
8737
8738 /* For backward-compatibility we allow two 'g' packet lengths with
8739 the remote protocol depending on whether FPA registers are
8740 supplied. M-profile targets do not have FPA registers, but some
8741 stubs already exist in the wild which use a 'g' packet which
8742 supplies them albeit with dummy values. The packet format which
8743 includes FPA registers should be considered deprecated for
8744 M-profile targets. */
8745
8746 static void
8747 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8748 {
8749 if (gdbarch_tdep (gdbarch)->is_m)
8750 {
8751 const target_desc *tdesc;
8752
8753 /* If we know from the executable this is an M-profile target,
8754 cater for remote targets whose register set layout is the
8755 same as the FPA layout. */
8756 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8757 register_remote_g_packet_guess (gdbarch,
8758 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8759 tdesc);
8760
8761 /* The regular M-profile layout. */
8762 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8763 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8764 tdesc);
8765
8766 /* M-profile plus M4F VFP. */
8767 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8768 register_remote_g_packet_guess (gdbarch,
8769 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8770 tdesc);
8771 }
8772
8773 /* Otherwise we don't have a useful guess. */
8774 }
8775
8776 /* Implement the code_of_frame_writable gdbarch method. */
8777
8778 static int
8779 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8780 {
8781 if (gdbarch_tdep (gdbarch)->is_m
8782 && get_frame_type (frame) == SIGTRAMP_FRAME)
8783 {
8784 /* M-profile exception frames return to some magic PCs, which
8785 aren't writable at all. */
8786 return 0;
8787 }
8788 else
8789 return 1;
8790 }
8791
8792 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8793 to be postfixed by a version (e.g. armv7hl). */
8794
8795 static const char *
8796 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8797 {
8798 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8799 return "arm(v[^- ]*)?";
8800 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8801 }
8802
8803 /* Initialize the current architecture based on INFO. If possible,
8804 re-use an architecture from ARCHES, which is a list of
8805 architectures already created during this debugging session.
8806
8807 Called e.g. at program startup, when reading a core file, and when
8808 reading a binary file. */
8809
8810 static struct gdbarch *
8811 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8812 {
8813 struct gdbarch_tdep *tdep;
8814 struct gdbarch *gdbarch;
8815 struct gdbarch_list *best_arch;
8816 enum arm_abi_kind arm_abi = arm_abi_global;
8817 enum arm_float_model fp_model = arm_fp_model;
8818 struct tdesc_arch_data *tdesc_data = NULL;
8819 int i, is_m = 0;
8820 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8821 int have_wmmx_registers = 0;
8822 int have_neon = 0;
8823 int have_fpa_registers = 1;
8824 const struct target_desc *tdesc = info.target_desc;
8825
8826 /* If we have an object to base this architecture on, try to determine
8827 its ABI. */
8828
8829 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8830 {
8831 int ei_osabi, e_flags;
8832
8833 switch (bfd_get_flavour (info.abfd))
8834 {
8835 case bfd_target_coff_flavour:
8836 /* Assume it's an old APCS-style ABI. */
8837 /* XXX WinCE? */
8838 arm_abi = ARM_ABI_APCS;
8839 break;
8840
8841 case bfd_target_elf_flavour:
8842 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8843 e_flags = elf_elfheader (info.abfd)->e_flags;
8844
8845 if (ei_osabi == ELFOSABI_ARM)
8846 {
8847 /* GNU tools used to use this value, but do not for EABI
8848 objects. There's nowhere to tag an EABI version
8849 anyway, so assume APCS. */
8850 arm_abi = ARM_ABI_APCS;
8851 }
8852 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8853 {
8854 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8855
8856 switch (eabi_ver)
8857 {
8858 case EF_ARM_EABI_UNKNOWN:
8859 /* Assume GNU tools. */
8860 arm_abi = ARM_ABI_APCS;
8861 break;
8862
8863 case EF_ARM_EABI_VER4:
8864 case EF_ARM_EABI_VER5:
8865 arm_abi = ARM_ABI_AAPCS;
8866 /* EABI binaries default to VFP float ordering.
8867 They may also contain build attributes that can
8868 be used to identify if the VFP argument-passing
8869 ABI is in use. */
8870 if (fp_model == ARM_FLOAT_AUTO)
8871 {
8872 #ifdef HAVE_ELF
8873 switch (bfd_elf_get_obj_attr_int (info.abfd,
8874 OBJ_ATTR_PROC,
8875 Tag_ABI_VFP_args))
8876 {
8877 case AEABI_VFP_args_base:
8878 /* "The user intended FP parameter/result
8879 passing to conform to AAPCS, base
8880 variant". */
8881 fp_model = ARM_FLOAT_SOFT_VFP;
8882 break;
8883 case AEABI_VFP_args_vfp:
8884 /* "The user intended FP parameter/result
8885 passing to conform to AAPCS, VFP
8886 variant". */
8887 fp_model = ARM_FLOAT_VFP;
8888 break;
8889 case AEABI_VFP_args_toolchain:
8890 /* "The user intended FP parameter/result
8891 passing to conform to tool chain-specific
8892 conventions" - we don't know any such
8893 conventions, so leave it as "auto". */
8894 break;
8895 case AEABI_VFP_args_compatible:
8896 /* "Code is compatible with both the base
8897 and VFP variants; the user did not permit
8898 non-variadic functions to pass FP
8899 parameters/results" - leave it as
8900 "auto". */
8901 break;
8902 default:
8903 /* Attribute value not mentioned in the
8904 November 2012 ABI, so leave it as
8905 "auto". */
8906 break;
8907 }
8908 #else
8909 fp_model = ARM_FLOAT_SOFT_VFP;
8910 #endif
8911 }
8912 break;
8913
8914 default:
8915 /* Leave it as "auto". */
8916 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8917 break;
8918 }
8919
8920 #ifdef HAVE_ELF
8921 /* Detect M-profile programs. This only works if the
8922 executable file includes build attributes; GCC does
8923 copy them to the executable, but e.g. RealView does
8924 not. */
8925 int attr_arch
8926 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8927 Tag_CPU_arch);
8928 int attr_profile
8929 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8930 Tag_CPU_arch_profile);
8931
8932 /* GCC specifies the profile for v6-M; RealView only
8933 specifies the profile for architectures starting with
8934 V7 (as opposed to architectures with a tag
8935 numerically greater than TAG_CPU_ARCH_V7). */
8936 if (!tdesc_has_registers (tdesc)
8937 && (attr_arch == TAG_CPU_ARCH_V6_M
8938 || attr_arch == TAG_CPU_ARCH_V6S_M
8939 || attr_profile == 'M'))
8940 is_m = 1;
8941 #endif
8942 }
8943
8944 if (fp_model == ARM_FLOAT_AUTO)
8945 {
8946 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8947 {
8948 case 0:
8949 /* Leave it as "auto". Strictly speaking this case
8950 means FPA, but almost nobody uses that now, and
8951 many toolchains fail to set the appropriate bits
8952 for the floating-point model they use. */
8953 break;
8954 case EF_ARM_SOFT_FLOAT:
8955 fp_model = ARM_FLOAT_SOFT_FPA;
8956 break;
8957 case EF_ARM_VFP_FLOAT:
8958 fp_model = ARM_FLOAT_VFP;
8959 break;
8960 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8961 fp_model = ARM_FLOAT_SOFT_VFP;
8962 break;
8963 }
8964 }
8965
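/* BE8 images have big-endian data but little-endian instructions,
   so disassemble code as little-endian.  */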
8966 if (e_flags & EF_ARM_BE8)
8967 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8968
8969 break;
8970
8971 default:
8972 /* Leave it as "auto". */
8973 break;
8974 }
8975 }
8976
8977 /* Check any target description for validity. */
8978 if (tdesc_has_registers (tdesc))
8979 {
8980 /* For most registers we require GDB's default names; but also allow
8981 the numeric names for sp / lr / pc, as a convenience. */
8982 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8983 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8984 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8985
8986 const struct tdesc_feature *feature;
8987 int valid_p;
8988
8989 feature = tdesc_find_feature (tdesc,
8990 "org.gnu.gdb.arm.core");
8991 if (feature == NULL)
8992 {
8993 feature = tdesc_find_feature (tdesc,
8994 "org.gnu.gdb.arm.m-profile");
8995 if (feature == NULL)
8996 return NULL;
8997 else
8998 is_m = 1;
8999 }
9000
9001 tdesc_data = tdesc_data_alloc ();
9002
9003 valid_p = 1;
9004 for (i = 0; i < ARM_SP_REGNUM; i++)
9005 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9006 arm_register_names[i]);
9007 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9008 ARM_SP_REGNUM,
9009 arm_sp_names);
9010 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9011 ARM_LR_REGNUM,
9012 arm_lr_names);
9013 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9014 ARM_PC_REGNUM,
9015 arm_pc_names);
9016 if (is_m)
9017 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9018 ARM_PS_REGNUM, "xpsr");
9019 else
9020 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9021 ARM_PS_REGNUM, "cpsr");
9022
9023 if (!valid_p)
9024 {
9025 tdesc_data_cleanup (tdesc_data);
9026 return NULL;
9027 }
9028
9029 feature = tdesc_find_feature (tdesc,
9030 "org.gnu.gdb.arm.fpa");
9031 if (feature != NULL)
9032 {
9033 valid_p = 1;
9034 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9035 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9036 arm_register_names[i]);
9037 if (!valid_p)
9038 {
9039 tdesc_data_cleanup (tdesc_data);
9040 return NULL;
9041 }
9042 }
9043 else
9044 have_fpa_registers = 0;
9045
9046 feature = tdesc_find_feature (tdesc,
9047 "org.gnu.gdb.xscale.iwmmxt");
9048 if (feature != NULL)
9049 {
9050 static const char *const iwmmxt_names[] = {
9051 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9052 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9053 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9054 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9055 };
9056
9057 valid_p = 1;
9058 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9059 valid_p
9060 &= tdesc_numbered_register (feature, tdesc_data, i,
9061 iwmmxt_names[i - ARM_WR0_REGNUM]);
9062
9063 /* Check for the control registers, but do not fail if they
9064 are missing. */
9065 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9066 tdesc_numbered_register (feature, tdesc_data, i,
9067 iwmmxt_names[i - ARM_WR0_REGNUM]);
9068
9069 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9070 valid_p
9071 &= tdesc_numbered_register (feature, tdesc_data, i,
9072 iwmmxt_names[i - ARM_WR0_REGNUM]);
9073
9074 if (!valid_p)
9075 {
9076 tdesc_data_cleanup (tdesc_data);
9077 return NULL;
9078 }
9079
9080 have_wmmx_registers = 1;
9081 }
9082
9083 /* If we have a VFP unit, check whether the single precision registers
9084 are present. If not, then we will synthesize them as pseudo
9085 registers. */
9086 feature = tdesc_find_feature (tdesc,
9087 "org.gnu.gdb.arm.vfp");
9088 if (feature != NULL)
9089 {
9090 static const char *const vfp_double_names[] = {
9091 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9092 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9093 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9094 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9095 };
9096
9097 /* Require the double precision registers. There must be either
9098 16 or 32. */
9099 valid_p = 1;
9100 for (i = 0; i < 32; i++)
9101 {
9102 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9103 ARM_D0_REGNUM + i,
9104 vfp_double_names[i]);
9105 if (!valid_p)
9106 break;
9107 }
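/* A description with only d0-d15 is still valid: in that case the loop
   above stops with I == 16, which the check below accepts.  */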
9108 if (!valid_p && i == 16)
9109 valid_p = 1;
9110
9111 /* Also require FPSCR. */
9112 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9113 ARM_FPSCR_REGNUM, "fpscr");
9114 if (!valid_p)
9115 {
9116 tdesc_data_cleanup (tdesc_data);
9117 return NULL;
9118 }
9119
9120 if (tdesc_unnumbered_register (feature, "s0") == 0)
9121 have_vfp_pseudos = 1;
9122
9123 vfp_register_count = i;
9124
9125 /* If we have VFP, also check for NEON. The architecture allows
9126 NEON without VFP (integer vector operations only), but GDB
9127 does not support that. */
9128 feature = tdesc_find_feature (tdesc,
9129 "org.gnu.gdb.arm.neon");
9130 if (feature != NULL)
9131 {
9132 /* NEON requires 32 double-precision registers. */
9133 if (i != 32)
9134 {
9135 tdesc_data_cleanup (tdesc_data);
9136 return NULL;
9137 }
9138
9139 /* If there are quad registers defined by the stub, use
9140 their type; otherwise (normally) provide them with
9141 the default type. */
9142 if (tdesc_unnumbered_register (feature, "q0") == 0)
9143 have_neon_pseudos = 1;
9144
9145 have_neon = 1;
9146 }
9147 }
9148 }
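/* For reference, a hypothetical target description fragment that would
   satisfy the checks above (the feature names are the ones tested here;
   the XML itself is only a sketch):

     <feature name="org.gnu.gdb.arm.core">
       <reg name="r0" bitsize="32"/>
       ...
       <reg name="sp" bitsize="32"/>
       <reg name="lr" bitsize="32"/>
       <reg name="pc" bitsize="32"/>
       <reg name="cpsr" bitsize="32"/>
     </feature>

   optionally followed by org.gnu.gdb.arm.fpa, org.gnu.gdb.xscale.iwmmxt,
   org.gnu.gdb.arm.vfp and org.gnu.gdb.arm.neon features.  */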
9149
9150 /* If there is already a candidate, use it. */
9151 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9152 best_arch != NULL;
9153 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9154 {
9155 if (arm_abi != ARM_ABI_AUTO
9156 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9157 continue;
9158
9159 if (fp_model != ARM_FLOAT_AUTO
9160 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9161 continue;
9162
9163 /* There are various other properties in tdep that we do not
9164 need to check here: those derived from a target description,
9165 since gdbarches with a different target description are
9166 automatically disqualified. */
9167
9168 /* Do check is_m, though, since it might come from the binary. */
9169 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9170 continue;
9171
9172 /* Found a match. */
9173 break;
9174 }
9175
9176 if (best_arch != NULL)
9177 {
9178 if (tdesc_data != NULL)
9179 tdesc_data_cleanup (tdesc_data);
9180 return best_arch->gdbarch;
9181 }
9182
9183 tdep = XCNEW (struct gdbarch_tdep);
9184 gdbarch = gdbarch_alloc (&info, tdep);
9185
9186 /* Record additional information about the architecture we are defining.
9187 These are gdbarch discriminators, like the OSABI. */
9188 tdep->arm_abi = arm_abi;
9189 tdep->fp_model = fp_model;
9190 tdep->is_m = is_m;
9191 tdep->have_fpa_registers = have_fpa_registers;
9192 tdep->have_wmmx_registers = have_wmmx_registers;
9193 gdb_assert (vfp_register_count == 0
9194 || vfp_register_count == 16
9195 || vfp_register_count == 32);
9196 tdep->vfp_register_count = vfp_register_count;
9197 tdep->have_vfp_pseudos = have_vfp_pseudos;
9198 tdep->have_neon_pseudos = have_neon_pseudos;
9199 tdep->have_neon = have_neon;
9200
9201 arm_register_g_packet_guesses (gdbarch);
9202
9203 /* Breakpoints. */
9204 switch (info.byte_order_for_code)
9205 {
9206 case BFD_ENDIAN_BIG:
9207 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9208 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9209 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9210 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9211
9212 break;
9213
9214 case BFD_ENDIAN_LITTLE:
9215 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9216 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9217 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9218 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9219
9220 break;
9221
9222 default:
9223 internal_error (__FILE__, __LINE__,
9224 _("arm_gdbarch_init: bad byte order for float format"));
9225 }
9226
9227 /* On ARM targets char defaults to unsigned. */
9228 set_gdbarch_char_signed (gdbarch, 0);
9229
9230 /* wchar_t is unsigned under the AAPCS. */
9231 if (tdep->arm_abi == ARM_ABI_AAPCS)
9232 set_gdbarch_wchar_signed (gdbarch, 0);
9233 else
9234 set_gdbarch_wchar_signed (gdbarch, 1);
9235
9236 /* Compute type alignment. */
9237 set_gdbarch_type_align (gdbarch, arm_type_align);
9238
9239 /* Note: for displaced stepping, this includes the breakpoint, and one word
9240 of additional scratch space. This setting isn't used for anything besides
9241 displaced stepping at present. */
9242 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9243
9244 /* This should be low enough for everything. */
9245 tdep->lowest_pc = 0x20;
9246 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9247
9248 /* The default, for both APCS and AAPCS, is to return small
9249 structures in registers. */
9250 tdep->struct_return = reg_struct_return;
9251
9252 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9253 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9254
9255 if (is_m)
9256 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9257
9258 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9259
9260 frame_base_set_default (gdbarch, &arm_normal_base);
9261
9262 /* Address manipulation. */
9263 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9264
9265 /* Advance PC across function entry code. */
9266 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9267
9268 /* Detect whether PC is at a point where the stack has been destroyed. */
9269 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9270
9271 /* Skip trampolines. */
9272 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9273
9274 /* The stack grows downward. */
9275 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9276
9277 /* Breakpoint manipulation. */
9278 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9279 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9280 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9281 arm_breakpoint_kind_from_current_state);
9282
9283 /* Information about registers, etc. */
9284 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9285 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9286 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9287 set_gdbarch_register_type (gdbarch, arm_register_type);
9288 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9289
9290 /* This "info float" is FPA-specific. Use the generic version if we
9291 do not have FPA. */
9292 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9293 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9294
9295 /* Internal <-> external register number maps. */
9296 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9297 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9298
9299 set_gdbarch_register_name (gdbarch, arm_register_name);
9300
9301 /* Returning results. */
9302 set_gdbarch_return_value (gdbarch, arm_return_value);
9303
9304 /* Disassembly. */
9305 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9306
9307 /* Minsymbol frobbing. */
9308 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9309 set_gdbarch_coff_make_msymbol_special (gdbarch,
9310 arm_coff_make_msymbol_special);
9311 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9312
9313 /* Thumb-2 IT block support. */
9314 set_gdbarch_adjust_breakpoint_address (gdbarch,
9315 arm_adjust_breakpoint_address);
9316
9317 /* Virtual tables. */
9318 set_gdbarch_vbit_in_delta (gdbarch, 1);
9319
9320 /* Hook in the ABI-specific overrides, if they have been registered. */
9321 gdbarch_init_osabi (info, gdbarch);
9322
9323 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9324
9325 /* Add some default predicates. */
9326 if (is_m)
9327 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9328 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9329 dwarf2_append_unwinders (gdbarch);
9330 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9331 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9332 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9333
9334 /* Now that we have tuned the configuration, set a few final things,
9335 based on what the OS ABI has told us. */
9336
9337 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9338 binaries are always marked. */
9339 if (tdep->arm_abi == ARM_ABI_AUTO)
9340 tdep->arm_abi = ARM_ABI_APCS;
9341
9342 /* Watchpoints are not steppable. */
9343 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9344
9345 /* We used to default to FPA for generic ARM, but almost nobody
9346 uses that now, and we now provide a way for the user to force
9347 the model. So default to the most useful variant. */
9348 if (tdep->fp_model == ARM_FLOAT_AUTO)
9349 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9350
9351 if (tdep->jb_pc >= 0)
9352 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9353
9354 /* Floating point sizes and format. */
9355 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9356 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9357 {
9358 set_gdbarch_double_format
9359 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9360 set_gdbarch_long_double_format
9361 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9362 }
9363 else
9364 {
9365 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9366 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9367 }
9368
9369 if (have_vfp_pseudos)
9370 {
9371 /* NOTE: These are the only pseudo registers used by
9372 the ARM target at the moment. If more are added, a
9373 little more care in numbering will be needed. */
9374
9375 int num_pseudos = 32;
9376 if (have_neon_pseudos)
9377 num_pseudos += 16;
9378 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9379 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9380 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9381 }
9382
9383 if (tdesc_data)
9384 {
9385 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9386
9387 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9388
9389 /* Override tdesc_register_type to adjust the types of VFP
9390 registers for NEON. */
9391 set_gdbarch_register_type (gdbarch, arm_register_type);
9392 }
9393
9394 /* Add standard register aliases. We add aliases even for those
9395 names which are used by the current architecture - it's simpler,
9396 and does no harm, since nothing ever lists user registers. */
9397 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9398 user_reg_add (gdbarch, arm_register_aliases[i].name,
9399 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9400
9401 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9402 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9403
9404 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9405
9406 return gdbarch;
9407 }
9408
9409 static void
9410 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9411 {
9412 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9413
9414 if (tdep == NULL)
9415 return;
9416
9417 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9418 (unsigned long) tdep->lowest_pc);
9419 }
9420
9421 #if GDB_SELF_TEST
9422 namespace selftests
9423 {
9424 static void arm_record_test (void);
9425 }
9426 #endif
9427
9428 void
9429 _initialize_arm_tdep (void)
9430 {
9431 long length;
9432 int i, j;
9433 char regdesc[1024], *rdptr = regdesc;
9434 size_t rest = sizeof (regdesc);
9435
9436 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9437
9438 /* Add ourselves to objfile event chain. */
9439 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9440
9441 /* Register an ELF OS ABI sniffer for ARM binaries. */
9442 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9443 bfd_target_elf_flavour,
9444 arm_elf_osabi_sniffer);
9445
9446 /* Add root prefix command for all "set arm"/"show arm" commands. */
9447 add_prefix_cmd ("arm", no_class, set_arm_command,
9448 _("Various ARM-specific commands."),
9449 &setarmcmdlist, "set arm ", 0, &setlist);
9450
9451 add_prefix_cmd ("arm", no_class, show_arm_command,
9452 _("Various ARM-specific commands."),
9453 &showarmcmdlist, "show arm ", 0, &showlist);
9454
9455
9456 arm_disassembler_options = xstrdup ("reg-names-std");
9457 const disasm_options_t *disasm_options
9458 = &disassembler_options_arm ()->options;
9459 int num_disassembly_styles = 0;
9460 for (i = 0; disasm_options->name[i] != NULL; i++)
9461 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9462 num_disassembly_styles++;
9463
9464 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9465 valid_disassembly_styles = XNEWVEC (const char *,
9466 num_disassembly_styles + 1);
9467 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9468 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9469 {
9470 size_t offset = strlen ("reg-names-");
9471 const char *style = disasm_options->name[i];
9472 valid_disassembly_styles[j++] = &style[offset];
9473 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9474 disasm_options->description[i]);
9475 rdptr += length;
9476 rest -= length;
9477 }
9478 /* Mark the end of valid options. */
9479 valid_disassembly_styles[num_disassembly_styles] = NULL;
9480
9481 /* Create the help text. */
9482 std::string helptext = string_printf ("%s%s%s",
9483 _("The valid values are:\n"),
9484 regdesc,
9485 _("The default is \"std\"."));
9486
9487 add_setshow_enum_cmd ("disassembler", no_class,
9488 valid_disassembly_styles, &disassembly_style,
9489 _("Set the disassembly style."),
9490 _("Show the disassembly style."),
9491 helptext.c_str (),
9492 set_disassembly_style_sfunc,
9493 show_disassembly_style_sfunc,
9494 &setarmcmdlist, &showarmcmdlist);
9495
9496 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9497 _("Set usage of ARM 32-bit mode."),
9498 _("Show usage of ARM 32-bit mode."),
9499 _("When off, a 26-bit PC will be used."),
9500 NULL,
9501 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9502 mode is %s. */
9503 &setarmcmdlist, &showarmcmdlist);
9504
9505 /* Add a command to allow the user to force the FPU model. */
9506 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9507 _("Set the floating point type."),
9508 _("Show the floating point type."),
9509 _("auto - Determine the FP typefrom the OS-ABI.\n\
9510 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9511 fpa - FPA co-processor (GCC compiled).\n\
9512 softvfp - Software FP with pure-endian doubles.\n\
9513 vfp - VFP co-processor."),
9514 set_fp_model_sfunc, show_fp_model,
9515 &setarmcmdlist, &showarmcmdlist);
9516
9517 /* Add a command to allow the user to force the ABI. */
9518 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9519 _("Set the ABI."),
9520 _("Show the ABI."),
9521 NULL, arm_set_abi, arm_show_abi,
9522 &setarmcmdlist, &showarmcmdlist);
9523
9524 /* Add two commands to allow the user to force the assumed
9525 execution mode. */
9526 add_setshow_enum_cmd ("fallback-mode", class_support,
9527 arm_mode_strings, &arm_fallback_mode_string,
9528 _("Set the mode assumed when symbols are unavailable."),
9529 _("Show the mode assumed when symbols are unavailable."),
9530 NULL, NULL, arm_show_fallback_mode,
9531 &setarmcmdlist, &showarmcmdlist);
9532 add_setshow_enum_cmd ("force-mode", class_support,
9533 arm_mode_strings, &arm_force_mode_string,
9534 _("Set the mode assumed even when symbols are available."),
9535 _("Show the mode assumed even when symbols are available."),
9536 NULL, NULL, arm_show_force_mode,
9537 &setarmcmdlist, &showarmcmdlist);
9538
9539 /* Debugging flag. */
9540 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9541 _("Set ARM debugging."),
9542 _("Show ARM debugging."),
9543 _("When on, arm-specific debugging is enabled."),
9544 NULL,
9545 NULL, /* FIXME: i18n: ARM debugging is %s. */
9546 &setdebuglist, &showdebuglist);
9547
9548 #if GDB_SELF_TEST
9549 selftests::register_test ("arm-record", selftests::arm_record_test);
9550 #endif
9551
9552 }
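/* Typical use of the commands registered above (the value strings other
   than the disassembler style are illustrative):

     (gdb) set arm disassembler std
     (gdb) set arm fpu vfp
     (gdb) set arm fallback-mode thumb
     (gdb) set debug arm on
*/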
9553
9554 /* ARM-reversible process record data structures. */
9555
9556 #define ARM_INSN_SIZE_BYTES 4
9557 #define THUMB_INSN_SIZE_BYTES 2
9558 #define THUMB2_INSN_SIZE_BYTES 4
9559
9560
9561 /* Position of the bit within a 32-bit ARM instruction
9562 that defines whether the instruction is a load or store. */
9563 #define INSN_S_L_BIT_NUM 20
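/* Illustrative use, as in the record routines below: bit 20 is the L bit
   of the load/store encodings, so

     if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
       ... record the registers written by the load ...
     else
       ... record the memory written by the store ...

   distinguishes LDR-class from STR-class insns.  */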
9564
9565 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9566 do \
9567 { \
9568 unsigned int reg_len = LENGTH; \
9569 if (reg_len) \
9570 { \
9571 REGS = XNEWVEC (uint32_t, reg_len); \
9572 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9573 } \
9574 } \
9575 while (0)
9576
9577 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9578 do \
9579 { \
9580 unsigned int mem_len = LENGTH; \
9581 if (mem_len) \
9582 { \
9583 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9584 memcpy(&MEMS->len, &RECORD_BUF[0], \
9585 sizeof(struct arm_mem_r) * LENGTH); \
9586 } \
9587 } \
9588 while (0)
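/* A minimal usage sketch, mirroring the record routines below: each
   decode routine fills local scratch buffers and the two counters, then
   publishes them with the macros above:

     uint32_t record_buf[8], record_buf_mem[8];
     ... decode, filling the buffers and reg_rec_count/mem_rec_count ...
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
*/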
9589
9590 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
9591 #define INSN_RECORDED(ARM_RECORD) \
9592 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9593
9594 /* ARM memory record structure. */
9595 struct arm_mem_r
9596 {
9597 uint32_t len; /* Record length. */
9598 uint32_t addr; /* Memory address. */
9599 };
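/* Note that MEM_ALLOC above copies the flat uint32_t scratch buffer
   directly onto an array of this struct, so the scratch buffer must be
   laid out as consecutive { len, addr } pairs, e.g. in arm_record_strx
   below:

     record_buf_mem[0] = 4;                  -- len of first record
     record_buf_mem[1] = tgt_mem_addr;       -- addr of first record
     record_buf_mem[2] = 4;                  -- len of second record
     record_buf_mem[3] = tgt_mem_addr + 4;   -- addr of second record
*/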
9600
9601 /* An ARM instruction record contains the opcode and execution state of
9602 the current insn (before entry to decode_insn ()), and the list of
9603 to-be-modified registers and memory blocks (on return from
9604 decode_insn ()). */
9605
9606 typedef struct insn_decode_record_t
9607 {
9608 struct gdbarch *gdbarch;
9609 struct regcache *regcache;
9610 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9611 uint32_t arm_insn; /* Should accommodate thumb. */
9612 uint32_t cond; /* Condition code. */
9613 uint32_t opcode; /* Insn opcode. */
9614 uint32_t decode; /* Insn decode bits. */
9615 uint32_t mem_rec_count; /* No of mem records. */
9616 uint32_t reg_rec_count; /* No of reg records. */
9617 uint32_t *arm_regs; /* Registers to be saved for this record. */
9618 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9619 } insn_decode_record;
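/* Assumed flow, inferred from the record routines below: the caller fills
   in gdbarch, regcache, this_addr and arm_insn; a decode routine then sets
   opcode/decode as needed, records the to-be-modified registers and memory
   in arm_regs/arm_mems via REG_ALLOC/MEM_ALLOC together with the
   corresponding counts, and returns 0 on success or -1 on failure.  */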
9620
9621
9622 /* Checks ARM SBZ and SBO mandatory fields. */
9623
9624 static int
9625 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9626 {
9627 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9628
9629 if (!len)
9630 return 1;
9631
9632 if (!sbo)
9633 ones = ~ones;
9634
9635 while (ones)
9636 {
9637 if (!(ones & sbo))
9638 {
9639 return 0;
9640 }
9641 ones = ones >> 1;
9642 }
9643 return 1;
9644 }
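/* For example, with the operands used by the callers below,
   sbo_sbz (insn, 9, 4, 1) is intended to check that the four bits starting
   at bit 8 of INSN are all should-be-one, while sbo_sbz (insn, 1, 12, 0)
   checks that bits 0..11 are all should-be-zero.  */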
9645
9646 enum arm_record_result
9647 {
9648 ARM_RECORD_SUCCESS = 0,
9649 ARM_RECORD_FAILURE = 1
9650 };
9651
9652 typedef enum
9653 {
9654 ARM_RECORD_STRH=1,
9655 ARM_RECORD_STRD
9656 } arm_record_strx_t;
9657
9658 typedef enum
9659 {
9660 ARM_RECORD=1,
9661 THUMB_RECORD,
9662 THUMB2_RECORD
9663 } record_type_t;
9664
9665
9666 static int
9667 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9668 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9669 {
9670
9671 struct regcache *reg_cache = arm_insn_r->regcache;
9672 ULONGEST u_regval[2] = {0};
9673
9674 uint32_t reg_src1 = 0, reg_src2 = 0;
9675 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9676
9677 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9678 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9679
9680 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9681 {
9682 /* 1) Handle misc store, immediate offset. */
9683 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9684 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9685 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9686 regcache_raw_read_unsigned (reg_cache, reg_src1,
9687 &u_regval[0]);
9688 if (ARM_PC_REGNUM == reg_src1)
9689 {
9690 /* If R15 was used as Rn, the value read is the current PC + 8. */
9691 u_regval[0] = u_regval[0] + 8;
9692 }
9693 offset_8 = (immed_high << 4) | immed_low;
9694 /* Calculate target store address. */
9695 if (14 == arm_insn_r->opcode)
9696 {
9697 tgt_mem_addr = u_regval[0] + offset_8;
9698 }
9699 else
9700 {
9701 tgt_mem_addr = u_regval[0] - offset_8;
9702 }
9703 if (ARM_RECORD_STRH == str_type)
9704 {
9705 record_buf_mem[0] = 2;
9706 record_buf_mem[1] = tgt_mem_addr;
9707 arm_insn_r->mem_rec_count = 1;
9708 }
9709 else if (ARM_RECORD_STRD == str_type)
9710 {
9711 record_buf_mem[0] = 4;
9712 record_buf_mem[1] = tgt_mem_addr;
9713 record_buf_mem[2] = 4;
9714 record_buf_mem[3] = tgt_mem_addr + 4;
9715 arm_insn_r->mem_rec_count = 2;
9716 }
9717 }
9718 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9719 {
9720 /* 2) Store, register offset. */
9721 /* Get Rm. */
9722 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9723 /* Get Rn. */
9724 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9725 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9726 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9727 if (15 == reg_src2)
9728 {
9729 /* If R15 was used as Rn, the value read is the current PC + 8. */
9730 u_regval[0] = u_regval[0] + 8;
9731 }
9732 /* Calculate target store address, Rn +/- Rm, register offset. */
9733 if (12 == arm_insn_r->opcode)
9734 {
9735 tgt_mem_addr = u_regval[0] + u_regval[1];
9736 }
9737 else
9738 {
9739 tgt_mem_addr = u_regval[1] - u_regval[0];
9740 }
9741 if (ARM_RECORD_STRH == str_type)
9742 {
9743 record_buf_mem[0] = 2;
9744 record_buf_mem[1] = tgt_mem_addr;
9745 arm_insn_r->mem_rec_count = 1;
9746 }
9747 else if (ARM_RECORD_STRD == str_type)
9748 {
9749 record_buf_mem[0] = 4;
9750 record_buf_mem[1] = tgt_mem_addr;
9751 record_buf_mem[2] = 4;
9752 record_buf_mem[3] = tgt_mem_addr + 4;
9753 arm_insn_r->mem_rec_count = 2;
9754 }
9755 }
9756 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9757 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9758 {
9759 /* 3) Store, immediate pre-indexed. */
9760 /* 5) Store, immediate post-indexed. */
9761 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9762 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9763 offset_8 = (immed_high << 4) | immed_low;
9764 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9765 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9766 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9767 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9768 {
9769 tgt_mem_addr = u_regval[0] + offset_8;
9770 }
9771 else
9772 {
9773 tgt_mem_addr = u_regval[0] - offset_8;
9774 }
9775 if (ARM_RECORD_STRH == str_type)
9776 {
9777 record_buf_mem[0] = 2;
9778 record_buf_mem[1] = tgt_mem_addr;
9779 arm_insn_r->mem_rec_count = 1;
9780 }
9781 else if (ARM_RECORD_STRD == str_type)
9782 {
9783 record_buf_mem[0] = 4;
9784 record_buf_mem[1] = tgt_mem_addr;
9785 record_buf_mem[2] = 4;
9786 record_buf_mem[3] = tgt_mem_addr + 4;
9787 arm_insn_r->mem_rec_count = 2;
9788 }
9789 /* Record Rn also as it changes. */
9790 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9791 arm_insn_r->reg_rec_count = 1;
9792 }
9793 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9794 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9795 {
9796 /* 4) Store, register pre-indexed. */
9797 /* 6) Store, register post-indexed. */
9798 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9799 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9800 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9801 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9802 /* Calculate target store address, Rn +/- Rm, register offset. */
9803 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9804 {
9805 tgt_mem_addr = u_regval[0] + u_regval[1];
9806 }
9807 else
9808 {
9809 tgt_mem_addr = u_regval[1] - u_regval[0];
9810 }
9811 if (ARM_RECORD_STRH == str_type)
9812 {
9813 record_buf_mem[0] = 2;
9814 record_buf_mem[1] = tgt_mem_addr;
9815 arm_insn_r->mem_rec_count = 1;
9816 }
9817 else if (ARM_RECORD_STRD == str_type)
9818 {
9819 record_buf_mem[0] = 4;
9820 record_buf_mem[1] = tgt_mem_addr;
9821 record_buf_mem[2] = 4;
9822 record_buf_mem[3] = tgt_mem_addr + 4;
9823 arm_insn_r->mem_rec_count = 2;
9824 }
9825 /* Record Rn also as it changes. */
9826 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9827 arm_insn_r->reg_rec_count = 1;
9828 }
9829 return 0;
9830 }
9831
9832 /* Handling ARM extension space insns. */
9833
9834 static int
9835 arm_record_extension_space (insn_decode_record *arm_insn_r)
9836 {
9837 int ret = 0; /* Return value: -1: record failure; 0: success. */
9838 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9839 uint32_t record_buf[8], record_buf_mem[8];
9840 uint32_t reg_src1 = 0;
9841 struct regcache *reg_cache = arm_insn_r->regcache;
9842 ULONGEST u_regval = 0;
9843
9844 gdb_assert (!INSN_RECORDED(arm_insn_r));
9845 /* Handle unconditional insn extension space. */
9846
9847 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9848 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9849 if (arm_insn_r->cond)
9850 {
9851 /* PLD has no effect on architectural state; it just affects
9852 the caches. */
9853 if (5 == ((opcode1 & 0xE0) >> 5))
9854 {
9855 /* BLX(1) */
9856 record_buf[0] = ARM_PS_REGNUM;
9857 record_buf[1] = ARM_LR_REGNUM;
9858 arm_insn_r->reg_rec_count = 2;
9859 }
9860 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9861 }
9862
9863
9864 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9865 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9866 {
9867 ret = -1;
9868 /* Undefined instruction on ARM V5; need to handle if later
9869 versions define it. */
9870 }
9871
9872 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9873 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9874 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9875
9876 /* Handle arithmetic insn extension space. */
9877 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9878 && !INSN_RECORDED(arm_insn_r))
9879 {
9880 /* Handle MLA(S) and MUL(S). */
9881 if (in_inclusive_range (insn_op1, 0U, 3U))
9882 {
9883 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9884 record_buf[1] = ARM_PS_REGNUM;
9885 arm_insn_r->reg_rec_count = 2;
9886 }
9887 else if (in_inclusive_range (insn_op1, 4U, 15U))
9888 {
9889 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9890 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9891 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9892 record_buf[2] = ARM_PS_REGNUM;
9893 arm_insn_r->reg_rec_count = 3;
9894 }
9895 }
9896
9897 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9898 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9899 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9900
9901 /* Handle control insn extension space. */
9902
9903 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9904 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9905 {
9906 if (!bit (arm_insn_r->arm_insn,25))
9907 {
9908 if (!bits (arm_insn_r->arm_insn, 4, 7))
9909 {
9910 if ((0 == insn_op1) || (2 == insn_op1))
9911 {
9912 /* MRS. */
9913 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9914 arm_insn_r->reg_rec_count = 1;
9915 }
9916 else if (1 == insn_op1)
9917 {
9918 /* CPSR is going to be changed. */
9919 record_buf[0] = ARM_PS_REGNUM;
9920 arm_insn_r->reg_rec_count = 1;
9921 }
9922 else if (3 == insn_op1)
9923 {
9924 /* SPSR is going to be changed. */
9925 /* We need to get SPSR value, which is yet to be done. */
9926 return -1;
9927 }
9928 }
9929 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9930 {
9931 if (1 == insn_op1)
9932 {
9933 /* BX. */
9934 record_buf[0] = ARM_PS_REGNUM;
9935 arm_insn_r->reg_rec_count = 1;
9936 }
9937 else if (3 == insn_op1)
9938 {
9939 /* CLZ. */
9940 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9941 arm_insn_r->reg_rec_count = 1;
9942 }
9943 }
9944 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9945 {
9946 /* BLX. */
9947 record_buf[0] = ARM_PS_REGNUM;
9948 record_buf[1] = ARM_LR_REGNUM;
9949 arm_insn_r->reg_rec_count = 2;
9950 }
9951 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
9952 {
9953 /* QADD, QSUB, QDADD, QDSUB */
9954 record_buf[0] = ARM_PS_REGNUM;
9955 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9956 arm_insn_r->reg_rec_count = 2;
9957 }
9958 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
9959 {
9960 /* BKPT. */
9961 record_buf[0] = ARM_PS_REGNUM;
9962 record_buf[1] = ARM_LR_REGNUM;
9963 arm_insn_r->reg_rec_count = 2;
9964
9965 /* Save SPSR also; how? */
9966 return -1;
9967 }
9968 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
9969 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
9970 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
9971 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
9972 )
9973 {
9974 if (0 == insn_op1 || 1 == insn_op1)
9975 {
9976 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
9977 /* We don't do optimization for SMULW<y> where we
9978 need only Rd. */
9979 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9980 record_buf[1] = ARM_PS_REGNUM;
9981 arm_insn_r->reg_rec_count = 2;
9982 }
9983 else if (2 == insn_op1)
9984 {
9985 /* SMLAL<x><y>. */
9986 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9987 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
9988 arm_insn_r->reg_rec_count = 2;
9989 }
9990 else if (3 == insn_op1)
9991 {
9992 /* SMUL<x><y>. */
9993 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9994 arm_insn_r->reg_rec_count = 1;
9995 }
9996 }
9997 }
9998 else
9999 {
10000 /* MSR : immediate form. */
10001 if (1 == insn_op1)
10002 {
10003 /* CPSR is going to be changed. */
10004 record_buf[0] = ARM_PS_REGNUM;
10005 arm_insn_r->reg_rec_count = 1;
10006 }
10007 else if (3 == insn_op1)
10008 {
10009 /* SPSR is going to be changed. */
10010 /* We need to get SPSR value, which is yet to be done. */
10011 return -1;
10012 }
10013 }
10014 }
10015
10016 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10017 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10018 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10019
10020 /* Handle load/store insn extension space. */
10021
10022 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10023 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10024 && !INSN_RECORDED(arm_insn_r))
10025 {
10026 /* SWP/SWPB. */
10027 if (0 == insn_op1)
10028 {
10029 /* This insn changes a register and memory as well. */
10030 /* SWP or SWPB insn. */
10031 /* Get memory address given by Rn. */
10032 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10033 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10034 /* SWP insn swaps a word. */
10035 if (8 == arm_insn_r->opcode)
10036 {
10037 record_buf_mem[0] = 4;
10038 }
10039 else
10040 {
10041 /* SWPB insn swaps only a byte. */
10042 record_buf_mem[0] = 1;
10043 }
10044 record_buf_mem[1] = u_regval;
10045 arm_insn_r->mem_rec_count = 1;
10046 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10047 arm_insn_r->reg_rec_count = 1;
10048 }
10049 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10050 {
10051 /* STRH. */
10052 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10053 ARM_RECORD_STRH);
10054 }
10055 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10056 {
10057 /* LDRD. */
10058 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10059 record_buf[1] = record_buf[0] + 1;
10060 arm_insn_r->reg_rec_count = 2;
10061 }
10062 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10063 {
10064 /* STRD. */
10065 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10066 ARM_RECORD_STRD);
10067 }
10068 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10069 {
10070 /* LDRH, LDRSB, LDRSH. */
10071 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10072 arm_insn_r->reg_rec_count = 1;
10073 }
10074
10075 }
10076
10077 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10078 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10079 && !INSN_RECORDED(arm_insn_r))
10080 {
10081 ret = -1;
10082 /* Handle coprocessor insn extension space. */
10083 }
10084
10085 /* To be done for ARMv5 and later; as of now we return -1. */
10086 if (-1 == ret)
10087 return ret;
10088
10089 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10090 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10091
10092 return ret;
10093 }
10094
10095 /* Handling opcode 000 insns. */
10096
10097 static int
10098 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10099 {
10100 struct regcache *reg_cache = arm_insn_r->regcache;
10101 uint32_t record_buf[8], record_buf_mem[8];
10102 ULONGEST u_regval[2] = {0};
10103
10104 uint32_t reg_src1 = 0;
10105 uint32_t opcode1 = 0;
10106
10107 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10108 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10109 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10110
10111 if (!((opcode1 & 0x19) == 0x10))
10112 {
10113 /* Data-processing (register) and Data-processing (register-shifted
10114 register). */
10115 /* In all of the 11 shifter operand modes, the insn modifies the
10116 destination register, which is specified by bits 12-15. */
10117 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10118 record_buf[1] = ARM_PS_REGNUM;
10119 arm_insn_r->reg_rec_count = 2;
10120 }
10121 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10122 {
10123 /* Miscellaneous instructions */
10124
10125 if (3 == arm_insn_r->decode && 0x12 == opcode1
10126 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10127 {
10128 /* Handle BLX, branch and link/exchange. */
10129 if (9 == arm_insn_r->opcode)
10130 {
10131 /* The branch mode is chosen by setting the T bit of the CPSR from
10132 bit[0] of Rm, and R14 stores the return address. */
10133 record_buf[0] = ARM_PS_REGNUM;
10134 record_buf[1] = ARM_LR_REGNUM;
10135 arm_insn_r->reg_rec_count = 2;
10136 }
10137 }
10138 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10139 {
10140 /* Handle enhanced software breakpoint insn, BKPT. */
10141 /* CPSR is changed so that execution continues in ARM state, with
10142 normal interrupts disabled, entering abort mode. */
10143 /* The PC is set according to the high vector configuration. */
10144 /* If the user hits the breakpoint and then runs in reverse, we
10145 need to go back with the previous CPSR and
10146 Program Counter. */
10147 record_buf[0] = ARM_PS_REGNUM;
10148 record_buf[1] = ARM_LR_REGNUM;
10149 arm_insn_r->reg_rec_count = 2;
10150
10151 /* Save SPSR also; how? */
10152 return -1;
10153 }
10154 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10155 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10156 {
10157 /* Handle BX, branch and link/exchange. */
10158 /* Branch mode is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10159 record_buf[0] = ARM_PS_REGNUM;
10160 arm_insn_r->reg_rec_count = 1;
10161 }
10162 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10163 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10164 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10165 {
10166 /* Count leading zeros: CLZ. */
10167 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10168 arm_insn_r->reg_rec_count = 1;
10169 }
10170 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10171 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10172 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10173 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10174 {
10175 /* Handle MRS insn. */
10176 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10177 arm_insn_r->reg_rec_count = 1;
10178 }
10179 }
10180 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10181 {
10182 /* Multiply and multiply-accumulate */
10183
10184 /* Handle multiply instructions. */
10185 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10186 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10187 {
10188 /* Handle MLA and MUL. */
10189 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10190 record_buf[1] = ARM_PS_REGNUM;
10191 arm_insn_r->reg_rec_count = 2;
10192 }
10193 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10194 {
10195 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10196 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10197 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10198 record_buf[2] = ARM_PS_REGNUM;
10199 arm_insn_r->reg_rec_count = 3;
10200 }
10201 }
10202 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10203 {
10204 /* Synchronization primitives */
10205
10206 /* Handling SWP, SWPB. */
10207 /* This insn changes a register and memory as well. */
10208 /* SWP or SWPB insn. */
10209
10210 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10211 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10212 /* SWP insn swaps a word. */
10213 if (8 == arm_insn_r->opcode)
10214 {
10215 record_buf_mem[0] = 4;
10216 }
10217 else
10218 {
10219 /* SWPB insn swaps only a byte. */
10220 record_buf_mem[0] = 1;
10221 }
10222 record_buf_mem[1] = u_regval[0];
10223 arm_insn_r->mem_rec_count = 1;
10224 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10225 arm_insn_r->reg_rec_count = 1;
10226 }
10227 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10228 || 15 == arm_insn_r->decode)
10229 {
10230 if ((opcode1 & 0x12) == 2)
10231 {
10232 /* Extra load/store (unprivileged) */
10233 return -1;
10234 }
10235 else
10236 {
10237 /* Extra load/store */
10238 switch (bits (arm_insn_r->arm_insn, 5, 6))
10239 {
10240 case 1:
10241 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10242 {
10243 /* STRH (register), STRH (immediate) */
10244 arm_record_strx (arm_insn_r, &record_buf[0],
10245 &record_buf_mem[0], ARM_RECORD_STRH);
10246 }
10247 else if ((opcode1 & 0x05) == 0x1)
10248 {
10249 /* LDRH (register) */
10250 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10251 arm_insn_r->reg_rec_count = 1;
10252
10253 if (bit (arm_insn_r->arm_insn, 21))
10254 {
10255 /* Write back to Rn. */
10256 record_buf[arm_insn_r->reg_rec_count++]
10257 = bits (arm_insn_r->arm_insn, 16, 19);
10258 }
10259 }
10260 else if ((opcode1 & 0x05) == 0x5)
10261 {
10262 /* LDRH (immediate), LDRH (literal) */
10263 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10264
10265 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10266 arm_insn_r->reg_rec_count = 1;
10267
10268 if (rn != 15)
10269 {
10270 /* LDRH (immediate) */
10271 if (bit (arm_insn_r->arm_insn, 21))
10272 {
10273 /* Write back to Rn. */
10274 record_buf[arm_insn_r->reg_rec_count++] = rn;
10275 }
10276 }
10277 }
10278 else
10279 return -1;
10280 break;
10281 case 2:
10282 if ((opcode1 & 0x05) == 0x0)
10283 {
10284 /* LDRD (register) */
10285 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10286 record_buf[1] = record_buf[0] + 1;
10287 arm_insn_r->reg_rec_count = 2;
10288
10289 if (bit (arm_insn_r->arm_insn, 21))
10290 {
10291 /* Write back to Rn. */
10292 record_buf[arm_insn_r->reg_rec_count++]
10293 = bits (arm_insn_r->arm_insn, 16, 19);
10294 }
10295 }
10296 else if ((opcode1 & 0x05) == 0x1)
10297 {
10298 /* LDRSB (register) */
10299 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10300 arm_insn_r->reg_rec_count = 1;
10301
10302 if (bit (arm_insn_r->arm_insn, 21))
10303 {
10304 /* Write back to Rn. */
10305 record_buf[arm_insn_r->reg_rec_count++]
10306 = bits (arm_insn_r->arm_insn, 16, 19);
10307 }
10308 }
10309 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10310 {
10311 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10312 LDRSB (literal) */
10313 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10314
10315 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10316 arm_insn_r->reg_rec_count = 1;
10317
10318 if (rn != 15)
10319 {
10320 /* LDRD (immediate), LDRSB (immediate) */
10321 if (bit (arm_insn_r->arm_insn, 21))
10322 {
10323 /* Write back to Rn. */
10324 record_buf[arm_insn_r->reg_rec_count++] = rn;
10325 }
10326 }
10327 }
10328 else
10329 return -1;
10330 break;
10331 case 3:
10332 if ((opcode1 & 0x05) == 0x0)
10333 {
10334 /* STRD (register) */
10335 arm_record_strx (arm_insn_r, &record_buf[0],
10336 &record_buf_mem[0], ARM_RECORD_STRD);
10337 }
10338 else if ((opcode1 & 0x05) == 0x1)
10339 {
10340 /* LDRSH (register) */
10341 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10342 arm_insn_r->reg_rec_count = 1;
10343
10344 if (bit (arm_insn_r->arm_insn, 21))
10345 {
10346 /* Write back to Rn. */
10347 record_buf[arm_insn_r->reg_rec_count++]
10348 = bits (arm_insn_r->arm_insn, 16, 19);
10349 }
10350 }
10351 else if ((opcode1 & 0x05) == 0x4)
10352 {
10353 /* STRD (immediate) */
10354 arm_record_strx (arm_insn_r, &record_buf[0],
10355 &record_buf_mem[0], ARM_RECORD_STRD);
10356 }
10357 else if ((opcode1 & 0x05) == 0x5)
10358 {
10359 /* LDRSH (immediate), LDRSH (literal) */
10360 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10361 arm_insn_r->reg_rec_count = 1;
10362
10363 if (bit (arm_insn_r->arm_insn, 21))
10364 {
10365 /* Write back to Rn. */
10366 record_buf[arm_insn_r->reg_rec_count++]
10367 = bits (arm_insn_r->arm_insn, 16, 19);
10368 }
10369 }
10370 else
10371 return -1;
10372 break;
10373 default:
10374 return -1;
10375 }
10376 }
10377 }
10378 else
10379 {
10380 return -1;
10381 }
10382
10383 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10384 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10385 return 0;
10386 }
10387
10388 /* Handling opcode 001 insns. */
10389
10390 static int
10391 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10392 {
10393 uint32_t record_buf[8], record_buf_mem[8];
10394
10395 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10396 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10397
10398 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10399 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10400 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10401 )
10402 {
10403 /* Handle MSR insn. */
10404 if (9 == arm_insn_r->opcode)
10405 {
10406 /* CPSR is going to be changed. */
10407 record_buf[0] = ARM_PS_REGNUM;
10408 arm_insn_r->reg_rec_count = 1;
10409 }
10410 else
10411 {
10412 /* SPSR is going to be changed. */
10413 }
10414 }
10415 else if (arm_insn_r->opcode <= 15)
10416 {
10417 /* Normal data processing insns. */
10418 /* In all of the 11 shifter operand modes, the insn modifies the
10419 destination register, which is specified by bits 12-15. */
10420 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10421 record_buf[1] = ARM_PS_REGNUM;
10422 arm_insn_r->reg_rec_count = 2;
10423 }
10424 else
10425 {
10426 return -1;
10427 }
10428
10429 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10430 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10431 return 0;
10432 }
10433
10434 static int
10435 arm_record_media (insn_decode_record *arm_insn_r)
10436 {
10437 uint32_t record_buf[8];
10438
10439 switch (bits (arm_insn_r->arm_insn, 22, 24))
10440 {
10441 case 0:
10442 /* Parallel addition and subtraction, signed */
10443 case 1:
10444 /* Parallel addition and subtraction, unsigned */
10445 case 2:
10446 case 3:
10447 /* Packing, unpacking, saturation and reversal */
10448 {
10449 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10450
10451 record_buf[arm_insn_r->reg_rec_count++] = rd;
10452 }
10453 break;
10454
10455 case 4:
10456 case 5:
10457 /* Signed multiplies */
10458 {
10459 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10460 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10461
10462 record_buf[arm_insn_r->reg_rec_count++] = rd;
10463 if (op1 == 0x0)
10464 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10465 else if (op1 == 0x4)
10466 record_buf[arm_insn_r->reg_rec_count++]
10467 = bits (arm_insn_r->arm_insn, 12, 15);
10468 }
10469 break;
10470
10471 case 6:
10472 {
10473 if (bit (arm_insn_r->arm_insn, 21)
10474 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10475 {
10476 /* SBFX */
10477 record_buf[arm_insn_r->reg_rec_count++]
10478 = bits (arm_insn_r->arm_insn, 12, 15);
10479 }
10480 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10481 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10482 {
10483 /* USAD8 and USADA8 */
10484 record_buf[arm_insn_r->reg_rec_count++]
10485 = bits (arm_insn_r->arm_insn, 16, 19);
10486 }
10487 }
10488 break;
10489
10490 case 7:
10491 {
10492 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10493 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10494 {
10495 /* Permanently UNDEFINED */
10496 return -1;
10497 }
10498 else
10499 {
10500 /* BFC, BFI and UBFX */
10501 record_buf[arm_insn_r->reg_rec_count++]
10502 = bits (arm_insn_r->arm_insn, 12, 15);
10503 }
10504 }
10505 break;
10506
10507 default:
10508 return -1;
10509 }
10510
10511 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10512
10513 return 0;
10514 }
10515
10516 /* Handle ARM mode instructions with opcode 010. */
10517
10518 static int
10519 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10520 {
10521 struct regcache *reg_cache = arm_insn_r->regcache;
10522
10523 uint32_t reg_base, reg_dest;
10524 uint32_t offset_12, tgt_mem_addr;
10525 uint32_t record_buf[8], record_buf_mem[8];
10526 unsigned char wback;
10527 ULONGEST u_regval;
10528
10529 /* Calculate wback: the base is written back when P (bit 24) is 0 or W (bit 21) is 1. */
10530 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10531 || (bit (arm_insn_r->arm_insn, 21) == 1);
10532
10533 arm_insn_r->reg_rec_count = 0;
10534 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10535
10536 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10537 {
10538 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10539 and LDRT. */
10540
10541 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10542 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10543
10544 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10545 precedes a LDR instruction that loads into R15 (reg_dest), it
10546 emulates a branch and link instruction, and hence we need to save
10547 CPSR and PC as well. */
10548 if (ARM_PC_REGNUM == reg_dest)
10549 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10550
10551 /* If wback is true, also save the base register, which is going to be
10552 written to. */
10553 if (wback)
10554 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10555 }
10556 else
10557 {
10558 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10559
10560 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10561 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10562
10563 /* Handle bit U. */
10564 if (bit (arm_insn_r->arm_insn, 23))
10565 {
10566 /* U == 1: Add the offset. */
10567 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10568 }
10569 else
10570 {
10571 /* U == 0: subtract the offset. */
10572 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10573 }
10574
10575 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10576 bytes. */
10577 if (bit (arm_insn_r->arm_insn, 22))
10578 {
10579 /* STRB and STRBT: 1 byte. */
10580 record_buf_mem[0] = 1;
10581 }
10582 else
10583 {
10584 /* STR and STRT: 4 bytes. */
10585 record_buf_mem[0] = 4;
10586 }
10587
10588 /* Handle bit P. */
10589 if (bit (arm_insn_r->arm_insn, 24))
10590 record_buf_mem[1] = tgt_mem_addr;
10591 else
10592 record_buf_mem[1] = (uint32_t) u_regval;
10593
10594 arm_insn_r->mem_rec_count = 1;
10595
10596 /* If wback is true, also save the base register, which is going to be
10597 written to. */
10598 if (wback)
10599 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10600 }
10601
10602 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10603 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10604 return 0;
10605 }
10606
10607 /* Handling opcode 011 insns. */
10608
10609 static int
10610 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10611 {
10612 struct regcache *reg_cache = arm_insn_r->regcache;
10613
10614 uint32_t shift_imm = 0;
10615 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10616 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10617 uint32_t record_buf[8], record_buf_mem[8];
10618
10619 LONGEST s_word;
10620 ULONGEST u_regval[2];
10621
10622 if (bit (arm_insn_r->arm_insn, 4))
10623 return arm_record_media (arm_insn_r);
10624
10625 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10626 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10627
10628 /* Handle enhanced store insns and the LDRD DSP insn; the order below
10629 follows the addressing modes for store insns, starting with the
10630 STRH insn. */
10631
10632 /* LDR or STR? */
10633 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10634 {
10635 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10636 /* The LDR insn is capable of branching: if MOV LR, PC
10637 precedes an LDR insn that loads into R15, then
10638 it emulates a branch and link insn, and hence we
10639 need to save CPSR and PC as well. */
10640 if (15 != reg_dest)
10641 {
10642 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10643 arm_insn_r->reg_rec_count = 1;
10644 }
10645 else
10646 {
10647 record_buf[0] = reg_dest;
10648 record_buf[1] = ARM_PS_REGNUM;
10649 arm_insn_r->reg_rec_count = 2;
10650 }
10651 }
10652 else
10653 {
10654 if (! bits (arm_insn_r->arm_insn, 4, 11))
10655 {
10656 /* Store insn, register offset and register pre-indexed,
10657 register post-indexed. */
10658 /* Get Rm. */
10659 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10660 /* Get Rn. */
10661 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10662 regcache_raw_read_unsigned (reg_cache, reg_src1,
10663 &u_regval[0]);
10664 regcache_raw_read_unsigned (reg_cache, reg_src2,
10665 &u_regval[1]);
10666 if (15 == reg_src2)
10667 {
10668 /* If R15 was used as Rn, the value read is the current PC + 8. */
10669 /* Pre-indexed mode doesn't reach here; illegal insn. */
10670 u_regval[0] = u_regval[0] + 8;
10671 }
10672 /* Calculate target store address, Rn +/- Rm, register offset. */
10673 /* U == 1. */
10674 if (bit (arm_insn_r->arm_insn, 23))
10675 {
10676 tgt_mem_addr = u_regval[0] + u_regval[1];
10677 }
10678 else
10679 {
10680 tgt_mem_addr = u_regval[1] - u_regval[0];
10681 }
10682
10683 switch (arm_insn_r->opcode)
10684 {
10685 /* STR. */
10686 case 8:
10687 case 12:
10688 /* STR. */
10689 case 9:
10690 case 13:
10691 /* STRT. */
10692 case 1:
10693 case 5:
10694 /* STR. */
10695 case 0:
10696 case 4:
10697 record_buf_mem[0] = 4;
10698 break;
10699
10700 /* STRB. */
10701 case 10:
10702 case 14:
10703 /* STRB. */
10704 case 11:
10705 case 15:
10706 /* STRBT. */
10707 case 3:
10708 case 7:
10709 /* STRB. */
10710 case 2:
10711 case 6:
10712 record_buf_mem[0] = 1;
10713 break;
10714
10715 default:
10716 gdb_assert_not_reached ("no decoding pattern found");
10717 break;
10718 }
10719 record_buf_mem[1] = tgt_mem_addr;
10720 arm_insn_r->mem_rec_count = 1;
10721
10722 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10723 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10724 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10725 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10726 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10727 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10728 )
10729 {
10730 /* Rn is going to be changed in pre-indexed mode and
10731 post-indexed mode as well. */
10732 record_buf[0] = reg_src2;
10733 arm_insn_r->reg_rec_count = 1;
10734 }
10735 }
10736 else
10737 {
10738 /* Store insn, scaled register offset; scaled pre-indexed. */
10739 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10740 /* Get Rm. */
10741 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10742 /* Get Rn. */
10743 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10744 /* Get shift_imm. */
10745 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10746 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10747 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10748 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10749 /* Offset_12 used as shift. */
10750 switch (offset_12)
10751 {
10752 case 0:
10753 /* Offset_12 used as index. */
10754 offset_12 = u_regval[0] << shift_imm;
10755 break;
10756
10757 case 1:
10758 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10759 break;
10760
10761 case 2:
10762 if (!shift_imm)
10763 {
10764 if (bit (u_regval[0], 31))
10765 {
10766 offset_12 = 0xFFFFFFFF;
10767 }
10768 else
10769 {
10770 offset_12 = 0;
10771 }
10772 }
10773 else
10774 {
10775 /* This is an arithmetic shift. */
10776 offset_12 = s_word >> shift_imm;
10777 }
10778 break;
10779
10780 case 3:
10781 if (!shift_imm)
10782 {
10783 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10784 &u_regval[1]);
10785 /* Get C flag value and shift it by 31. */
10786 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10787 | (u_regval[0]) >> 1);
10788 }
10789 else
10790 {
10791 offset_12 = (u_regval[0] >> shift_imm) \
10792 | (u_regval[0] <<
10793 (32 - shift_imm));
10794 }
10795 break;
10796
10797 default:
10798 gdb_assert_not_reached ("no decoding pattern found");
10799 break;
10800 }
10801
10802 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10803 /* bit U set. */
10804 if (bit (arm_insn_r->arm_insn, 23))
10805 {
10806 tgt_mem_addr = u_regval[1] + offset_12;
10807 }
10808 else
10809 {
10810 tgt_mem_addr = u_regval[1] - offset_12;
10811 }
10812
10813 switch (arm_insn_r->opcode)
10814 {
10815 /* STR. */
10816 case 8:
10817 case 12:
10818 /* STR. */
10819 case 9:
10820 case 13:
10821 /* STRT. */
10822 case 1:
10823 case 5:
10824 /* STR. */
10825 case 0:
10826 case 4:
10827 record_buf_mem[0] = 4;
10828 break;
10829
10830 /* STRB. */
10831 case 10:
10832 case 14:
10833 /* STRB. */
10834 case 11:
10835 case 15:
10836 /* STRBT. */
10837 case 3:
10838 case 7:
10839 /* STRB. */
10840 case 2:
10841 case 6:
10842 record_buf_mem[0] = 1;
10843 break;
10844
10845 default:
10846 gdb_assert_not_reached ("no decoding pattern found");
10847 break;
10848 }
10849 record_buf_mem[1] = tgt_mem_addr;
10850 arm_insn_r->mem_rec_count = 1;
10851
10852 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10853 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10854 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10855 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10856 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10857 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10858 )
10859 {
10860 /* Rn is going to be changed in register scaled pre-indexed
10861 mode and scaled post-indexed mode. */
10862 record_buf[0] = reg_src2;
10863 arm_insn_r->reg_rec_count = 1;
10864 }
10865 }
10866 }
10867
10868 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10869 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10870 return 0;
10871 }
10872
10873 /* Handle ARM mode instructions with opcode 100. */
10874
10875 static int
10876 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10877 {
10878 struct regcache *reg_cache = arm_insn_r->regcache;
10879 uint32_t register_count = 0, register_bits;
10880 uint32_t reg_base, addr_mode;
10881 uint32_t record_buf[24], record_buf_mem[48];
10882 uint32_t wback;
10883 ULONGEST u_regval;
10884
10885 /* Fetch the list of registers. */
10886 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10887 arm_insn_r->reg_rec_count = 0;
10888
10889 /* Fetch the base register that contains the address we are loading data
10890 to. */
10891 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10892
10893 /* Calculate wback. */
10894 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10895
10896 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10897 {
10898 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10899
10900 /* Find out which registers are going to be loaded from memory. */
10901 while (register_bits)
10902 {
10903 if (register_bits & 0x00000001)
10904 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10905 register_bits = register_bits >> 1;
10906 register_count++;
10907 }
10908
10909
10910 /* If wback is true, also save the base register, which is going to be
10911 written to. */
10912 if (wback)
10913 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10914
10915 /* Save the CPSR register. */
10916 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10917 }
10918 else
10919 {
10920 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10921
10922 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
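/* Bits 23-24 are the U (add) and P (pre-index) bits; together they select one of the four STM addressing modes handled in the switch below. */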
10923
10924 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10925
10926 /* Find out how many registers are going to be stored to memory. */
10927 while (register_bits)
10928 {
10929 if (register_bits & 0x00000001)
10930 register_count++;
10931 register_bits = register_bits >> 1;
10932 }
10933
10934 switch (addr_mode)
10935 {
10936 /* STMDA (STMED): Decrement after. */
10937 case 0:
10938 record_buf_mem[1] = (uint32_t) u_regval
10939 - register_count * ARM_INT_REGISTER_SIZE + 4;
10940 break;
10941 /* STM (STMIA, STMEA): Increment after. */
10942 case 1:
10943 record_buf_mem[1] = (uint32_t) u_regval;
10944 break;
10945 /* STMDB (STMFD): Decrement before. */
10946 case 2:
10947 record_buf_mem[1] = (uint32_t) u_regval
10948 - register_count * ARM_INT_REGISTER_SIZE;
10949 break;
10950 /* STMIB (STMFA): Increment before. */
10951 case 3:
10952 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
10953 break;
10954 default:
10955 gdb_assert_not_reached ("no decoding pattern found");
10956 break;
10957 }
10958
10959 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
10960 arm_insn_r->mem_rec_count = 1;
10961
10962 /* If wback is true, also save the base register, which is going to be
10963 written to. */
10964 if (wback)
10965 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10966 }
10967
10968 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10969 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10970 return 0;
10971 }
10972
10973 /* Handling opcode 101 insns. */
10974
10975 static int
10976 arm_record_b_bl (insn_decode_record *arm_insn_r)
10977 {
10978 uint32_t record_buf[8];
10979
10980 /* Handle B, BL, BLX(1) insns. */
10981 /* B simply branches so we do nothing here. */
10982 /* Note: BLX(1) doesn't fall here; instead it falls into the
10983 extension space. */
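/* Bit 24 is the link bit; only BL writes the return address to LR. */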
10984 if (bit (arm_insn_r->arm_insn, 24))
10985 {
10986 record_buf[0] = ARM_LR_REGNUM;
10987 arm_insn_r->reg_rec_count = 1;
10988 }
10989
10990 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10991
10992 return 0;
10993 }
10994
10995 static int
10996 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10997 {
10998 printf_unfiltered (_("Process record does not support instruction "
10999 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11000 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11001
11002 return -1;
11003 }
11004
11005 /* Record handler for vector data transfer instructions. */
11006
11007 static int
11008 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11009 {
11010 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11011 uint32_t record_buf[4];
11012
11013 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11014 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11015 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11016 bit_l = bit (arm_insn_r->arm_insn, 20);
11017 bit_c = bit (arm_insn_r->arm_insn, 8);
11018
11019 /* Handle VMOV instruction. */
11020 if (bit_l && bit_c)
11021 {
11022 record_buf[0] = reg_t;
11023 arm_insn_r->reg_rec_count = 1;
11024 }
11025 else if (bit_l && !bit_c)
11026 {
11027 /* Handle VMOV instruction. */
11028 if (bits_a == 0x00)
11029 {
11030 record_buf[0] = reg_t;
11031 arm_insn_r->reg_rec_count = 1;
11032 }
11033 /* Handle VMRS instruction. */
11034 else if (bits_a == 0x07)
11035 {
11036 if (reg_t == 15)
11037 reg_t = ARM_PS_REGNUM;
11038
11039 record_buf[0] = reg_t;
11040 arm_insn_r->reg_rec_count = 1;
11041 }
11042 }
11043 else if (!bit_l && !bit_c)
11044 {
11045 /* Handle VMOV instruction. */
11046 if (bits_a == 0x00)
11047 {
11048 record_buf[0] = ARM_D0_REGNUM + reg_v;
11049
11050 arm_insn_r->reg_rec_count = 1;
11051 }
11052 /* Handle VMSR instruction. */
11053 else if (bits_a == 0x07)
11054 {
11055 record_buf[0] = ARM_FPSCR_REGNUM;
11056 arm_insn_r->reg_rec_count = 1;
11057 }
11058 }
11059 else if (!bit_l && bit_c)
11060 {
11061 /* Handle VMOV instruction. */
11062 if (!(bits_a & 0x04))
11063 {
11064 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11065 + ARM_D0_REGNUM;
11066 arm_insn_r->reg_rec_count = 1;
11067 }
11068 /* Handle VDUP instruction. */
11069 else
11070 {
11071 if (bit (arm_insn_r->arm_insn, 21))
11072 {
11073 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11074 record_buf[0] = reg_v + ARM_D0_REGNUM;
11075 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11076 arm_insn_r->reg_rec_count = 2;
11077 }
11078 else
11079 {
11080 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11081 record_buf[0] = reg_v + ARM_D0_REGNUM;
11082 arm_insn_r->reg_rec_count = 1;
11083 }
11084 }
11085 }
11086
11087 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11088 return 0;
11089 }
11090
11091 /* Record handler for extension register load/store instructions. */
11092
11093 static int
11094 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11095 {
11096 uint32_t opcode, single_reg;
11097 uint8_t op_vldm_vstm;
11098 uint32_t record_buf[8], record_buf_mem[128];
11099 ULONGEST u_regval = 0;
11100
11101 struct regcache *reg_cache = arm_insn_r->regcache;
11102
11103 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11104 single_reg = !bit (arm_insn_r->arm_insn, 8);
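/* Bit 8 is the low bit of the coprocessor number: CP10 (bit clear) transfers single-precision S registers, CP11 (bit set) transfers double-precision D registers. */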
11105 op_vldm_vstm = opcode & 0x1b;
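/* Mask off bit 2 of OPCODE (the D bit, bit 22 of the instruction) so the P/U/W/L pattern can be matched regardless of the top bit of the register number. */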
11106
11107 /* Handle VMOV instructions. */
11108 if ((opcode & 0x1e) == 0x04)
11109 {
11110 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11111 {
11112 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11113 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11114 arm_insn_r->reg_rec_count = 2;
11115 }
11116 else
11117 {
11118 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11119 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11120
11121 if (single_reg)
11122 {
11123 /* The first S register number m is REG_M:M (M is bit 5),
11124 the corresponding D register number is REG_M:M / 2, which
11125 is REG_M. */
11126 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11127 /* The second S register number is REG_M:M + 1, the
11128 corresponding D register number is (REG_M:M + 1) / 2.
11129 IOW, if bit M is 1, the first and second S registers
11130 are mapped to different D registers, otherwise, they are
11131 in the same D register. */
11132 if (bit_m)
11133 {
11134 record_buf[arm_insn_r->reg_rec_count++]
11135 = ARM_D0_REGNUM + reg_m + 1;
11136 }
11137 }
11138 else
11139 {
11140 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11141 arm_insn_r->reg_rec_count = 1;
11142 }
11143 }
11144 }
11145 /* Handle VSTM and VPUSH instructions. */
11146 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11147 || op_vldm_vstm == 0x12)
11148 {
11149 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11150 uint32_t memory_index = 0;
11151
11152 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11153 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11154 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11155 imm_off32 = imm_off8 << 2;
11156 memory_count = imm_off8;
11157
11158 if (bit (arm_insn_r->arm_insn, 23))
11159 start_address = u_regval;
11160 else
11161 start_address = u_regval - imm_off32;
11162
11163 if (bit (arm_insn_r->arm_insn, 21))
11164 {
11165 record_buf[0] = reg_rn;
11166 arm_insn_r->reg_rec_count = 1;
11167 }
11168
11169 while (memory_count > 0)
11170 {
11171 if (single_reg)
11172 {
11173 record_buf_mem[memory_index] = 4;
11174 record_buf_mem[memory_index + 1] = start_address;
11175 start_address = start_address + 4;
11176 memory_index = memory_index + 2;
11177 }
11178 else
11179 {
11180 record_buf_mem[memory_index] = 4;
11181 record_buf_mem[memory_index + 1] = start_address;
11182 record_buf_mem[memory_index + 2] = 4;
11183 record_buf_mem[memory_index + 3] = start_address + 4;
11184 start_address = start_address + 8;
11185 memory_index = memory_index + 4;
11186 }
11187 memory_count--;
11188 }
11189 arm_insn_r->mem_rec_count = (memory_index >> 1);
11190 }
11191 /* Handle VLDM instructions. */
11192 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11193 || op_vldm_vstm == 0x13)
11194 {
11195 uint32_t reg_count, reg_vd;
11196 uint32_t reg_index = 0;
11197 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11198
11199 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11200 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11201
11202 /* REG_VD is the first D register number. If the instruction
11203 loads memory to S registers (SINGLE_REG is TRUE), the register
11204 number is (REG_VD << 1 | bit D), so the corresponding D
11205 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11206 if (!single_reg)
11207 reg_vd = reg_vd | (bit_d << 4);
11208
11209 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11210 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11211
11212 /* If the instruction loads memory to D register, REG_COUNT should
11213 be divided by 2, according to the ARM Architecture Reference
11214 Manual. If the instruction loads memory to S register, divide by
11215 2 as well because two S registers are mapped to one D register. */
11216 reg_count = reg_count / 2;
11217 if (single_reg && bit_d)
11218 {
11219 /* Increase the register count if S register list starts from
11220 an odd number (bit d is one). */
11221 reg_count++;
11222 }
11223
11224 while (reg_count > 0)
11225 {
11226 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11227 reg_count--;
11228 }
11229 arm_insn_r->reg_rec_count = reg_index;
11230 }
11231 /* VSTR Vector store register. */
11232 else if ((opcode & 0x13) == 0x10)
11233 {
11234 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11235 uint32_t memory_index = 0;
11236
11237 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11238 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11239 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11240 imm_off32 = imm_off8 << 2;
11241
11242 if (bit (arm_insn_r->arm_insn, 23))
11243 start_address = u_regval + imm_off32;
11244 else
11245 start_address = u_regval - imm_off32;
11246
11247 if (single_reg)
11248 {
11249 record_buf_mem[memory_index] = 4;
11250 record_buf_mem[memory_index + 1] = start_address;
11251 arm_insn_r->mem_rec_count = 1;
11252 }
11253 else
11254 {
11255 record_buf_mem[memory_index] = 4;
11256 record_buf_mem[memory_index + 1] = start_address;
11257 record_buf_mem[memory_index + 2] = 4;
11258 record_buf_mem[memory_index + 3] = start_address + 4;
11259 arm_insn_r->mem_rec_count = 2;
11260 }
11261 }
11262 /* VLDR Vector load register. */
11263 else if ((opcode & 0x13) == 0x11)
11264 {
11265 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11266
11267 if (!single_reg)
11268 {
11269 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11270 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11271 }
11272 else
11273 {
11274 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11275 /* Record register D rather than pseudo register S. */
11276 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11277 }
11278 arm_insn_r->reg_rec_count = 1;
11279 }
11280
11281 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11282 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11283 return 0;
11284 }
11285
11286 /* Record handler for arm/thumb mode VFP data processing instructions. */
11287
11288 static int
11289 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11290 {
11291 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11292 uint32_t record_buf[4];
11293 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11294 enum insn_types curr_insn_type = INSN_INV;
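/* The INSN_T* values select how the destination is recorded in the switch at the end of this function: T0 records a D register pair, T1 a single D register, T2 a single-precision destination, and T3 only FPSCR. */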
11295
11296 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11297 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11298 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11299 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11300 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11301 bit_d = bit (arm_insn_r->arm_insn, 22);
11302 /* Mask off the "D" bit. */
11303 opc1 = opc1 & ~0x04;
11304
11305 /* Handle VMLA, VMLS. */
11306 if (opc1 == 0x00)
11307 {
11308 if (bit (arm_insn_r->arm_insn, 10))
11309 {
11310 if (bit (arm_insn_r->arm_insn, 6))
11311 curr_insn_type = INSN_T0;
11312 else
11313 curr_insn_type = INSN_T1;
11314 }
11315 else
11316 {
11317 if (dp_op_sz)
11318 curr_insn_type = INSN_T1;
11319 else
11320 curr_insn_type = INSN_T2;
11321 }
11322 }
11323 /* Handle VNMLA, VNMLS, VNMUL. */
11324 else if (opc1 == 0x01)
11325 {
11326 if (dp_op_sz)
11327 curr_insn_type = INSN_T1;
11328 else
11329 curr_insn_type = INSN_T2;
11330 }
11331 /* Handle VMUL. */
11332 else if (opc1 == 0x02 && !(opc3 & 0x01))
11333 {
11334 if (bit (arm_insn_r->arm_insn, 10))
11335 {
11336 if (bit (arm_insn_r->arm_insn, 6))
11337 curr_insn_type = INSN_T0;
11338 else
11339 curr_insn_type = INSN_T1;
11340 }
11341 else
11342 {
11343 if (dp_op_sz)
11344 curr_insn_type = INSN_T1;
11345 else
11346 curr_insn_type = INSN_T2;
11347 }
11348 }
11349 /* Handle VADD, VSUB. */
11350 else if (opc1 == 0x03)
11351 {
11352 if (!bit (arm_insn_r->arm_insn, 9))
11353 {
11354 if (bit (arm_insn_r->arm_insn, 6))
11355 curr_insn_type = INSN_T0;
11356 else
11357 curr_insn_type = INSN_T1;
11358 }
11359 else
11360 {
11361 if (dp_op_sz)
11362 curr_insn_type = INSN_T1;
11363 else
11364 curr_insn_type = INSN_T2;
11365 }
11366 }
11367 /* Handle VDIV. */
11368 else if (opc1 == 0x08)
11369 {
11370 if (dp_op_sz)
11371 curr_insn_type = INSN_T1;
11372 else
11373 curr_insn_type = INSN_T2;
11374 }
11375 /* Handle all other vfp data processing instructions. */
11376 else if (opc1 == 0x0b)
11377 {
11378 /* Handle VMOV. */
11379 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11380 {
11381 if (bit (arm_insn_r->arm_insn, 4))
11382 {
11383 if (bit (arm_insn_r->arm_insn, 6))
11384 curr_insn_type = INSN_T0;
11385 else
11386 curr_insn_type = INSN_T1;
11387 }
11388 else
11389 {
11390 if (dp_op_sz)
11391 curr_insn_type = INSN_T1;
11392 else
11393 curr_insn_type = INSN_T2;
11394 }
11395 }
11396 /* Handle VNEG and VABS. */
11397 else if ((opc2 == 0x01 && opc3 == 0x01)
11398 || (opc2 == 0x00 && opc3 == 0x03))
11399 {
11400 if (!bit (arm_insn_r->arm_insn, 11))
11401 {
11402 if (bit (arm_insn_r->arm_insn, 6))
11403 curr_insn_type = INSN_T0;
11404 else
11405 curr_insn_type = INSN_T1;
11406 }
11407 else
11408 {
11409 if (dp_op_sz)
11410 curr_insn_type = INSN_T1;
11411 else
11412 curr_insn_type = INSN_T2;
11413 }
11414 }
11415 /* Handle VSQRT. */
11416 else if (opc2 == 0x01 && opc3 == 0x03)
11417 {
11418 if (dp_op_sz)
11419 curr_insn_type = INSN_T1;
11420 else
11421 curr_insn_type = INSN_T2;
11422 }
11423 /* Handle VCVT. */
11424 else if (opc2 == 0x07 && opc3 == 0x03)
11425 {
11426 if (!dp_op_sz)
11427 curr_insn_type = INSN_T1;
11428 else
11429 curr_insn_type = INSN_T2;
11430 }
11431 else if (opc3 & 0x01)
11432 {
11433 /* Handle VCVT. */
11434 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11435 {
11436 if (!bit (arm_insn_r->arm_insn, 18))
11437 curr_insn_type = INSN_T2;
11438 else
11439 {
11440 if (dp_op_sz)
11441 curr_insn_type = INSN_T1;
11442 else
11443 curr_insn_type = INSN_T2;
11444 }
11445 }
11446 /* Handle VCVT. */
11447 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11448 {
11449 if (dp_op_sz)
11450 curr_insn_type = INSN_T1;
11451 else
11452 curr_insn_type = INSN_T2;
11453 }
11454 /* Handle VCVTB, VCVTT. */
11455 else if ((opc2 & 0x0e) == 0x02)
11456 curr_insn_type = INSN_T2;
11457 /* Handle VCMP, VCMPE. */
11458 else if ((opc2 & 0x0e) == 0x04)
11459 curr_insn_type = INSN_T3;
11460 }
11461 }
11462
11463 switch (curr_insn_type)
11464 {
11465 case INSN_T0:
11466 reg_vd = reg_vd | (bit_d << 4);
11467 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11468 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11469 arm_insn_r->reg_rec_count = 2;
11470 break;
11471
11472 case INSN_T1:
11473 reg_vd = reg_vd | (bit_d << 4);
11474 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11475 arm_insn_r->reg_rec_count = 1;
11476 break;
11477
11478 case INSN_T2:
11479 reg_vd = (reg_vd << 1) | bit_d;
11480 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11481 arm_insn_r->reg_rec_count = 1;
11482 break;
11483
11484 case INSN_T3:
11485 record_buf[0] = ARM_FPSCR_REGNUM;
11486 arm_insn_r->reg_rec_count = 1;
11487 break;
11488
11489 default:
11490 gdb_assert_not_reached ("no decoding pattern found");
11491 break;
11492 }
11493
11494 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11495 return 0;
11496 }
11497
11498 /* Handling opcode 110 insns. */
11499
11500 static int
11501 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11502 {
11503 uint32_t op1, op1_ebit, coproc;
11504
11505 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11506 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11507 op1_ebit = bit (arm_insn_r->arm_insn, 20);
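/* Coprocessor numbers 10 and 11 (0b101x) address the VFP/Advanced SIMD extension registers; any other coprocessor number is a generic coprocessor access. */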
11508
11509 if ((coproc & 0x0e) == 0x0a)
11510 {
11511 /* Handle extension register ld/st instructions. */
11512 if (!(op1 & 0x20))
11513 return arm_record_exreg_ld_st_insn (arm_insn_r);
11514
11515 /* 64-bit transfers between arm core and extension registers. */
11516 if ((op1 & 0x3e) == 0x04)
11517 return arm_record_exreg_ld_st_insn (arm_insn_r);
11518 }
11519 else
11520 {
11521 /* Handle coprocessor ld/st instructions. */
11522 if (!(op1 & 0x3a))
11523 {
11524 /* Store. */
11525 if (!op1_ebit)
11526 return arm_record_unsupported_insn (arm_insn_r);
11527 else
11528 /* Load. */
11529 return arm_record_unsupported_insn (arm_insn_r);
11530 }
11531
11532 /* Move to coprocessor from two arm core registers. */
11533 if (op1 == 0x4)
11534 return arm_record_unsupported_insn (arm_insn_r);
11535
11536 /* Move to two arm core registers from coprocessor. */
11537 if (op1 == 0x5)
11538 {
11539 uint32_t reg_t[2];
11540
11541 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11542 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11543 arm_insn_r->reg_rec_count = 2;
11544
11545 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11546 return 0;
11547 }
11548 }
11549 return arm_record_unsupported_insn (arm_insn_r);
11550 }
11551
11552 /* Handling opcode 111 insns. */
11553
11554 static int
11555 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11556 {
11557 uint32_t op, op1_ebit, coproc, bits_24_25;
11558 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11559 struct regcache *reg_cache = arm_insn_r->regcache;
11560
11561 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11562 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11563 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11564 op = bit (arm_insn_r->arm_insn, 4);
11565 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
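/* Bits 24-25 split this space: 0b11 is SVC/SWI, 0b10 covers CDP/MRC/MCR and the VFP data-processing and transfer forms, and 0b0x covers LDC/STC/MCRR/MRRC. */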
11566
11567 /* Handle arm SWI/SVC system call instructions. */
11568 if (bits_24_25 == 0x3)
11569 {
11570 if (tdep->arm_syscall_record != NULL)
11571 {
11572 ULONGEST svc_operand, svc_number;
11573
11574 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
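/* Under OABI the syscall number is encoded in the SVC immediate (biased by 0x900000); under EABI the immediate is zero and the number is passed in r7. */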
11575
11576 if (svc_operand) /* OABI. */
11577 svc_number = svc_operand - 0x900000;
11578 else /* EABI. */
11579 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11580
11581 return tdep->arm_syscall_record (reg_cache, svc_number);
11582 }
11583 else
11584 {
11585 printf_unfiltered (_("no syscall record support\n"));
11586 return -1;
11587 }
11588 }
11589 else if (bits_24_25 == 0x02)
11590 {
11591 if (op)
11592 {
11593 if ((coproc & 0x0e) == 0x0a)
11594 {
11595 /* 8, 16, and 32-bit transfer */
11596 return arm_record_vdata_transfer_insn (arm_insn_r);
11597 }
11598 else
11599 {
11600 if (op1_ebit)
11601 {
11602 /* MRC, MRC2 */
11603 uint32_t record_buf[1];
11604
11605 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11606 if (record_buf[0] == 15)
11607 record_buf[0] = ARM_PS_REGNUM;
11608
11609 arm_insn_r->reg_rec_count = 1;
11610 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11611 record_buf);
11612 return 0;
11613 }
11614 else
11615 {
11616 /* MCR, MCR2 */
11617 return -1;
11618 }
11619 }
11620 }
11621 else
11622 {
11623 if ((coproc & 0x0e) == 0x0a)
11624 {
11625 /* VFP data-processing instructions. */
11626 return arm_record_vfp_data_proc_insn (arm_insn_r);
11627 }
11628 else
11629 {
11630 /* CDP, CDP2 */
11631 return -1;
11632 }
11633 }
11634 }
11635 else
11636 {
11637 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11638
11639 if (op1 == 5)
11640 {
11641 if ((coproc & 0x0e) != 0x0a)
11642 {
11643 /* MRRC, MRRC2 */
11644 return -1;
11645 }
11646 }
11647 else if (op1 == 4 || op1 == 5)
11648 {
11649 if ((coproc & 0x0e) == 0x0a)
11650 {
11651 /* 64-bit transfers between ARM core and extension */
11652 return -1;
11653 }
11654 else if (op1 == 4)
11655 {
11656 /* MCRR, MCRR2 */
11657 return -1;
11658 }
11659 }
11660 else if (op1 == 0 || op1 == 1)
11661 {
11662 /* UNDEFINED */
11663 return -1;
11664 }
11665 else
11666 {
11667 if ((coproc & 0x0e) == 0x0a)
11668 {
11669 /* Extension register load/store */
11670 }
11671 else
11672 {
11673 /* STC, STC2, LDC, LDC2 */
11674 }
11675 return -1;
11676 }
11677 }
11678
11679 return -1;
11680 }
11681
11682 /* Handling opcode 000 insns. */
11683
11684 static int
11685 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11686 {
11687 uint32_t record_buf[8];
11688 uint32_t reg_src1 = 0;
11689
11690 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11691
11692 record_buf[0] = ARM_PS_REGNUM;
11693 record_buf[1] = reg_src1;
11694 thumb_insn_r->reg_rec_count = 2;
11695
11696 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11697
11698 return 0;
11699 }
11700
11701
11702 /* Handling opcode 001 insns. */
11703
11704 static int
11705 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11706 {
11707 uint32_t record_buf[8];
11708 uint32_t reg_src1 = 0;
11709
11710 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11711
11712 record_buf[0] = ARM_PS_REGNUM;
11713 record_buf[1] = reg_src1;
11714 thumb_insn_r->reg_rec_count = 2;
11715
11716 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11717
11718 return 0;
11719 }
11720
11721 /* Handling opcode 010 insns. */
11722
11723 static int
11724 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11725 {
11726 struct regcache *reg_cache = thumb_insn_r->regcache;
11727 uint32_t record_buf[8], record_buf_mem[8];
11728
11729 uint32_t reg_src1 = 0, reg_src2 = 0;
11730 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11731
11732 ULONGEST u_regval[2] = {0};
11733
11734 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11735
11736 if (bit (thumb_insn_r->arm_insn, 12))
11737 {
11738 /* Handle load/store register offset. */
11739 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11740
11741 if (in_inclusive_range (opB, 4U, 7U))
11742 {
11743 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11744 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11745 record_buf[0] = reg_src1;
11746 thumb_insn_r->reg_rec_count = 1;
11747 }
11748 else if (in_inclusive_range (opB, 0U, 2U))
11749 {
11750 /* STR(2), STRB(2), STRH(2) . */
11751 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11752 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11753 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11754 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11755 if (0 == opB)
11756 record_buf_mem[0] = 4; /* STR (2). */
11757 else if (2 == opB)
11758 record_buf_mem[0] = 1; /* STRB (2). */
11759 else if (1 == opB)
11760 record_buf_mem[0] = 2; /* STRH (2). */
11761 record_buf_mem[1] = u_regval[0] + u_regval[1];
11762 thumb_insn_r->mem_rec_count = 1;
11763 }
11764 }
11765 else if (bit (thumb_insn_r->arm_insn, 11))
11766 {
11767 /* Handle load from literal pool. */
11768 /* LDR(3). */
11769 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11770 record_buf[0] = reg_src1;
11771 thumb_insn_r->reg_rec_count = 1;
11772 }
11773 else if (opcode1)
11774 {
11775 /* Special data instructions and branch and exchange */
11776 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11777 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11778 if ((3 == opcode2) && (!opcode3))
11779 {
11780 /* Branch with exchange. */
11781 record_buf[0] = ARM_PS_REGNUM;
11782 thumb_insn_r->reg_rec_count = 1;
11783 }
11784 else
11785 {
11786 /* Format 8; special data processing insns. */
11787 record_buf[0] = ARM_PS_REGNUM;
11788 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11789 | bits (thumb_insn_r->arm_insn, 0, 2));
11790 thumb_insn_r->reg_rec_count = 2;
11791 }
11792 }
11793 else
11794 {
11795 /* Format 5; data processing insns. */
11796 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11797 if (bit (thumb_insn_r->arm_insn, 7))
11798 {
11799 reg_src1 = reg_src1 + 8;
11800 }
11801 record_buf[0] = ARM_PS_REGNUM;
11802 record_buf[1] = reg_src1;
11803 thumb_insn_r->reg_rec_count = 2;
11804 }
11805
11806 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11807 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11808 record_buf_mem);
11809
11810 return 0;
11811 }
11812
11813 /* Handling opcode 001 insns. */
11814
11815 static int
11816 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11817 {
11818 struct regcache *reg_cache = thumb_insn_r->regcache;
11819 uint32_t record_buf[8], record_buf_mem[8];
11820
11821 uint32_t reg_src1 = 0;
11822 uint32_t opcode = 0, immed_5 = 0;
11823
11824 ULONGEST u_regval = 0;
11825
11826 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11827
11828 if (opcode)
11829 {
11830 /* LDR(1). */
11831 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11832 record_buf[0] = reg_src1;
11833 thumb_insn_r->reg_rec_count = 1;
11834 }
11835 else
11836 {
11837 /* STR(1). */
11838 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11839 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11840 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11841 record_buf_mem[0] = 4;
11842 record_buf_mem[1] = u_regval + (immed_5 * 4);
11843 thumb_insn_r->mem_rec_count = 1;
11844 }
11845
11846 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11847 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11848 record_buf_mem);
11849
11850 return 0;
11851 }
11852
11853 /* Handling opcode 100 insns. */
11854
11855 static int
11856 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11857 {
11858 struct regcache *reg_cache = thumb_insn_r->regcache;
11859 uint32_t record_buf[8], record_buf_mem[8];
11860
11861 uint32_t reg_src1 = 0;
11862 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11863
11864 ULONGEST u_regval = 0;
11865
11866 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11867
11868 if (3 == opcode)
11869 {
11870 /* LDR(4). */
11871 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11872 record_buf[0] = reg_src1;
11873 thumb_insn_r->reg_rec_count = 1;
11874 }
11875 else if (1 == opcode)
11876 {
11877 /* LDRH(1). */
11878 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11879 record_buf[0] = reg_src1;
11880 thumb_insn_r->reg_rec_count = 1;
11881 }
11882 else if (2 == opcode)
11883 {
11884 /* STR(3). */
11885 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11886 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11887 record_buf_mem[0] = 4;
11888 record_buf_mem[1] = u_regval + (immed_8 * 4);
11889 thumb_insn_r->mem_rec_count = 1;
11890 }
11891 else if (0 == opcode)
11892 {
11893 /* STRH(1). */
11894 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11895 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11896 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11897 record_buf_mem[0] = 2;
11898 record_buf_mem[1] = u_regval + (immed_5 * 2);
11899 thumb_insn_r->mem_rec_count = 1;
11900 }
11901
11902 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11903 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11904 record_buf_mem);
11905
11906 return 0;
11907 }
11908
11909 /* Handling opcode 101 insns. */
11910
11911 static int
11912 thumb_record_misc (insn_decode_record *thumb_insn_r)
11913 {
11914 struct regcache *reg_cache = thumb_insn_r->regcache;
11915
11916 uint32_t opcode = 0;
11917 uint32_t register_bits = 0, register_count = 0;
11918 uint32_t index = 0, start_address = 0;
11919 uint32_t record_buf[24], record_buf_mem[48];
11920 uint32_t reg_src1;
11921
11922 ULONGEST u_regval = 0;
11923
11924 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11925
11926 if (opcode == 0 || opcode == 1)
11927 {
11928 /* ADR and ADD (SP plus immediate) */
11929
11930 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11931 record_buf[0] = reg_src1;
11932 thumb_insn_r->reg_rec_count = 1;
11933 }
11934 else
11935 {
11936 /* Miscellaneous 16-bit instructions */
11937 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11938
11939 switch (opcode2)
11940 {
11941 case 6:
11942 /* SETEND and CPS */
11943 break;
11944 case 0:
11945 /* ADD/SUB (SP plus immediate) */
11946 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11947 record_buf[0] = ARM_SP_REGNUM;
11948 thumb_insn_r->reg_rec_count = 1;
11949 break;
11950 case 1: /* fall through */
11951 case 3: /* fall through */
11952 case 9: /* fall through */
11953 case 11:
11954 /* CBNZ, CBZ */
11955 break;
11956 case 2:
11957 /* SXTH, SXTB, UXTH, UXTB */
11958 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11959 thumb_insn_r->reg_rec_count = 1;
11960 break;
11961 case 4: /* fall through */
11962 case 5:
11963 /* PUSH. */
11964 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11965 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11966 while (register_bits)
11967 {
11968 if (register_bits & 0x00000001)
11969 register_count++;
11970 register_bits = register_bits >> 1;
11971 }
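/* Bit 8 is the M bit: when it is set LR is pushed as well, so the stacked block is one word larger than the register list alone. */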
11972 start_address = u_regval - \
11973 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11974 thumb_insn_r->mem_rec_count = register_count;
11975 while (register_count)
11976 {
11977 record_buf_mem[(register_count * 2) - 1] = start_address;
11978 record_buf_mem[(register_count * 2) - 2] = 4;
11979 start_address = start_address + 4;
11980 register_count--;
11981 }
11982 record_buf[0] = ARM_SP_REGNUM;
11983 thumb_insn_r->reg_rec_count = 1;
11984 break;
11985 case 10:
11986 /* REV, REV16, REVSH */
11987 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11988 thumb_insn_r->reg_rec_count = 1;
11989 break;
11990 case 12: /* fall through */
11991 case 13:
11992 /* POP. */
11993 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11994 while (register_bits)
11995 {
11996 if (register_bits & 0x00000001)
11997 record_buf[index++] = register_count;
11998 register_bits = register_bits >> 1;
11999 register_count++;
12000 }
12001 record_buf[index++] = ARM_PS_REGNUM;
12002 record_buf[index++] = ARM_SP_REGNUM;
12003 thumb_insn_r->reg_rec_count = index;
12004 break;
12005 case 0xe:
12006 /* BKPT insn. */
12007 /* Handle enhanced software breakpoint insn, BKPT. */
12008 /* The CPSR is changed so that execution continues in ARM state, with
12009 normal interrupts disabled, in Abort mode. */
12010 /* The PC is then set according to the (high) vector configuration. */
12011 /* If the user hits the breakpoint and then reverses execution, we need to
12012 go back with the previous CPSR and Program Counter. */
12013 record_buf[0] = ARM_PS_REGNUM;
12014 record_buf[1] = ARM_LR_REGNUM;
12015 thumb_insn_r->reg_rec_count = 2;
12016 /* We need to save SPSR value, which is not yet done. */
12017 printf_unfiltered (_("Process record does not support instruction "
12018 "0x%0x at address %s.\n"),
12019 thumb_insn_r->arm_insn,
12020 paddress (thumb_insn_r->gdbarch,
12021 thumb_insn_r->this_addr));
12022 return -1;
12023
12024 case 0xf:
12025 /* If-Then, and hints */
12026 break;
12027 default:
12028 return -1;
12029 };
12030 }
12031
12032 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12033 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12034 record_buf_mem);
12035
12036 return 0;
12037 }
12038
12039 /* Handling opcode 110 insns. */
12040
12041 static int
12042 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12043 {
12044 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12045 struct regcache *reg_cache = thumb_insn_r->regcache;
12046
12047 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12048 uint32_t reg_src1 = 0;
12049 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12050 uint32_t index = 0, start_address = 0;
12051 uint32_t record_buf[24], record_buf_mem[48];
12052
12053 ULONGEST u_regval = 0;
12054
12055 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12056 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12057
12058 if (1 == opcode2)
12059 {
12060
12061 /* LDMIA. */
12062 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12063 /* Get Rn. */
12064 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12065 while (register_bits)
12066 {
12067 if (register_bits & 0x00000001)
12068 record_buf[index++] = register_count;
12069 register_bits = register_bits >> 1;
12070 register_count++;
12071 }
12072 record_buf[index++] = reg_src1;
12073 thumb_insn_r->reg_rec_count = index;
12074 }
12075 else if (0 == opcode2)
12076 {
12077 /* Handle STMIA. */
12078 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12079 /* Get Rn. */
12080 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12081 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12082 while (register_bits)
12083 {
12084 if (register_bits & 0x00000001)
12085 register_count++;
12086 register_bits = register_bits >> 1;
12087 }
12088 start_address = u_regval;
12089 thumb_insn_r->mem_rec_count = register_count;
12090 while (register_count)
12091 {
12092 record_buf_mem[(register_count * 2) - 1] = start_address;
12093 record_buf_mem[(register_count * 2) - 2] = 4;
12094 start_address = start_address + 4;
12095 register_count--;
12096 }
12097 }
12098 else if (0x1F == opcode1)
12099 {
12100 /* Handle arm syscall insn. */
12101 if (tdep->arm_syscall_record != NULL)
12102 {
12103 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12104 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12105 }
12106 else
12107 {
12108 printf_unfiltered (_("no syscall record support\n"));
12109 return -1;
12110 }
12111 }
12112
12113 /* B (1), the conditional branch, is automatically taken care of in
12114 process_record, as PC is saved there. */
12115
12116 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12117 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12118 record_buf_mem);
12119
12120 return ret;
12121 }
12122
12123 /* Handling opcode 111 insns. */
12124
12125 static int
12126 thumb_record_branch (insn_decode_record *thumb_insn_r)
12127 {
12128 uint32_t record_buf[8];
12129 uint32_t bits_h = 0;
12130
12131 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12132
12133 if (2 == bits_h || 3 == bits_h)
12134 {
12135 /* BL */
12136 record_buf[0] = ARM_LR_REGNUM;
12137 thumb_insn_r->reg_rec_count = 1;
12138 }
12139 else if (1 == bits_h)
12140 {
12141 /* BLX(1). */
12142 record_buf[0] = ARM_PS_REGNUM;
12143 record_buf[1] = ARM_LR_REGNUM;
12144 thumb_insn_r->reg_rec_count = 2;
12145 }
12146
12147 /* B(2) is automatically taken care of in process_record, as PC is
12148 saved there. */
12149
12150 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12151
12152 return 0;
12153 }
12154
12155 /* Handler for thumb2 load/store multiple instructions. */
12156
12157 static int
12158 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12159 {
12160 struct regcache *reg_cache = thumb2_insn_r->regcache;
12161
12162 uint32_t reg_rn, op;
12163 uint32_t register_bits = 0, register_count = 0;
12164 uint32_t index = 0, start_address = 0;
12165 uint32_t record_buf[24], record_buf_mem[48];
12166
12167 ULONGEST u_regval = 0;
12168
12169 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12170 op = bits (thumb2_insn_r->arm_insn, 23, 24);
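/* OP values 0 and 3 are the SRS/RFE encodings; 1 and 2 are LDM/STM increment-after and decrement-before respectively. */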
12171
12172 if (0 == op || 3 == op)
12173 {
12174 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12175 {
12176 /* Handle RFE instruction. */
12177 record_buf[0] = ARM_PS_REGNUM;
12178 thumb2_insn_r->reg_rec_count = 1;
12179 }
12180 else
12181 {
12182 /* Handle SRS instruction after reading banked SP. */
12183 return arm_record_unsupported_insn (thumb2_insn_r);
12184 }
12185 }
12186 else if (1 == op || 2 == op)
12187 {
12188 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12189 {
12190 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12191 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12192 while (register_bits)
12193 {
12194 if (register_bits & 0x00000001)
12195 record_buf[index++] = register_count;
12196
12197 register_count++;
12198 register_bits = register_bits >> 1;
12199 }
12200 record_buf[index++] = reg_rn;
12201 record_buf[index++] = ARM_PS_REGNUM;
12202 thumb2_insn_r->reg_rec_count = index;
12203 }
12204 else
12205 {
12206 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12207 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12208 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12209 while (register_bits)
12210 {
12211 if (register_bits & 0x00000001)
12212 register_count++;
12213
12214 register_bits = register_bits >> 1;
12215 }
12216
12217 if (1 == op)
12218 {
12219 /* Start address calculation for STM/STMIA/STMEA (increment after). */
12220 start_address = u_regval;
12221 }
12222 else if (2 == op)
12223 {
12224 /* Start address calculation for STMDB/STMFD (decrement before). */
12225 start_address = u_regval - register_count * 4;
12226 }
12227
12228 thumb2_insn_r->mem_rec_count = register_count;
12229 while (register_count)
12230 {
12231 record_buf_mem[register_count * 2 - 1] = start_address;
12232 record_buf_mem[register_count * 2 - 2] = 4;
12233 start_address = start_address + 4;
12234 register_count--;
12235 }
12236 record_buf[0] = reg_rn;
12237 record_buf[1] = ARM_PS_REGNUM;
12238 thumb2_insn_r->reg_rec_count = 2;
12239 }
12240 }
12241
12242 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12243 record_buf_mem);
12244 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12245 record_buf);
12246 return ARM_RECORD_SUCCESS;
12247 }
12248
12249 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12250 instructions. */
12251
12252 static int
12253 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12254 {
12255 struct regcache *reg_cache = thumb2_insn_r->regcache;
12256
12257 uint32_t reg_rd, reg_rn, offset_imm;
12258 uint32_t reg_dest1, reg_dest2;
12259 uint32_t address, offset_addr;
12260 uint32_t record_buf[8], record_buf_mem[8];
12261 uint32_t op1, op2, op3;
12262
12263 ULONGEST u_regval[2];
12264
12265 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12266 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12267 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
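/* OP1, OP2 and OP3 distinguish the load/store exclusive (LDREX/STREX family), load/store dual (LDRD/STRD) and table branch (TBB/TBH) forms handled below. */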
12268
12269 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12270 {
12271 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12272 {
12273 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12274 record_buf[0] = reg_dest1;
12275 record_buf[1] = ARM_PS_REGNUM;
12276 thumb2_insn_r->reg_rec_count = 2;
12277 }
12278
12279 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12280 {
12281 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12282 record_buf[2] = reg_dest2;
12283 thumb2_insn_r->reg_rec_count = 3;
12284 }
12285 }
12286 else
12287 {
12288 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12289 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12290
12291 if (0 == op1 && 0 == op2)
12292 {
12293 /* Handle STREX. */
12294 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12295 address = u_regval[0] + (offset_imm * 4);
12296 record_buf_mem[0] = 4;
12297 record_buf_mem[1] = address;
12298 thumb2_insn_r->mem_rec_count = 1;
12299 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12300 record_buf[0] = reg_rd;
12301 thumb2_insn_r->reg_rec_count = 1;
12302 }
12303 else if (1 == op1 && 0 == op2)
12304 {
12305 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12306 record_buf[0] = reg_rd;
12307 thumb2_insn_r->reg_rec_count = 1;
12308 address = u_regval[0];
12309 record_buf_mem[1] = address;
12310
12311 if (4 == op3)
12312 {
12313 /* Handle STREXB. */
12314 record_buf_mem[0] = 1;
12315 thumb2_insn_r->mem_rec_count = 1;
12316 }
12317 else if (5 == op3)
12318 {
12319 /* Handle STREXH. */
12320 record_buf_mem[0] = 2;
12321 thumb2_insn_r->mem_rec_count = 1;
12322 }
12323 else if (7 == op3)
12324 {
12325 /* Handle STREXD. */
12326 address = u_regval[0];
12327 record_buf_mem[0] = 4;
12328 record_buf_mem[2] = 4;
12329 record_buf_mem[3] = address + 4;
12330 thumb2_insn_r->mem_rec_count = 2;
12331 }
12332 }
12333 else
12334 {
12335 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12336
12337 if (bit (thumb2_insn_r->arm_insn, 24))
12338 {
12339 if (bit (thumb2_insn_r->arm_insn, 23))
12340 offset_addr = u_regval[0] + (offset_imm * 4);
12341 else
12342 offset_addr = u_regval[0] - (offset_imm * 4);
12343
12344 address = offset_addr;
12345 }
12346 else
12347 address = u_regval[0];
12348
12349 record_buf_mem[0] = 4;
12350 record_buf_mem[1] = address;
12351 record_buf_mem[2] = 4;
12352 record_buf_mem[3] = address + 4;
12353 thumb2_insn_r->mem_rec_count = 2;
12354 record_buf[0] = reg_rn;
12355 thumb2_insn_r->reg_rec_count = 1;
12356 }
12357 }
12358
12359 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12360 record_buf);
12361 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12362 record_buf_mem);
12363 return ARM_RECORD_SUCCESS;
12364 }
12365
12366 /* Handler for thumb2 data processing (shift register and modified immediate)
12367 instructions. */
12368
12369 static int
12370 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12371 {
12372 uint32_t reg_rd, op;
12373 uint32_t record_buf[8];
12374
12375 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12376 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12377
12378 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12379 {
12380 record_buf[0] = ARM_PS_REGNUM;
12381 thumb2_insn_r->reg_rec_count = 1;
12382 }
12383 else
12384 {
12385 record_buf[0] = reg_rd;
12386 record_buf[1] = ARM_PS_REGNUM;
12387 thumb2_insn_r->reg_rec_count = 2;
12388 }
12389
12390 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12391 record_buf);
12392 return ARM_RECORD_SUCCESS;
12393 }
12394
12395 /* Generic handler for thumb2 instructions which affect the destination
12396 and PS registers. */
12397
12398 static int
12399 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12400 {
12401 uint32_t reg_rd;
12402 uint32_t record_buf[8];
12403
12404 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12405
12406 record_buf[0] = reg_rd;
12407 record_buf[1] = ARM_PS_REGNUM;
12408 thumb2_insn_r->reg_rec_count = 2;
12409
12410 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12411 record_buf);
12412 return ARM_RECORD_SUCCESS;
12413 }
12414
12415 /* Handler for thumb2 branch and miscellaneous control instructions. */
12416
12417 static int
12418 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12419 {
12420 uint32_t op, op1, op2;
12421 uint32_t record_buf[8];
12422
12423 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12424 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12425 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12426
12427 /* Handle MSR insn. */
12428 if (!(op1 & 0x2) && 0x38 == op)
12429 {
12430 if (!(op2 & 0x3))
12431 {
12432 /* CPSR is going to be changed. */
12433 record_buf[0] = ARM_PS_REGNUM;
12434 thumb2_insn_r->reg_rec_count = 1;
12435 }
12436 else
12437 {
12438 arm_record_unsupported_insn (thumb2_insn_r);
12439 return -1;
12440 }
12441 }
12442 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12443 {
12444 /* BLX. */
12445 record_buf[0] = ARM_PS_REGNUM;
12446 record_buf[1] = ARM_LR_REGNUM;
12447 thumb2_insn_r->reg_rec_count = 2;
12448 }
12449
12450 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12451 record_buf);
12452 return ARM_RECORD_SUCCESS;
12453 }
12454
12455 /* Handler for thumb2 store single data item instructions. */
12456
12457 static int
12458 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12459 {
12460 struct regcache *reg_cache = thumb2_insn_r->regcache;
12461
12462 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12463 uint32_t address, offset_addr;
12464 uint32_t record_buf[8], record_buf_mem[8];
12465 uint32_t op1, op2;
12466
12467 ULONGEST u_regval[2];
12468
12469 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12470 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12471 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12472 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12473
12474 if (bit (thumb2_insn_r->arm_insn, 23))
12475 {
12476 /* T2 encoding. */
12477 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12478 offset_addr = u_regval[0] + offset_imm;
12479 address = offset_addr;
12480 }
12481 else
12482 {
12483 /* T3 encoding. */
12484 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12485 {
12486 /* Handle STRB (register). */
12487 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12488 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12489 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12490 offset_addr = u_regval[1] << shift_imm;
12491 address = u_regval[0] + offset_addr;
12492 }
12493 else
12494 {
12495 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12496 if (bit (thumb2_insn_r->arm_insn, 10))
12497 {
12498 if (bit (thumb2_insn_r->arm_insn, 9))
12499 offset_addr = u_regval[0] + offset_imm;
12500 else
12501 offset_addr = u_regval[0] - offset_imm;
12502
12503 address = offset_addr;
12504 }
12505 else
12506 address = u_regval[0];
12507 }
12508 }
12509
12510 switch (op1)
12511 {
12512 /* Store byte instructions. */
12513 case 4:
12514 case 0:
12515 record_buf_mem[0] = 1;
12516 break;
12517 /* Store half word instructions. */
12518 case 1:
12519 case 5:
12520 record_buf_mem[0] = 2;
12521 break;
12522 /* Store word instructions. */
12523 case 2:
12524 case 6:
12525 record_buf_mem[0] = 4;
12526 break;
12527
12528 default:
12529 gdb_assert_not_reached ("no decoding pattern found");
12530 break;
12531 }
12532
12533 record_buf_mem[1] = address;
12534 thumb2_insn_r->mem_rec_count = 1;
12535 record_buf[0] = reg_rn;
12536 thumb2_insn_r->reg_rec_count = 1;
12537
12538 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12539 record_buf);
12540 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12541 record_buf_mem);
12542 return ARM_RECORD_SUCCESS;
12543 }
12544
12545 /* Handler for thumb2 load memory hints instructions. */
12546
12547 static int
12548 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12549 {
12550 uint32_t record_buf[8];
12551 uint32_t reg_rt, reg_rn;
12552
12553 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12554 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12555
12556 if (ARM_PC_REGNUM != reg_rt)
12557 {
12558 record_buf[0] = reg_rt;
12559 record_buf[1] = reg_rn;
12560 record_buf[2] = ARM_PS_REGNUM;
12561 thumb2_insn_r->reg_rec_count = 3;
12562
12563 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12564 record_buf);
12565 return ARM_RECORD_SUCCESS;
12566 }
12567
12568 return ARM_RECORD_FAILURE;
12569 }
12570
12571 /* Handler for thumb2 load word instructions. */
12572
12573 static int
12574 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12575 {
12576 uint32_t record_buf[8];
12577
12578 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12579 record_buf[1] = ARM_PS_REGNUM;
12580 thumb2_insn_r->reg_rec_count = 2;
12581
12582 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12583 record_buf);
12584 return ARM_RECORD_SUCCESS;
12585 }
12586
12587 /* Handler for thumb2 long multiply, long multiply accumulate, and
12588 divide instructions. */
12589
12590 static int
12591 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12592 {
12593 uint32_t opcode1 = 0, opcode2 = 0;
12594 uint32_t record_buf[8];
12595
12596 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12597 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12598
12599 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12600 {
12601 /* Handle SMULL, UMULL, SMLAL. */
12602 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12603 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12604 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12605 record_buf[2] = ARM_PS_REGNUM;
12606 thumb2_insn_r->reg_rec_count = 3;
12607 }
12608 else if (1 == opcode1 || 3 == opcode2)
12609 {
12610 /* Handle SDIV and UDIV. */
12611 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12612 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12613 record_buf[2] = ARM_PS_REGNUM;
12614 thumb2_insn_r->reg_rec_count = 3;
12615 }
12616 else
12617 return ARM_RECORD_FAILURE;
12618
12619 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12620 record_buf);
12621 return ARM_RECORD_SUCCESS;
12622 }
12623
12624 /* Record handler for thumb32 coprocessor instructions. */
12625
12626 static int
12627 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12628 {
12629 if (bit (thumb2_insn_r->arm_insn, 25))
12630 return arm_record_coproc_data_proc (thumb2_insn_r);
12631 else
12632 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12633 }
12634
12635 /* Record handler for Advanced SIMD structure load/store instructions. */
12636
12637 static int
12638 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12639 {
12640 struct regcache *reg_cache = thumb2_insn_r->regcache;
12641 uint32_t l_bit, a_bit, b_bits;
12642 uint32_t record_buf[128], record_buf_mem[128];
12643 uint32_t reg_rn, reg_vd, address, f_elem;
12644 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12645 uint8_t f_ebytes;
12646
12647 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12648 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12649 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12650 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12651 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12652 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12653 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12654 f_elem = 8 / f_ebytes;
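/* F_EBYTES is the element size in bytes taken from the size field (bits 6-7); F_ELEM is the number of such elements held by one 64-bit D register. */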
12655
12656 if (!l_bit)
12657 {
12658 ULONGEST u_regval = 0;
12659 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12660 address = u_regval;
12661
12662 if (!a_bit)
12663 {
12664 /* Handle VST1. */
12665 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12666 {
12667 if (b_bits == 0x07)
12668 bf_regs = 1;
12669 else if (b_bits == 0x0a)
12670 bf_regs = 2;
12671 else if (b_bits == 0x06)
12672 bf_regs = 3;
12673 else if (b_bits == 0x02)
12674 bf_regs = 4;
12675 else
12676 bf_regs = 0;
12677
12678 for (index_r = 0; index_r < bf_regs; index_r++)
12679 {
12680 for (index_e = 0; index_e < f_elem; index_e++)
12681 {
12682 record_buf_mem[index_m++] = f_ebytes;
12683 record_buf_mem[index_m++] = address;
12684 address = address + f_ebytes;
12685 thumb2_insn_r->mem_rec_count += 1;
12686 }
12687 }
12688 }
12689 /* Handle VST2. */
12690 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12691 {
12692 if (b_bits == 0x09 || b_bits == 0x08)
12693 bf_regs = 1;
12694 else if (b_bits == 0x03)
12695 bf_regs = 2;
12696 else
12697 bf_regs = 0;
12698
12699 for (index_r = 0; index_r < bf_regs; index_r++)
12700 for (index_e = 0; index_e < f_elem; index_e++)
12701 {
12702 for (loop_t = 0; loop_t < 2; loop_t++)
12703 {
12704 record_buf_mem[index_m++] = f_ebytes;
12705 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12706 thumb2_insn_r->mem_rec_count += 1;
12707 }
12708 address = address + (2 * f_ebytes);
12709 }
12710 }
12711 /* Handle VST3. */
12712 else if ((b_bits & 0x0e) == 0x04)
12713 {
12714 for (index_e = 0; index_e < f_elem; index_e++)
12715 {
12716 for (loop_t = 0; loop_t < 3; loop_t++)
12717 {
12718 record_buf_mem[index_m++] = f_ebytes;
12719 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12720 thumb2_insn_r->mem_rec_count += 1;
12721 }
12722 address = address + (3 * f_ebytes);
12723 }
12724 }
12725 /* Handle VST4. */
12726 else if (!(b_bits & 0x0e))
12727 {
12728 for (index_e = 0; index_e < f_elem; index_e++)
12729 {
12730 for (loop_t = 0; loop_t < 4; loop_t++)
12731 {
12732 record_buf_mem[index_m++] = f_ebytes;
12733 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12734 thumb2_insn_r->mem_rec_count += 1;
12735 }
12736 address = address + (4 * f_ebytes);
12737 }
12738 }
12739 }
12740 else
12741 {
12742 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12743
12744 if (bft_size == 0x00)
12745 f_ebytes = 1;
12746 else if (bft_size == 0x01)
12747 f_ebytes = 2;
12748 else if (bft_size == 0x02)
12749 f_ebytes = 4;
12750 else
12751 f_ebytes = 0;
12752
12753 /* Handle VST1. */
12754 if (!(b_bits & 0x0b) || b_bits == 0x08)
12755 thumb2_insn_r->mem_rec_count = 1;
12756 /* Handle VST2. */
12757 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12758 thumb2_insn_r->mem_rec_count = 2;
12759 /* Handle VST3. */
12760 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12761 thumb2_insn_r->mem_rec_count = 3;
12762 /* Handle VST4. */
12763 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12764 thumb2_insn_r->mem_rec_count = 4;
12765
12766 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12767 {
12768 record_buf_mem[index_m * 2] = f_ebytes;
12769 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12770 }
12771 }
12772 }
12773 else
12774 {
12775 if (!a_bit)
12776 {
12777 /* Handle VLD1. */
12778 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12779 thumb2_insn_r->reg_rec_count = 1;
12780 /* Handle VLD2. */
12781 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12782 thumb2_insn_r->reg_rec_count = 2;
12783 /* Handle VLD3. */
12784 else if ((b_bits & 0x0e) == 0x04)
12785 thumb2_insn_r->reg_rec_count = 3;
12786 /* Handle VLD4. */
12787 else if (!(b_bits & 0x0e))
12788 thumb2_insn_r->reg_rec_count = 4;
12789 }
12790 else
12791 {
12792 /* Handle VLD1. */
12793 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12794 thumb2_insn_r->reg_rec_count = 1;
12795 /* Handle VLD2. */
12796 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12797 thumb2_insn_r->reg_rec_count = 2;
12798 /* Handle VLD3. */
12799 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12800 thumb2_insn_r->reg_rec_count = 3;
12801 /* Handle VLD4. */
12802 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12803 thumb2_insn_r->reg_rec_count = 4;
12804
12805 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12806 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12807 }
12808 }
12809
12810 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12811 {
12812 record_buf[index_r] = reg_rn;
12813 thumb2_insn_r->reg_rec_count += 1;
12814 }
12815
12816 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12817 record_buf);
12818 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12819 record_buf_mem);
12820 return 0;
12821 }
12822
12823 /* Decodes thumb2 instruction type and invokes its record handler. */
12824
12825 static unsigned int
12826 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12827 {
12828 uint32_t op, op1, op2;
12829
12830 op = bit (thumb2_insn_r->arm_insn, 15);
12831 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12832 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12833
12834 if (op1 == 0x01)
12835 {
12836       if (!(op2 & 0x64))
12837 {
12838 /* Load/store multiple instruction. */
12839 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12840 }
12841 else if ((op2 & 0x64) == 0x4)
12842 {
12843 /* Load/store (dual/exclusive) and table branch instruction. */
12844 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12845 }
12846 else if ((op2 & 0x60) == 0x20)
12847 {
12848 /* Data-processing (shifted register). */
12849 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12850 }
12851 else if (op2 & 0x40)
12852 {
12853 /* Co-processor instructions. */
12854 return thumb2_record_coproc_insn (thumb2_insn_r);
12855 }
12856 }
12857 else if (op1 == 0x02)
12858 {
12859 if (op)
12860 {
12861 /* Branches and miscellaneous control instructions. */
12862 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12863 }
12864 else if (op2 & 0x20)
12865 {
12866 /* Data-processing (plain binary immediate) instruction. */
12867 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12868 }
12869 else
12870 {
12871 /* Data-processing (modified immediate). */
12872 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12873 }
12874 }
12875 else if (op1 == 0x03)
12876 {
12877       if (!(op2 & 0x71))
12878 {
12879 /* Store single data item. */
12880 return thumb2_record_str_single_data (thumb2_insn_r);
12881 }
12882 else if (!((op2 & 0x71) ^ 0x10))
12883 {
12884 /* Advanced SIMD or structure load/store instructions. */
12885 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12886 }
12887 else if (!((op2 & 0x67) ^ 0x01))
12888 {
12889 /* Load byte, memory hints instruction. */
12890 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12891 }
12892 else if (!((op2 & 0x67) ^ 0x03))
12893 {
12894 /* Load halfword, memory hints instruction. */
12895 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12896 }
12897 else if (!((op2 & 0x67) ^ 0x05))
12898 {
12899 /* Load word instruction. */
12900 return thumb2_record_ld_word (thumb2_insn_r);
12901 }
12902 else if (!((op2 & 0x70) ^ 0x20))
12903 {
12904 /* Data-processing (register) instruction. */
12905 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12906 }
12907 else if (!((op2 & 0x78) ^ 0x30))
12908 {
12909 /* Multiply, multiply accumulate, abs diff instruction. */
12910 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12911 }
12912 else if (!((op2 & 0x78) ^ 0x38))
12913 {
12914 /* Long multiply, long multiply accumulate, and divide. */
12915 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12916 }
12917 else if (op2 & 0x40)
12918 {
12919 /* Co-processor instructions. */
12920 return thumb2_record_coproc_insn (thumb2_insn_r);
12921 }
12922 }
12923
12924 return -1;
12925 }
12926
12927 namespace {
12928 /* Abstract memory reader. */
12929
12930 class abstract_memory_reader
12931 {
12932 public:
12933 /* Read LEN bytes of target memory at address MEMADDR, placing the
12934 results in GDB's memory at BUF. Return true on success. */
12935
12936 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12937 };
12938
12939 /* Instruction reader from real target. */
12940
12941 class instruction_reader : public abstract_memory_reader
12942 {
12943 public:
12944 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
12945 {
12946 if (target_read_memory (memaddr, buf, len))
12947 return false;
12948 else
12949 return true;
12950 }
12951 };
12952
12953 } // namespace
12954
12955 /* Extract an arm/thumb/thumb2 insn depending on its size.  Return 0 on
12956    success and a positive value on failure.  */
12957
12958 static int
12959 extract_arm_insn (abstract_memory_reader& reader,
12960 insn_decode_record *insn_record, uint32_t insn_size)
12961 {
12962 gdb_byte buf[insn_size];
12963
12964 memset (&buf[0], 0, insn_size);
12965
12966 if (!reader.read (insn_record->this_addr, buf, insn_size))
12967 return 1;
12968 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12969 insn_size,
12970 gdbarch_byte_order_for_code (insn_record->gdbarch));
12971 return 0;
12972 }
12973
12974 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12975
12976 /* Decode an arm/thumb insn depending on its condition codes and opcodes,
12977    and dispatch it to the appropriate record handler.  */
12978
12979 static int
12980 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
12981 record_type_t record_type, uint32_t insn_size)
12982 {
12983
12984   /* Bits 25, 26 and 27 (counting from bit 0) decode the type of an ARM
12985      instruction.  */
12986 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12987 {
12988 arm_record_data_proc_misc_ld_str, /* 000. */
12989 arm_record_data_proc_imm, /* 001. */
12990 arm_record_ld_st_imm_offset, /* 010. */
12991 arm_record_ld_st_reg_offset, /* 011. */
12992 arm_record_ld_st_multiple, /* 100. */
12993 arm_record_b_bl, /* 101. */
12994 arm_record_asimd_vfp_coproc, /* 110. */
12995 arm_record_coproc_data_proc /* 111. */
12996 };
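  /* Worked example (illustrative, not part of the original dispatch code):
     the ARM instruction "mov r0, #1" encodes as 0xe3a00001.  Its condition
     field (bits 28-31) is 0xe, and bits 25-27 are 001, so it is dispatched
     to arm_handle_insn[1], i.e. arm_record_data_proc_imm.  */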
12997
12998   /* Bits 13, 14 and 15 (counting from bit 0) decode the type of a Thumb
12999      instruction.  */
13000   static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13001   {
13002 thumb_record_shift_add_sub, /* 000. */
13003 thumb_record_add_sub_cmp_mov, /* 001. */
13004 thumb_record_ld_st_reg_offset, /* 010. */
13005 thumb_record_ld_st_imm_offset, /* 011. */
13006 thumb_record_ld_st_stack, /* 100. */
13007 thumb_record_misc, /* 101. */
13008 thumb_record_ldm_stm_swi, /* 110. */
13009 thumb_record_branch /* 111. */
13010 };
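  /* Worked example (illustrative only): the 16-bit Thumb instruction
     "uxtb r3, r3" encodes as 0xb2db (also exercised by the self tests
     below).  Its bits 13-15 are 101, so it is dispatched to
     thumb_handle_insn[5], i.e. thumb_record_misc.  */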
13011
13012   uint32_t ret = 0;    /* Return value: -1 on failure, 0 on success.  */
13013 uint32_t insn_id = 0;
13014
13015 if (extract_arm_insn (reader, arm_record, insn_size))
13016 {
13017 if (record_debug)
13018 {
13019 printf_unfiltered (_("Process record: error reading memory at "
13020 "addr %s len = %d.\n"),
13021 paddress (arm_record->gdbarch,
13022 arm_record->this_addr), insn_size);
13023 }
13024 return -1;
13025 }
13026 else if (ARM_RECORD == record_type)
13027 {
13028 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13029 insn_id = bits (arm_record->arm_insn, 25, 27);
13030
13031 if (arm_record->cond == 0xf)
13032 ret = arm_record_extension_space (arm_record);
13033 else
13034 {
13035 /* If this insn has fallen into extension space
13036 then we need not decode it anymore. */
13037 ret = arm_handle_insn[insn_id] (arm_record);
13038 }
13039 if (ret != ARM_RECORD_SUCCESS)
13040 {
13041 arm_record_unsupported_insn (arm_record);
13042 ret = -1;
13043 }
13044 }
13045 else if (THUMB_RECORD == record_type)
13046 {
13047       /* As Thumb does not have condition codes, set cond to a negative value.  */
13048 arm_record->cond = -1;
13049 insn_id = bits (arm_record->arm_insn, 13, 15);
13050 ret = thumb_handle_insn[insn_id] (arm_record);
13051 if (ret != ARM_RECORD_SUCCESS)
13052 {
13053 arm_record_unsupported_insn (arm_record);
13054 ret = -1;
13055 }
13056 }
13057 else if (THUMB2_RECORD == record_type)
13058 {
13059       /* As Thumb does not have condition codes, set cond to a negative value.  */
13060 arm_record->cond = -1;
13061
13062       /* Swap the first half of the 32-bit Thumb instruction with the second half.  */
13063 arm_record->arm_insn
13064 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13065
13066 ret = thumb2_record_decode_insn_handler (arm_record);
13067
13068 if (ret != ARM_RECORD_SUCCESS)
13069 {
13070 arm_record_unsupported_insn (arm_record);
13071 ret = -1;
13072 }
13073 }
13074 else
13075 {
13076       /* Invalid record type; this should be unreachable.  */
13077 gdb_assert_not_reached ("not a valid instruction, could not decode");
13078 }
13079
13080 return ret;
13081 }
13082
13083 #if GDB_SELF_TEST
13084 namespace selftests {
13085
13086 /* Provide both 16-bit and 32-bit thumb instructions. */
13087
13088 class instruction_reader_thumb : public abstract_memory_reader
13089 {
13090 public:
13091 template<size_t SIZE>
13092 instruction_reader_thumb (enum bfd_endian endian,
13093 const uint16_t (&insns)[SIZE])
13094 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13095 {}
13096
13097 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13098 {
13099 SELF_CHECK (len == 4 || len == 2);
13100 SELF_CHECK (memaddr % 2 == 0);
13101 SELF_CHECK ((memaddr / 2) < m_insns_size);
13102
13103 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13104 if (len == 4)
13105 {
13106 store_unsigned_integer (&buf[2], 2, m_endian,
13107 m_insns[memaddr / 2 + 1]);
13108 }
13109 return true;
13110 }
13111
13112 private:
13113 enum bfd_endian m_endian;
13114 const uint16_t *m_insns;
13115 size_t m_insns_size;
13116 };
13117
13118 static void
13119 arm_record_test (void)
13120 {
13121 struct gdbarch_info info;
13122 gdbarch_info_init (&info);
13123 info.bfd_arch_info = bfd_scan_arch ("arm");
13124
13125 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13126
13127 SELF_CHECK (gdbarch != NULL);
13128
13129 /* 16-bit Thumb instructions. */
13130 {
13131 insn_decode_record arm_record;
13132
13133 memset (&arm_record, 0, sizeof (insn_decode_record));
13134 arm_record.gdbarch = gdbarch;
13135
13136 static const uint16_t insns[] = {
13137 /* db b2 uxtb r3, r3 */
13138 0xb2db,
13139 /* cd 58 ldr r5, [r1, r3] */
13140 0x58cd,
13141 };
13142
13143 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13144 instruction_reader_thumb reader (endian, insns);
13145 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13146 THUMB_INSN_SIZE_BYTES);
13147
13148 SELF_CHECK (ret == 0);
13149 SELF_CHECK (arm_record.mem_rec_count == 0);
13150 SELF_CHECK (arm_record.reg_rec_count == 1);
13151 SELF_CHECK (arm_record.arm_regs[0] == 3);
13152
13153 arm_record.this_addr += 2;
13154 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13155 THUMB_INSN_SIZE_BYTES);
13156
13157 SELF_CHECK (ret == 0);
13158 SELF_CHECK (arm_record.mem_rec_count == 0);
13159 SELF_CHECK (arm_record.reg_rec_count == 1);
13160 SELF_CHECK (arm_record.arm_regs[0] == 5);
13161 }
13162
13163 /* 32-bit Thumb-2 instructions. */
13164 {
13165 insn_decode_record arm_record;
13166
13167 memset (&arm_record, 0, sizeof (insn_decode_record));
13168 arm_record.gdbarch = gdbarch;
13169
13170 static const uint16_t insns[] = {
13171 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13172 0xee1d, 0x7f70,
13173 };
13174
13175 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13176 instruction_reader_thumb reader (endian, insns);
13177 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13178 THUMB2_INSN_SIZE_BYTES);
13179
13180 SELF_CHECK (ret == 0);
13181 SELF_CHECK (arm_record.mem_rec_count == 0);
13182 SELF_CHECK (arm_record.reg_rec_count == 1);
13183 SELF_CHECK (arm_record.arm_regs[0] == 7);
13184 }
13185 }
13186 } // namespace selftests
13187 #endif /* GDB_SELF_TEST */
13188
13189 /* Cleans up local record registers and memory allocations. */
13190
13191 static void
13192 deallocate_reg_mem (insn_decode_record *record)
13193 {
13194 xfree (record->arm_regs);
13195 xfree (record->arm_mems);
13196 }
13197
13198
13199 /* Parse the current instruction and record the values of the registers and
13200    memory that will be changed by it to "record_arch_list".
13201    Return -1 if something goes wrong, 0 otherwise.  */
13202
13203 int
13204 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13205 CORE_ADDR insn_addr)
13206 {
13207
13208 uint32_t no_of_rec = 0;
13209   uint32_t ret = 0;    /* Return value: -1 on record failure, 0 on success.  */
13210 ULONGEST t_bit = 0, insn_id = 0;
13211
13212 ULONGEST u_regval = 0;
13213
13214 insn_decode_record arm_record;
13215
13216 memset (&arm_record, 0, sizeof (insn_decode_record));
13217 arm_record.regcache = regcache;
13218 arm_record.this_addr = insn_addr;
13219 arm_record.gdbarch = gdbarch;
13220
13221
13222 if (record_debug > 1)
13223 {
13224 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13225 "addr = %s\n",
13226 paddress (gdbarch, arm_record.this_addr));
13227 }
13228
13229 instruction_reader reader;
13230 if (extract_arm_insn (reader, &arm_record, 2))
13231 {
13232 if (record_debug)
13233 {
13234 printf_unfiltered (_("Process record: error reading memory at "
13235 "addr %s len = %d.\n"),
13236 paddress (arm_record.gdbarch,
13237 arm_record.this_addr), 2);
13238 }
13239 return -1;
13240 }
13241
13242   /* Check whether the insn is a Thumb or an ARM one.  */
13243
13244 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13245 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13246
13247
13248 if (!(u_regval & t_bit))
13249 {
13250 /* We are decoding arm insn. */
13251 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13252 }
13253 else
13254 {
13255 insn_id = bits (arm_record.arm_insn, 11, 15);
13256       /* Is it a Thumb2 insn?  */
13257 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13258 {
13259 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13260 THUMB2_INSN_SIZE_BYTES);
13261 }
13262 else
13263 {
13264 /* We are decoding thumb insn. */
13265 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13266 THUMB_INSN_SIZE_BYTES);
13267 }
13268 }
13269
13270 if (0 == ret)
13271 {
13272 /* Record registers. */
13273 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13274 if (arm_record.arm_regs)
13275 {
13276 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13277 {
13278 if (record_full_arch_list_add_reg
13279 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13280 ret = -1;
13281 }
13282 }
13283 /* Record memories. */
13284 if (arm_record.arm_mems)
13285 {
13286 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13287 {
13288 if (record_full_arch_list_add_mem
13289 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13290 arm_record.arm_mems[no_of_rec].len))
13291 ret = -1;
13292 }
13293 }
13294
13295 if (record_full_arch_list_add_end ())
13296 ret = -1;
13297 }
13298
13299
13300 deallocate_reg_mem (&arm_record);
13301
13302 return ret;
13303 }
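/* Illustrative sketch, not part of the original code at this point: the
   entry point above is typically wired into the architecture vector from
   the target's gdbarch initialization, which is what enables "record full"
   for ARM:

     set_gdbarch_process_record (gdbarch, arm_process_record);  */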
13304
13305 /* See arm-tdep.h. */
13306
13307 const target_desc *
13308 arm_read_description (arm_fp_type fp_type)
13309 {
13310 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13311
13312 if (tdesc == nullptr)
13313 {
13314 tdesc = arm_create_target_description (fp_type);
13315 tdesc_arm_list[fp_type] = tdesc;
13316 }
13317
13318 return tdesc;
13319 }
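/* Illustrative usage sketch (assuming the arm_fp_type enumerators declared
   in arch/arm.h, e.g. ARM_FP_TYPE_VFPV2): a target that detects a VFPv2
   unit can fetch a cached description with

     const target_desc *tdesc = arm_read_description (ARM_FP_TYPE_VFPV2);  */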
13320
13321 /* See arm-tdep.h. */
13322
13323 const target_desc *
13324 arm_read_mprofile_description (arm_m_profile_type m_type)
13325 {
13326 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13327
13328 if (tdesc == nullptr)
13329 {
13330 tdesc = arm_create_mprofile_target_description (m_type);
13331 tdesc_arm_mprofile_list[m_type] = tdesc;
13332 }
13333
13334 return tdesc;
13335 }