gdb: bool-ify follow_fork
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2/frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
49
50 #include "arch/arm.h"
51 #include "arch/arm-get-next-pcs.h"
52 #include "arm-tdep.h"
53 #include "gdb/sim-arm.h"
54
55 #include "elf-bfd.h"
56 #include "coff/internal.h"
57 #include "elf/arm.h"
58
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
62
63 #if GDB_SELF_TEST
64 #include "gdbsupport/selftest.h"
65 #endif
66
67 static bool arm_debug;
68
69 /* Macros for setting and testing a bit in a minimal symbol that marks
70 it as Thumb function. The MSB of the minimal symbol's "info" field
71 is used for this purpose.
72
73 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
74 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
75
76 #define MSYMBOL_SET_SPECIAL(msym) \
77 MSYMBOL_TARGET_FLAG_1 (msym) = 1
78
79 #define MSYMBOL_IS_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym)
81
82 struct arm_mapping_symbol
83 {
84 CORE_ADDR value;
85 char type;
86
87 bool operator< (const arm_mapping_symbol &other) const
88 { return this->value < other.value; }
89 };
90
91 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
92
93 struct arm_per_bfd
94 {
95 explicit arm_per_bfd (size_t num_sections)
96 : section_maps (new arm_mapping_symbol_vec[num_sections]),
97 section_maps_sorted (new bool[num_sections] ())
98 {}
99
100 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
101
102 /* Information about mapping symbols ($a, $d, $t) in the objfile.
103
104 The format is an array of vectors of arm_mapping_symbols, there is one
105 vector for each section of the objfile (the array is index by BFD section
106 index).
107
108 For each section, the vector of arm_mapping_symbol is sorted by
109 symbol value (address). */
110 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
111
112 /* For each corresponding element of section_maps above, is this vector
113 sorted. */
114 std::unique_ptr<bool[]> section_maps_sorted;
115 };
116
117 /* Per-bfd data used for mapping symbols. */
118 static bfd_key<arm_per_bfd> arm_bfd_data_key;
119
120 /* The list of available "set arm ..." and "show arm ..." commands. */
121 static struct cmd_list_element *setarmcmdlist = NULL;
122 static struct cmd_list_element *showarmcmdlist = NULL;
123
124 /* The type of floating-point to use. Keep this in sync with enum
125 arm_float_model, and the help string in _initialize_arm_tdep. */
126 static const char *const fp_model_strings[] =
127 {
128 "auto",
129 "softfpa",
130 "fpa",
131 "softvfp",
132 "vfp",
133 NULL
134 };
135
136 /* A variable that can be configured by the user. */
137 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
138 static const char *current_fp_model = "auto";
139
140 /* The ABI to use. Keep this in sync with arm_abi_kind. */
141 static const char *const arm_abi_strings[] =
142 {
143 "auto",
144 "APCS",
145 "AAPCS",
146 NULL
147 };
148
149 /* A variable that can be configured by the user. */
150 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
151 static const char *arm_abi_string = "auto";
152
153 /* The execution mode to assume. */
154 static const char *const arm_mode_strings[] =
155 {
156 "auto",
157 "arm",
158 "thumb",
159 NULL
160 };
161
162 static const char *arm_fallback_mode_string = "auto";
163 static const char *arm_force_mode_string = "auto";
164
165 /* The standard register names, and all the valid aliases for them. Note
166 that `fp', `sp' and `pc' are not added in this alias list, because they
167 have been added as builtin user registers in
168 std-regs.c:_initialize_frame_reg. */
169 static const struct
170 {
171 const char *name;
172 int regnum;
173 } arm_register_aliases[] = {
174 /* Basic register numbers. */
175 { "r0", 0 },
176 { "r1", 1 },
177 { "r2", 2 },
178 { "r3", 3 },
179 { "r4", 4 },
180 { "r5", 5 },
181 { "r6", 6 },
182 { "r7", 7 },
183 { "r8", 8 },
184 { "r9", 9 },
185 { "r10", 10 },
186 { "r11", 11 },
187 { "r12", 12 },
188 { "r13", 13 },
189 { "r14", 14 },
190 { "r15", 15 },
191 /* Synonyms (argument and variable registers). */
192 { "a1", 0 },
193 { "a2", 1 },
194 { "a3", 2 },
195 { "a4", 3 },
196 { "v1", 4 },
197 { "v2", 5 },
198 { "v3", 6 },
199 { "v4", 7 },
200 { "v5", 8 },
201 { "v6", 9 },
202 { "v7", 10 },
203 { "v8", 11 },
204 /* Other platform-specific names for r9. */
205 { "sb", 9 },
206 { "tr", 9 },
207 /* Special names. */
208 { "ip", 12 },
209 { "lr", 14 },
210 /* Names used by GCC (not listed in the ARM EABI). */
211 { "sl", 10 },
212 /* A special name from the older ATPCS. */
213 { "wr", 7 },
214 };
215
216 static const char *const arm_register_names[] =
217 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
218 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
219 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
220 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
221 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
222 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
223 "fps", "cpsr" }; /* 24 25 */
224
225 /* Holds the current set of options to be passed to the disassembler. */
226 static char *arm_disassembler_options;
227
228 /* Valid register name styles. */
229 static const char **valid_disassembly_styles;
230
231 /* Disassembly style to use. Default to "std" register names. */
232 static const char *disassembly_style;
233
234 /* All possible arm target descriptors. */
235 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
236 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
237
238 /* This is used to keep the bfd arch_info in sync with the disassembly
239 style. */
240 static void set_disassembly_style_sfunc (const char *, int,
241 struct cmd_list_element *);
242 static void show_disassembly_style_sfunc (struct ui_file *, int,
243 struct cmd_list_element *,
244 const char *);
245
246 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
247 readable_regcache *regcache,
248 int regnum, gdb_byte *buf);
249 static void arm_neon_quad_write (struct gdbarch *gdbarch,
250 struct regcache *regcache,
251 int regnum, const gdb_byte *buf);
252
253 static CORE_ADDR
254 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
255
256
257 /* get_next_pcs operations. */
258 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
259 arm_get_next_pcs_read_memory_unsigned_integer,
260 arm_get_next_pcs_syscall_next_pc,
261 arm_get_next_pcs_addr_bits_remove,
262 arm_get_next_pcs_is_thumb,
263 NULL,
264 };
265
266 struct arm_prologue_cache
267 {
268 /* The stack pointer at the time this frame was created; i.e. the
269 caller's stack pointer when this function was called. It is used
270 to identify this frame. */
271 CORE_ADDR prev_sp;
272
273 /* The frame base for this frame is just prev_sp - frame size.
274 FRAMESIZE is the distance from the frame pointer to the
275 initial stack pointer. */
276
277 int framesize;
278
279 /* The register used to hold the frame pointer for this frame. */
280 int framereg;
281
282 /* Saved register offsets. */
283 struct trad_frame_saved_reg *saved_regs;
284 };
285
286 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
287 CORE_ADDR prologue_start,
288 CORE_ADDR prologue_end,
289 struct arm_prologue_cache *cache);
290
291 /* Architecture version for displaced stepping. This effects the behaviour of
292 certain instructions, and really should not be hard-wired. */
293
294 #define DISPLACED_STEPPING_ARCH_VERSION 5
295
296 /* See arm-tdep.h. */
297
298 bool arm_apcs_32 = true;
299
300 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
301
302 int
303 arm_psr_thumb_bit (struct gdbarch *gdbarch)
304 {
305 if (gdbarch_tdep (gdbarch)->is_m)
306 return XPSR_T;
307 else
308 return CPSR_T;
309 }
310
311 /* Determine if the processor is currently executing in Thumb mode. */
312
313 int
314 arm_is_thumb (struct regcache *regcache)
315 {
316 ULONGEST cpsr;
317 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
318
319 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
320
321 return (cpsr & t_bit) != 0;
322 }
323
324 /* Determine if FRAME is executing in Thumb mode. */
325
326 int
327 arm_frame_is_thumb (struct frame_info *frame)
328 {
329 CORE_ADDR cpsr;
330 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
331
332 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
333 directly (from a signal frame or dummy frame) or by interpreting
334 the saved LR (from a prologue or DWARF frame). So consult it and
335 trust the unwinders. */
336 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
337
338 return (cpsr & t_bit) != 0;
339 }
340
341 /* Search for the mapping symbol covering MEMADDR. If one is found,
342 return its type. Otherwise, return 0. If START is non-NULL,
343 set *START to the location of the mapping symbol. */
344
345 static char
346 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
347 {
348 struct obj_section *sec;
349
350 /* If there are mapping symbols, consult them. */
351 sec = find_pc_section (memaddr);
352 if (sec != NULL)
353 {
354 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
355 if (data != NULL)
356 {
357 unsigned int section_idx = sec->the_bfd_section->index;
358 arm_mapping_symbol_vec &map
359 = data->section_maps[section_idx];
360
361 /* Sort the vector on first use. */
362 if (!data->section_maps_sorted[section_idx])
363 {
364 std::sort (map.begin (), map.end ());
365 data->section_maps_sorted[section_idx] = true;
366 }
367
368 struct arm_mapping_symbol map_key
369 = { memaddr - obj_section_addr (sec), 0 };
370 arm_mapping_symbol_vec::const_iterator it
371 = std::lower_bound (map.begin (), map.end (), map_key);
372
373 /* std::lower_bound finds the earliest ordered insertion
374 point. If the symbol at this position starts at this exact
375 address, we use that; otherwise, the preceding
376 mapping symbol covers this address. */
377 if (it < map.end ())
378 {
379 if (it->value == map_key.value)
380 {
381 if (start)
382 *start = it->value + obj_section_addr (sec);
383 return it->type;
384 }
385 }
386
387 if (it > map.begin ())
388 {
389 arm_mapping_symbol_vec::const_iterator prev_it
390 = it - 1;
391
392 if (start)
393 *start = prev_it->value + obj_section_addr (sec);
394 return prev_it->type;
395 }
396 }
397 }
398
399 return 0;
400 }
401
402 /* Determine if the program counter specified in MEMADDR is in a Thumb
403 function. This function should be called for addresses unrelated to
404 any executing frame; otherwise, prefer arm_frame_is_thumb. */
405
406 int
407 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
408 {
409 struct bound_minimal_symbol sym;
410 char type;
411 arm_displaced_step_closure *dsc
412 = ((arm_displaced_step_closure * )
413 get_displaced_step_closure_by_addr (memaddr));
414
415 /* If checking the mode of displaced instruction in copy area, the mode
416 should be determined by instruction on the original address. */
417 if (dsc)
418 {
419 if (debug_displaced)
420 fprintf_unfiltered (gdb_stdlog,
421 "displaced: check mode of %.8lx instead of %.8lx\n",
422 (unsigned long) dsc->insn_addr,
423 (unsigned long) memaddr);
424 memaddr = dsc->insn_addr;
425 }
426
427 /* If bit 0 of the address is set, assume this is a Thumb address. */
428 if (IS_THUMB_ADDR (memaddr))
429 return 1;
430
431 /* If the user wants to override the symbol table, let him. */
432 if (strcmp (arm_force_mode_string, "arm") == 0)
433 return 0;
434 if (strcmp (arm_force_mode_string, "thumb") == 0)
435 return 1;
436
437 /* ARM v6-M and v7-M are always in Thumb mode. */
438 if (gdbarch_tdep (gdbarch)->is_m)
439 return 1;
440
441 /* If there are mapping symbols, consult them. */
442 type = arm_find_mapping_symbol (memaddr, NULL);
443 if (type)
444 return type == 't';
445
446 /* Thumb functions have a "special" bit set in minimal symbols. */
447 sym = lookup_minimal_symbol_by_pc (memaddr);
448 if (sym.minsym)
449 return (MSYMBOL_IS_SPECIAL (sym.minsym));
450
451 /* If the user wants to override the fallback mode, let them. */
452 if (strcmp (arm_fallback_mode_string, "arm") == 0)
453 return 0;
454 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
455 return 1;
456
457 /* If we couldn't find any symbol, but we're talking to a running
458 target, then trust the current value of $cpsr. This lets
459 "display/i $pc" always show the correct mode (though if there is
460 a symbol table we will not reach here, so it still may not be
461 displayed in the mode it will be executed). */
462 if (target_has_registers)
463 return arm_frame_is_thumb (get_current_frame ());
464
465 /* Otherwise we're out of luck; we assume ARM. */
466 return 0;
467 }
468
469 /* Determine if the address specified equals any of these magic return
470 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
471 architectures.
472
473 From ARMv6-M Reference Manual B1.5.8
474 Table B1-5 Exception return behavior
475
476 EXC_RETURN Return To Return Stack
477 0xFFFFFFF1 Handler mode Main
478 0xFFFFFFF9 Thread mode Main
479 0xFFFFFFFD Thread mode Process
480
481 From ARMv7-M Reference Manual B1.5.8
482 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
483
484 EXC_RETURN Return To Return Stack
485 0xFFFFFFF1 Handler mode Main
486 0xFFFFFFF9 Thread mode Main
487 0xFFFFFFFD Thread mode Process
488
489 Table B1-9 EXC_RETURN definition of exception return behavior, with
490 FP
491
492 EXC_RETURN Return To Return Stack Frame Type
493 0xFFFFFFE1 Handler mode Main Extended
494 0xFFFFFFE9 Thread mode Main Extended
495 0xFFFFFFED Thread mode Process Extended
496 0xFFFFFFF1 Handler mode Main Basic
497 0xFFFFFFF9 Thread mode Main Basic
498 0xFFFFFFFD Thread mode Process Basic
499
500 For more details see "B1.5.8 Exception return behavior"
501 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
502
503 static int
504 arm_m_addr_is_magic (CORE_ADDR addr)
505 {
506 switch (addr)
507 {
508 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
509 the exception return behavior. */
510 case 0xffffffe1:
511 case 0xffffffe9:
512 case 0xffffffed:
513 case 0xfffffff1:
514 case 0xfffffff9:
515 case 0xfffffffd:
516 /* Address is magic. */
517 return 1;
518
519 default:
520 /* Address is not magic. */
521 return 0;
522 }
523 }
524
525 /* Remove useless bits from addresses in a running program. */
526 static CORE_ADDR
527 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
528 {
529 /* On M-profile devices, do not strip the low bit from EXC_RETURN
530 (the magic exception return address). */
531 if (gdbarch_tdep (gdbarch)->is_m
532 && arm_m_addr_is_magic (val))
533 return val;
534
535 if (arm_apcs_32)
536 return UNMAKE_THUMB_ADDR (val);
537 else
538 return (val & 0x03fffffc);
539 }
540
541 /* Return 1 if PC is the start of a compiler helper function which
542 can be safely ignored during prologue skipping. IS_THUMB is true
543 if the function is known to be a Thumb function due to the way it
544 is being called. */
545 static int
546 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
547 {
548 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
549 struct bound_minimal_symbol msym;
550
551 msym = lookup_minimal_symbol_by_pc (pc);
552 if (msym.minsym != NULL
553 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
554 && msym.minsym->linkage_name () != NULL)
555 {
556 const char *name = msym.minsym->linkage_name ();
557
558 /* The GNU linker's Thumb call stub to foo is named
559 __foo_from_thumb. */
560 if (strstr (name, "_from_thumb") != NULL)
561 name += 2;
562
563 /* On soft-float targets, __truncdfsf2 is called to convert promoted
564 arguments to their argument types in non-prototyped
565 functions. */
566 if (startswith (name, "__truncdfsf2"))
567 return 1;
568 if (startswith (name, "__aeabi_d2f"))
569 return 1;
570
571 /* Internal functions related to thread-local storage. */
572 if (startswith (name, "__tls_get_addr"))
573 return 1;
574 if (startswith (name, "__aeabi_read_tp"))
575 return 1;
576 }
577 else
578 {
579 /* If we run against a stripped glibc, we may be unable to identify
580 special functions by name. Check for one important case,
581 __aeabi_read_tp, by comparing the *code* against the default
582 implementation (this is hand-written ARM assembler in glibc). */
583
584 if (!is_thumb
585 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
586 == 0xe3e00a0f /* mov r0, #0xffff0fff */
587 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
588 == 0xe240f01f) /* sub pc, r0, #31 */
589 return 1;
590 }
591
592 return 0;
593 }
594
595 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
596 the first 16-bit of instruction, and INSN2 is the second 16-bit of
597 instruction. */
598 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
599 ((bits ((insn1), 0, 3) << 12) \
600 | (bits ((insn1), 10, 10) << 11) \
601 | (bits ((insn2), 12, 14) << 8) \
602 | bits ((insn2), 0, 7))
603
604 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
605 the 32-bit instruction. */
606 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
607 ((bits ((insn), 16, 19) << 12) \
608 | bits ((insn), 0, 11))
609
610 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
611
612 static unsigned int
613 thumb_expand_immediate (unsigned int imm)
614 {
615 unsigned int count = imm >> 7;
616
617 if (count < 8)
618 switch (count / 2)
619 {
620 case 0:
621 return imm & 0xff;
622 case 1:
623 return (imm & 0xff) | ((imm & 0xff) << 16);
624 case 2:
625 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
626 case 3:
627 return (imm & 0xff) | ((imm & 0xff) << 8)
628 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
629 }
630
631 return (0x80 | (imm & 0x7f)) << (32 - count);
632 }
633
634 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
635 epilogue, 0 otherwise. */
636
637 static int
638 thumb_instruction_restores_sp (unsigned short insn)
639 {
640 return (insn == 0x46bd /* mov sp, r7 */
641 || (insn & 0xff80) == 0xb000 /* add sp, imm */
642 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
643 }
644
645 /* Analyze a Thumb prologue, looking for a recognizable stack frame
646 and frame pointer. Scan until we encounter a store that could
647 clobber the stack frame unexpectedly, or an unknown instruction.
648 Return the last address which is definitely safe to skip for an
649 initial breakpoint. */
650
651 static CORE_ADDR
652 thumb_analyze_prologue (struct gdbarch *gdbarch,
653 CORE_ADDR start, CORE_ADDR limit,
654 struct arm_prologue_cache *cache)
655 {
656 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
657 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
658 int i;
659 pv_t regs[16];
660 CORE_ADDR offset;
661 CORE_ADDR unrecognized_pc = 0;
662
663 for (i = 0; i < 16; i++)
664 regs[i] = pv_register (i, 0);
665 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
666
667 while (start < limit)
668 {
669 unsigned short insn;
670
671 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
672
673 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
674 {
675 int regno;
676 int mask;
677
678 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
679 break;
680
681 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
682 whether to save LR (R14). */
683 mask = (insn & 0xff) | ((insn & 0x100) << 6);
684
685 /* Calculate offsets of saved R0-R7 and LR. */
686 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
687 if (mask & (1 << regno))
688 {
689 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
690 -4);
691 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
692 }
693 }
694 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
695 {
696 offset = (insn & 0x7f) << 2; /* get scaled offset */
697 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
698 -offset);
699 }
700 else if (thumb_instruction_restores_sp (insn))
701 {
702 /* Don't scan past the epilogue. */
703 break;
704 }
705 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
706 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
707 (insn & 0xff) << 2);
708 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
709 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
710 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
711 bits (insn, 6, 8));
712 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
713 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
714 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
715 bits (insn, 0, 7));
716 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
717 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
718 && pv_is_constant (regs[bits (insn, 3, 5)]))
719 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
720 regs[bits (insn, 6, 8)]);
721 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
722 && pv_is_constant (regs[bits (insn, 3, 6)]))
723 {
724 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
725 int rm = bits (insn, 3, 6);
726 regs[rd] = pv_add (regs[rd], regs[rm]);
727 }
728 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
729 {
730 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
731 int src_reg = (insn & 0x78) >> 3;
732 regs[dst_reg] = regs[src_reg];
733 }
734 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
735 {
736 /* Handle stores to the stack. Normally pushes are used,
737 but with GCC -mtpcs-frame, there may be other stores
738 in the prologue to create the frame. */
739 int regno = (insn >> 8) & 0x7;
740 pv_t addr;
741
742 offset = (insn & 0xff) << 2;
743 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
744
745 if (stack.store_would_trash (addr))
746 break;
747
748 stack.store (addr, 4, regs[regno]);
749 }
750 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
751 {
752 int rd = bits (insn, 0, 2);
753 int rn = bits (insn, 3, 5);
754 pv_t addr;
755
756 offset = bits (insn, 6, 10) << 2;
757 addr = pv_add_constant (regs[rn], offset);
758
759 if (stack.store_would_trash (addr))
760 break;
761
762 stack.store (addr, 4, regs[rd]);
763 }
764 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
765 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
766 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
767 /* Ignore stores of argument registers to the stack. */
768 ;
769 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
770 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
771 /* Ignore block loads from the stack, potentially copying
772 parameters from memory. */
773 ;
774 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
775 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
776 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
777 /* Similarly ignore single loads from the stack. */
778 ;
779 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
780 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
781 /* Skip register copies, i.e. saves to another register
782 instead of the stack. */
783 ;
784 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
785 /* Recognize constant loads; even with small stacks these are necessary
786 on Thumb. */
787 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
788 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
789 {
790 /* Constant pool loads, for the same reason. */
791 unsigned int constant;
792 CORE_ADDR loc;
793
794 loc = start + 4 + bits (insn, 0, 7) * 4;
795 constant = read_memory_unsigned_integer (loc, 4, byte_order);
796 regs[bits (insn, 8, 10)] = pv_constant (constant);
797 }
798 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
799 {
800 unsigned short inst2;
801
802 inst2 = read_code_unsigned_integer (start + 2, 2,
803 byte_order_for_code);
804
805 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
806 {
807 /* BL, BLX. Allow some special function calls when
808 skipping the prologue; GCC generates these before
809 storing arguments to the stack. */
810 CORE_ADDR nextpc;
811 int j1, j2, imm1, imm2;
812
813 imm1 = sbits (insn, 0, 10);
814 imm2 = bits (inst2, 0, 10);
815 j1 = bit (inst2, 13);
816 j2 = bit (inst2, 11);
817
818 offset = ((imm1 << 12) + (imm2 << 1));
819 offset ^= ((!j2) << 22) | ((!j1) << 23);
820
821 nextpc = start + 4 + offset;
822 /* For BLX make sure to clear the low bits. */
823 if (bit (inst2, 12) == 0)
824 nextpc = nextpc & 0xfffffffc;
825
826 if (!skip_prologue_function (gdbarch, nextpc,
827 bit (inst2, 12) != 0))
828 break;
829 }
830
831 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
832 { registers } */
833 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
834 {
835 pv_t addr = regs[bits (insn, 0, 3)];
836 int regno;
837
838 if (stack.store_would_trash (addr))
839 break;
840
841 /* Calculate offsets of saved registers. */
842 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
843 if (inst2 & (1 << regno))
844 {
845 addr = pv_add_constant (addr, -4);
846 stack.store (addr, 4, regs[regno]);
847 }
848
849 if (insn & 0x0020)
850 regs[bits (insn, 0, 3)] = addr;
851 }
852
853 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
854 [Rn, #+/-imm]{!} */
855 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
856 {
857 int regno1 = bits (inst2, 12, 15);
858 int regno2 = bits (inst2, 8, 11);
859 pv_t addr = regs[bits (insn, 0, 3)];
860
861 offset = inst2 & 0xff;
862 if (insn & 0x0080)
863 addr = pv_add_constant (addr, offset);
864 else
865 addr = pv_add_constant (addr, -offset);
866
867 if (stack.store_would_trash (addr))
868 break;
869
870 stack.store (addr, 4, regs[regno1]);
871 stack.store (pv_add_constant (addr, 4),
872 4, regs[regno2]);
873
874 if (insn & 0x0020)
875 regs[bits (insn, 0, 3)] = addr;
876 }
877
878 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
879 && (inst2 & 0x0c00) == 0x0c00
880 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
881 {
882 int regno = bits (inst2, 12, 15);
883 pv_t addr = regs[bits (insn, 0, 3)];
884
885 offset = inst2 & 0xff;
886 if (inst2 & 0x0200)
887 addr = pv_add_constant (addr, offset);
888 else
889 addr = pv_add_constant (addr, -offset);
890
891 if (stack.store_would_trash (addr))
892 break;
893
894 stack.store (addr, 4, regs[regno]);
895
896 if (inst2 & 0x0100)
897 regs[bits (insn, 0, 3)] = addr;
898 }
899
900 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
901 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
902 {
903 int regno = bits (inst2, 12, 15);
904 pv_t addr;
905
906 offset = inst2 & 0xfff;
907 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
908
909 if (stack.store_would_trash (addr))
910 break;
911
912 stack.store (addr, 4, regs[regno]);
913 }
914
915 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
916 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
917 /* Ignore stores of argument registers to the stack. */
918 ;
919
920 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
921 && (inst2 & 0x0d00) == 0x0c00
922 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
923 /* Ignore stores of argument registers to the stack. */
924 ;
925
926 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
927 { registers } */
928 && (inst2 & 0x8000) == 0x0000
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
930 /* Ignore block loads from the stack, potentially copying
931 parameters from memory. */
932 ;
933
934 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
935 [Rn, #+/-imm] */
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 /* Similarly ignore dual loads from the stack. */
938 ;
939
940 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
941 && (inst2 & 0x0d00) == 0x0c00
942 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
943 /* Similarly ignore single loads from the stack. */
944 ;
945
946 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
947 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
948 /* Similarly ignore single loads from the stack. */
949 ;
950
951 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
952 && (inst2 & 0x8000) == 0x0000)
953 {
954 unsigned int imm = ((bits (insn, 10, 10) << 11)
955 | (bits (inst2, 12, 14) << 8)
956 | bits (inst2, 0, 7));
957
958 regs[bits (inst2, 8, 11)]
959 = pv_add_constant (regs[bits (insn, 0, 3)],
960 thumb_expand_immediate (imm));
961 }
962
963 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
964 && (inst2 & 0x8000) == 0x0000)
965 {
966 unsigned int imm = ((bits (insn, 10, 10) << 11)
967 | (bits (inst2, 12, 14) << 8)
968 | bits (inst2, 0, 7));
969
970 regs[bits (inst2, 8, 11)]
971 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
972 }
973
974 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
975 && (inst2 & 0x8000) == 0x0000)
976 {
977 unsigned int imm = ((bits (insn, 10, 10) << 11)
978 | (bits (inst2, 12, 14) << 8)
979 | bits (inst2, 0, 7));
980
981 regs[bits (inst2, 8, 11)]
982 = pv_add_constant (regs[bits (insn, 0, 3)],
983 - (CORE_ADDR) thumb_expand_immediate (imm));
984 }
985
986 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
987 && (inst2 & 0x8000) == 0x0000)
988 {
989 unsigned int imm = ((bits (insn, 10, 10) << 11)
990 | (bits (inst2, 12, 14) << 8)
991 | bits (inst2, 0, 7));
992
993 regs[bits (inst2, 8, 11)]
994 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
995 }
996
997 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
998 {
999 unsigned int imm = ((bits (insn, 10, 10) << 11)
1000 | (bits (inst2, 12, 14) << 8)
1001 | bits (inst2, 0, 7));
1002
1003 regs[bits (inst2, 8, 11)]
1004 = pv_constant (thumb_expand_immediate (imm));
1005 }
1006
1007 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1008 {
1009 unsigned int imm
1010 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1011
1012 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1013 }
1014
1015 else if (insn == 0xea5f /* mov.w Rd,Rm */
1016 && (inst2 & 0xf0f0) == 0)
1017 {
1018 int dst_reg = (inst2 & 0x0f00) >> 8;
1019 int src_reg = inst2 & 0xf;
1020 regs[dst_reg] = regs[src_reg];
1021 }
1022
1023 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1024 {
1025 /* Constant pool loads. */
1026 unsigned int constant;
1027 CORE_ADDR loc;
1028
1029 offset = bits (inst2, 0, 11);
1030 if (insn & 0x0080)
1031 loc = start + 4 + offset;
1032 else
1033 loc = start + 4 - offset;
1034
1035 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1036 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1037 }
1038
1039 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1040 {
1041 /* Constant pool loads. */
1042 unsigned int constant;
1043 CORE_ADDR loc;
1044
1045 offset = bits (inst2, 0, 7) << 2;
1046 if (insn & 0x0080)
1047 loc = start + 4 + offset;
1048 else
1049 loc = start + 4 - offset;
1050
1051 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1052 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1053
1054 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1055 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1056 }
1057
1058 else if (thumb2_instruction_changes_pc (insn, inst2))
1059 {
1060 /* Don't scan past anything that might change control flow. */
1061 break;
1062 }
1063 else
1064 {
1065 /* The optimizer might shove anything into the prologue,
1066 so we just skip what we don't recognize. */
1067 unrecognized_pc = start;
1068 }
1069
1070 start += 2;
1071 }
1072 else if (thumb_instruction_changes_pc (insn))
1073 {
1074 /* Don't scan past anything that might change control flow. */
1075 break;
1076 }
1077 else
1078 {
1079 /* The optimizer might shove anything into the prologue,
1080 so we just skip what we don't recognize. */
1081 unrecognized_pc = start;
1082 }
1083
1084 start += 2;
1085 }
1086
1087 if (arm_debug)
1088 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1089 paddress (gdbarch, start));
1090
1091 if (unrecognized_pc == 0)
1092 unrecognized_pc = start;
1093
1094 if (cache == NULL)
1095 return unrecognized_pc;
1096
1097 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1098 {
1099 /* Frame pointer is fp. Frame size is constant. */
1100 cache->framereg = ARM_FP_REGNUM;
1101 cache->framesize = -regs[ARM_FP_REGNUM].k;
1102 }
1103 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1104 {
1105 /* Frame pointer is r7. Frame size is constant. */
1106 cache->framereg = THUMB_FP_REGNUM;
1107 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1108 }
1109 else
1110 {
1111 /* Try the stack pointer... this is a bit desperate. */
1112 cache->framereg = ARM_SP_REGNUM;
1113 cache->framesize = -regs[ARM_SP_REGNUM].k;
1114 }
1115
1116 for (i = 0; i < 16; i++)
1117 if (stack.find_reg (gdbarch, i, &offset))
1118 cache->saved_regs[i].addr = offset;
1119
1120 return unrecognized_pc;
1121 }
1122
1123
1124 /* Try to analyze the instructions starting from PC, which load symbol
1125 __stack_chk_guard. Return the address of instruction after loading this
1126 symbol, set the dest register number to *BASEREG, and set the size of
1127 instructions for loading symbol in OFFSET. Return 0 if instructions are
1128 not recognized. */
1129
1130 static CORE_ADDR
1131 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1132 unsigned int *destreg, int *offset)
1133 {
1134 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1135 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1136 unsigned int low, high, address;
1137
1138 address = 0;
1139 if (is_thumb)
1140 {
1141 unsigned short insn1
1142 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1143
1144 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1145 {
1146 *destreg = bits (insn1, 8, 10);
1147 *offset = 2;
1148 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1149 address = read_memory_unsigned_integer (address, 4,
1150 byte_order_for_code);
1151 }
1152 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1153 {
1154 unsigned short insn2
1155 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1156
1157 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1158
1159 insn1
1160 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1161 insn2
1162 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1163
1164 /* movt Rd, #const */
1165 if ((insn1 & 0xfbc0) == 0xf2c0)
1166 {
1167 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1168 *destreg = bits (insn2, 8, 11);
1169 *offset = 8;
1170 address = (high << 16 | low);
1171 }
1172 }
1173 }
1174 else
1175 {
1176 unsigned int insn
1177 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1178
1179 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1180 {
1181 address = bits (insn, 0, 11) + pc + 8;
1182 address = read_memory_unsigned_integer (address, 4,
1183 byte_order_for_code);
1184
1185 *destreg = bits (insn, 12, 15);
1186 *offset = 4;
1187 }
1188 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1189 {
1190 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1191
1192 insn
1193 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1194
1195 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1196 {
1197 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1198 *destreg = bits (insn, 12, 15);
1199 *offset = 8;
1200 address = (high << 16 | low);
1201 }
1202 }
1203 }
1204
1205 return address;
1206 }
1207
1208 /* Try to skip a sequence of instructions used for stack protector. If PC
1209 points to the first instruction of this sequence, return the address of
1210 first instruction after this sequence, otherwise, return original PC.
1211
1212 On arm, this sequence of instructions is composed of mainly three steps,
1213 Step 1: load symbol __stack_chk_guard,
1214 Step 2: load from address of __stack_chk_guard,
1215 Step 3: store it to somewhere else.
1216
1217 Usually, instructions on step 2 and step 3 are the same on various ARM
1218 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1219 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1220 instructions in step 1 vary from different ARM architectures. On ARMv7,
1221 they are,
1222
1223 movw Rn, #:lower16:__stack_chk_guard
1224 movt Rn, #:upper16:__stack_chk_guard
1225
1226 On ARMv5t, it is,
1227
1228 ldr Rn, .Label
1229 ....
1230 .Lable:
1231 .word __stack_chk_guard
1232
1233 Since ldr/str is a very popular instruction, we can't use them as
1234 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1235 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1236 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1237
1238 static CORE_ADDR
1239 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1240 {
1241 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1242 unsigned int basereg;
1243 struct bound_minimal_symbol stack_chk_guard;
1244 int offset;
1245 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1246 CORE_ADDR addr;
1247
1248 /* Try to parse the instructions in Step 1. */
1249 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1250 &basereg, &offset);
1251 if (!addr)
1252 return pc;
1253
1254 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1255 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1256 Otherwise, this sequence cannot be for stack protector. */
1257 if (stack_chk_guard.minsym == NULL
1258 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1259 return pc;
1260
1261 if (is_thumb)
1262 {
1263 unsigned int destreg;
1264 unsigned short insn
1265 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1266
1267 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1268 if ((insn & 0xf800) != 0x6800)
1269 return pc;
1270 if (bits (insn, 3, 5) != basereg)
1271 return pc;
1272 destreg = bits (insn, 0, 2);
1273
1274 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1275 byte_order_for_code);
1276 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1277 if ((insn & 0xf800) != 0x6000)
1278 return pc;
1279 if (destreg != bits (insn, 0, 2))
1280 return pc;
1281 }
1282 else
1283 {
1284 unsigned int destreg;
1285 unsigned int insn
1286 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1287
1288 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1289 if ((insn & 0x0e500000) != 0x04100000)
1290 return pc;
1291 if (bits (insn, 16, 19) != basereg)
1292 return pc;
1293 destreg = bits (insn, 12, 15);
1294 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1295 insn = read_code_unsigned_integer (pc + offset + 4,
1296 4, byte_order_for_code);
1297 if ((insn & 0x0e500000) != 0x04000000)
1298 return pc;
1299 if (bits (insn, 12, 15) != destreg)
1300 return pc;
1301 }
1302 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1303 on arm. */
1304 if (is_thumb)
1305 return pc + offset + 4;
1306 else
1307 return pc + offset + 8;
1308 }
1309
1310 /* Advance the PC across any function entry prologue instructions to
1311 reach some "real" code.
1312
1313 The APCS (ARM Procedure Call Standard) defines the following
1314 prologue:
1315
1316 mov ip, sp
1317 [stmfd sp!, {a1,a2,a3,a4}]
1318 stmfd sp!, {...,fp,ip,lr,pc}
1319 [stfe f7, [sp, #-12]!]
1320 [stfe f6, [sp, #-12]!]
1321 [stfe f5, [sp, #-12]!]
1322 [stfe f4, [sp, #-12]!]
1323 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1324
1325 static CORE_ADDR
1326 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1327 {
1328 CORE_ADDR func_addr, limit_pc;
1329
1330 /* See if we can determine the end of the prologue via the symbol table.
1331 If so, then return either PC, or the PC after the prologue, whichever
1332 is greater. */
1333 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1334 {
1335 CORE_ADDR post_prologue_pc
1336 = skip_prologue_using_sal (gdbarch, func_addr);
1337 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1338
1339 if (post_prologue_pc)
1340 post_prologue_pc
1341 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1342
1343
1344 /* GCC always emits a line note before the prologue and another
1345 one after, even if the two are at the same address or on the
1346 same line. Take advantage of this so that we do not need to
1347 know every instruction that might appear in the prologue. We
1348 will have producer information for most binaries; if it is
1349 missing (e.g. for -gstabs), assuming the GNU tools. */
1350 if (post_prologue_pc
1351 && (cust == NULL
1352 || COMPUNIT_PRODUCER (cust) == NULL
1353 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1354 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1355 return post_prologue_pc;
1356
1357 if (post_prologue_pc != 0)
1358 {
1359 CORE_ADDR analyzed_limit;
1360
1361 /* For non-GCC compilers, make sure the entire line is an
1362 acceptable prologue; GDB will round this function's
1363 return value up to the end of the following line so we
1364 can not skip just part of a line (and we do not want to).
1365
1366 RealView does not treat the prologue specially, but does
1367 associate prologue code with the opening brace; so this
1368 lets us skip the first line if we think it is the opening
1369 brace. */
1370 if (arm_pc_is_thumb (gdbarch, func_addr))
1371 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1372 post_prologue_pc, NULL);
1373 else
1374 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1375 post_prologue_pc, NULL);
1376
1377 if (analyzed_limit != post_prologue_pc)
1378 return func_addr;
1379
1380 return post_prologue_pc;
1381 }
1382 }
1383
1384 /* Can't determine prologue from the symbol table, need to examine
1385 instructions. */
1386
1387 /* Find an upper limit on the function prologue using the debug
1388 information. If the debug information could not be used to provide
1389 that bound, then use an arbitrary large number as the upper bound. */
1390 /* Like arm_scan_prologue, stop no later than pc + 64. */
1391 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1392 if (limit_pc == 0)
1393 limit_pc = pc + 64; /* Magic. */
1394
1395
1396 /* Check if this is Thumb code. */
1397 if (arm_pc_is_thumb (gdbarch, pc))
1398 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1399 else
1400 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1401 }
1402
1403 /* *INDENT-OFF* */
1404 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1405 This function decodes a Thumb function prologue to determine:
1406 1) the size of the stack frame
1407 2) which registers are saved on it
1408 3) the offsets of saved regs
1409 4) the offset from the stack pointer to the frame pointer
1410
1411 A typical Thumb function prologue would create this stack frame
1412 (offsets relative to FP)
1413 old SP -> 24 stack parameters
1414 20 LR
1415 16 R7
1416 R7 -> 0 local variables (16 bytes)
1417 SP -> -12 additional stack space (12 bytes)
1418 The frame size would thus be 36 bytes, and the frame offset would be
1419 12 bytes. The frame register is R7.
1420
1421 The comments for thumb_skip_prolog() describe the algorithm we use
1422 to detect the end of the prolog. */
1423 /* *INDENT-ON* */
1424
1425 static void
1426 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1427 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1428 {
1429 CORE_ADDR prologue_start;
1430 CORE_ADDR prologue_end;
1431
1432 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1433 &prologue_end))
1434 {
1435 /* See comment in arm_scan_prologue for an explanation of
1436 this heuristics. */
1437 if (prologue_end > prologue_start + 64)
1438 {
1439 prologue_end = prologue_start + 64;
1440 }
1441 }
1442 else
1443 /* We're in the boondocks: we have no idea where the start of the
1444 function is. */
1445 return;
1446
1447 prologue_end = std::min (prologue_end, prev_pc);
1448
1449 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1450 }
1451
1452 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1453 otherwise. */
1454
1455 static int
1456 arm_instruction_restores_sp (unsigned int insn)
1457 {
1458 if (bits (insn, 28, 31) != INST_NV)
1459 {
1460 if ((insn & 0x0df0f000) == 0x0080d000
1461 /* ADD SP (register or immediate). */
1462 || (insn & 0x0df0f000) == 0x0040d000
1463 /* SUB SP (register or immediate). */
1464 || (insn & 0x0ffffff0) == 0x01a0d000
1465 /* MOV SP. */
1466 || (insn & 0x0fff0000) == 0x08bd0000
1467 /* POP (LDMIA). */
1468 || (insn & 0x0fff0000) == 0x049d0000)
1469 /* POP of a single register. */
1470 return 1;
1471 }
1472
1473 return 0;
1474 }
1475
1476 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1477 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1478 fill it in. Return the first address not recognized as a prologue
1479 instruction.
1480
1481 We recognize all the instructions typically found in ARM prologues,
1482 plus harmless instructions which can be skipped (either for analysis
1483 purposes, or a more restrictive set that can be skipped when finding
1484 the end of the prologue). */
1485
1486 static CORE_ADDR
1487 arm_analyze_prologue (struct gdbarch *gdbarch,
1488 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1489 struct arm_prologue_cache *cache)
1490 {
1491 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1492 int regno;
1493 CORE_ADDR offset, current_pc;
1494 pv_t regs[ARM_FPS_REGNUM];
1495 CORE_ADDR unrecognized_pc = 0;
1496
1497 /* Search the prologue looking for instructions that set up the
1498 frame pointer, adjust the stack pointer, and save registers.
1499
1500 Be careful, however, and if it doesn't look like a prologue,
1501 don't try to scan it. If, for instance, a frameless function
1502 begins with stmfd sp!, then we will tell ourselves there is
1503 a frame, which will confuse stack traceback, as well as "finish"
1504 and other operations that rely on a knowledge of the stack
1505 traceback. */
1506
1507 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1508 regs[regno] = pv_register (regno, 0);
1509 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1510
1511 for (current_pc = prologue_start;
1512 current_pc < prologue_end;
1513 current_pc += 4)
1514 {
1515 unsigned int insn
1516 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1517
1518 if (insn == 0xe1a0c00d) /* mov ip, sp */
1519 {
1520 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1521 continue;
1522 }
1523 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1524 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1525 {
1526 unsigned imm = insn & 0xff; /* immediate value */
1527 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1528 int rd = bits (insn, 12, 15);
1529 imm = (imm >> rot) | (imm << (32 - rot));
1530 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1531 continue;
1532 }
1533 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1534 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1535 {
1536 unsigned imm = insn & 0xff; /* immediate value */
1537 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1538 int rd = bits (insn, 12, 15);
1539 imm = (imm >> rot) | (imm << (32 - rot));
1540 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1541 continue;
1542 }
1543 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1544 [sp, #-4]! */
1545 {
1546 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1547 break;
1548 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1549 stack.store (regs[ARM_SP_REGNUM], 4,
1550 regs[bits (insn, 12, 15)]);
1551 continue;
1552 }
1553 else if ((insn & 0xffff0000) == 0xe92d0000)
1554 /* stmfd sp!, {..., fp, ip, lr, pc}
1555 or
1556 stmfd sp!, {a1, a2, a3, a4} */
1557 {
1558 int mask = insn & 0xffff;
1559
1560 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1561 break;
1562
1563 /* Calculate offsets of saved registers. */
1564 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1565 if (mask & (1 << regno))
1566 {
1567 regs[ARM_SP_REGNUM]
1568 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1569 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1570 }
1571 }
1572 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1573 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1574 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1575 {
1576 /* No need to add this to saved_regs -- it's just an arg reg. */
1577 continue;
1578 }
1579 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1580 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1581 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1582 {
1583 /* No need to add this to saved_regs -- it's just an arg reg. */
1584 continue;
1585 }
1586 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1587 { registers } */
1588 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1589 {
1590 /* No need to add this to saved_regs -- it's just arg regs. */
1591 continue;
1592 }
1593 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1594 {
1595 unsigned imm = insn & 0xff; /* immediate value */
1596 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1597 imm = (imm >> rot) | (imm << (32 - rot));
1598 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1599 }
1600 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1601 {
1602 unsigned imm = insn & 0xff; /* immediate value */
1603 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1604 imm = (imm >> rot) | (imm << (32 - rot));
1605 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1606 }
1607 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1608 [sp, -#c]! */
1609 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1610 {
1611 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1612 break;
1613
1614 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1615 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1616 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1617 }
1618 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1619 [sp!] */
1620 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1621 {
1622 int n_saved_fp_regs;
1623 unsigned int fp_start_reg, fp_bound_reg;
1624
1625 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1626 break;
1627
1628 if ((insn & 0x800) == 0x800) /* N0 is set */
1629 {
1630 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1631 n_saved_fp_regs = 3;
1632 else
1633 n_saved_fp_regs = 1;
1634 }
1635 else
1636 {
1637 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1638 n_saved_fp_regs = 2;
1639 else
1640 n_saved_fp_regs = 4;
1641 }
1642
1643 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1644 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1645 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1646 {
1647 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1648 stack.store (regs[ARM_SP_REGNUM], 12,
1649 regs[fp_start_reg++]);
1650 }
1651 }
1652 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1653 {
1654 /* Allow some special function calls when skipping the
1655 prologue; GCC generates these before storing arguments to
1656 the stack. */
1657 CORE_ADDR dest = BranchDest (current_pc, insn);
1658
1659 if (skip_prologue_function (gdbarch, dest, 0))
1660 continue;
1661 else
1662 break;
1663 }
1664 else if ((insn & 0xf0000000) != 0xe0000000)
1665 break; /* Condition not true, exit early. */
1666 else if (arm_instruction_changes_pc (insn))
1667 /* Don't scan past anything that might change control flow. */
1668 break;
1669 else if (arm_instruction_restores_sp (insn))
1670 {
1671 /* Don't scan past the epilogue. */
1672 break;
1673 }
1674 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1675 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1676 /* Ignore block loads from the stack, potentially copying
1677 parameters from memory. */
1678 continue;
1679 else if ((insn & 0xfc500000) == 0xe4100000
1680 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1681 /* Similarly ignore single loads from the stack. */
1682 continue;
1683 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1684 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1685 register instead of the stack. */
1686 continue;
1687 else
1688 {
1689 /* The optimizer might shove anything into the prologue, if
1690 we build up cache (cache != NULL) from scanning prologue,
1691 we just skip what we don't recognize and scan further to
1692 make cache as complete as possible. However, if we skip
1693 prologue, we'll stop immediately on unrecognized
1694 instruction. */
1695 unrecognized_pc = current_pc;
1696 if (cache != NULL)
1697 continue;
1698 else
1699 break;
1700 }
1701 }
1702
1703 if (unrecognized_pc == 0)
1704 unrecognized_pc = current_pc;
1705
1706 if (cache)
1707 {
1708 int framereg, framesize;
1709
1710 /* The frame size is just the distance from the frame register
1711 to the original stack pointer. */
1712 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1713 {
1714 /* Frame pointer is fp. */
1715 framereg = ARM_FP_REGNUM;
1716 framesize = -regs[ARM_FP_REGNUM].k;
1717 }
1718 else
1719 {
1720 /* Try the stack pointer... this is a bit desperate. */
1721 framereg = ARM_SP_REGNUM;
1722 framesize = -regs[ARM_SP_REGNUM].k;
1723 }
1724
1725 cache->framereg = framereg;
1726 cache->framesize = framesize;
1727
1728 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1729 if (stack.find_reg (gdbarch, regno, &offset))
1730 cache->saved_regs[regno].addr = offset;
1731 }
1732
1733 if (arm_debug)
1734 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1735 paddress (gdbarch, unrecognized_pc));
1736
1737 return unrecognized_pc;
1738 }
1739
1740 static void
1741 arm_scan_prologue (struct frame_info *this_frame,
1742 struct arm_prologue_cache *cache)
1743 {
1744 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1745 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1746 CORE_ADDR prologue_start, prologue_end;
1747 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1748 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1749
1750 /* Assume there is no frame until proven otherwise. */
1751 cache->framereg = ARM_SP_REGNUM;
1752 cache->framesize = 0;
1753
1754 /* Check for Thumb prologue. */
1755 if (arm_frame_is_thumb (this_frame))
1756 {
1757 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1758 return;
1759 }
1760
1761 /* Find the function prologue. If we can't find the function in
1762 the symbol table, peek in the stack frame to find the PC. */
1763 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1764 &prologue_end))
1765 {
1766 /* One way to find the end of the prologue (which works well
1767 for unoptimized code) is to do the following:
1768
1769 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1770
1771 if (sal.line == 0)
1772 prologue_end = prev_pc;
1773 else if (sal.end < prologue_end)
1774 prologue_end = sal.end;
1775
1776 This mechanism is very accurate so long as the optimizer
1777 doesn't move any instructions from the function body into the
1778 prologue. If this happens, sal.end will be the last
1779 instruction in the first hunk of prologue code just before
1780 the first instruction that the scheduler has moved from
1781 the body to the prologue.
1782
1783 In order to make sure that we scan all of the prologue
1784 instructions, we use a slightly less accurate mechanism which
1785 may scan more than necessary. To help compensate for this
1786 lack of accuracy, the prologue scanning loop below contains
1787 several clauses which'll cause the loop to terminate early if
1788 an implausible prologue instruction is encountered.
1789
1790 The expression
1791
1792 prologue_start + 64
1793
1794 is a suitable endpoint since it accounts for the largest
1795 possible prologue plus up to five instructions inserted by
1796 the scheduler. */
1797
1798 if (prologue_end > prologue_start + 64)
1799 {
1800 prologue_end = prologue_start + 64; /* See above. */
1801 }
1802 }
1803 else
1804 {
1805 /* We have no symbol information. Our only option is to assume this
1806 function has a standard stack frame and the normal frame register.
1807 Then, we can find the value of our frame pointer on entrance to
1808 the callee (or at the present moment if this is the innermost frame).
1809 The value stored there should be the address of the stmfd + 8. */
1810 CORE_ADDR frame_loc;
1811 ULONGEST return_value;
1812
1813 /* AAPCS does not use a frame register, so we can abort here. */
1814 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1815 return;
1816
1817 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1818 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1819 &return_value))
1820 return;
1821 else
1822 {
1823 prologue_start = gdbarch_addr_bits_remove
1824 (gdbarch, return_value) - 8;
1825 prologue_end = prologue_start + 64; /* See above. */
1826 }
1827 }
1828
1829 if (prev_pc < prologue_end)
1830 prologue_end = prev_pc;
1831
1832 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1833 }
1834
1835 static struct arm_prologue_cache *
1836 arm_make_prologue_cache (struct frame_info *this_frame)
1837 {
1838 int reg;
1839 struct arm_prologue_cache *cache;
1840 CORE_ADDR unwound_fp;
1841
1842 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1843 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1844
1845 arm_scan_prologue (this_frame, cache);
1846
1847 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1848 if (unwound_fp == 0)
1849 return cache;
1850
1851 cache->prev_sp = unwound_fp + cache->framesize;
1852
1853 /* Calculate actual addresses of saved registers using offsets
1854 determined by arm_scan_prologue. */
1855 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1856 if (trad_frame_addr_p (cache->saved_regs, reg))
1857 cache->saved_regs[reg].addr += cache->prev_sp;
1858
1859 return cache;
1860 }
1861
1862 /* Implementation of the stop_reason hook for arm_prologue frames. */
1863
1864 static enum unwind_stop_reason
1865 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1866 void **this_cache)
1867 {
1868 struct arm_prologue_cache *cache;
1869 CORE_ADDR pc;
1870
1871 if (*this_cache == NULL)
1872 *this_cache = arm_make_prologue_cache (this_frame);
1873 cache = (struct arm_prologue_cache *) *this_cache;
1874
1875 /* This is meant to halt the backtrace at "_start". */
1876 pc = get_frame_pc (this_frame);
1877 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1878 return UNWIND_OUTERMOST;
1879
1880 /* If we've hit a wall, stop. */
1881 if (cache->prev_sp == 0)
1882 return UNWIND_OUTERMOST;
1883
1884 return UNWIND_NO_REASON;
1885 }
1886
1887 /* Our frame ID for a normal frame is the current function's starting PC
1888 and the caller's SP when we were called. */
1889
1890 static void
1891 arm_prologue_this_id (struct frame_info *this_frame,
1892 void **this_cache,
1893 struct frame_id *this_id)
1894 {
1895 struct arm_prologue_cache *cache;
1896 struct frame_id id;
1897 CORE_ADDR pc, func;
1898
1899 if (*this_cache == NULL)
1900 *this_cache = arm_make_prologue_cache (this_frame);
1901 cache = (struct arm_prologue_cache *) *this_cache;
1902
1903 /* Use function start address as part of the frame ID. If we cannot
1904 identify the start address (due to missing symbol information),
1905 fall back to just using the current PC. */
1906 pc = get_frame_pc (this_frame);
1907 func = get_frame_func (this_frame);
1908 if (!func)
1909 func = pc;
1910
1911 id = frame_id_build (cache->prev_sp, func);
1912 *this_id = id;
1913 }
1914
1915 static struct value *
1916 arm_prologue_prev_register (struct frame_info *this_frame,
1917 void **this_cache,
1918 int prev_regnum)
1919 {
1920 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1921 struct arm_prologue_cache *cache;
1922
1923 if (*this_cache == NULL)
1924 *this_cache = arm_make_prologue_cache (this_frame);
1925 cache = (struct arm_prologue_cache *) *this_cache;
1926
1927 /* If we are asked to unwind the PC, then we need to return the LR
1928 instead. The prologue may save PC, but it will point into this
1929 frame's prologue, not the next frame's resume location. Also
1930 strip the saved T bit. A valid LR may have the low bit set, but
1931 a valid PC never does. */
1932 if (prev_regnum == ARM_PC_REGNUM)
1933 {
1934 CORE_ADDR lr;
1935
1936 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1937 return frame_unwind_got_constant (this_frame, prev_regnum,
1938 arm_addr_bits_remove (gdbarch, lr));
1939 }
1940
1941 /* SP is generally not saved to the stack, but this frame is
1942 identified by the next frame's stack pointer at the time of the call.
1943 The value was already reconstructed into PREV_SP. */
1944 if (prev_regnum == ARM_SP_REGNUM)
1945 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1946
1947 /* The CPSR may have been changed by the call instruction and by the
1948 called function. The only bit we can reconstruct is the T bit,
1949 by checking the low bit of LR as of the call. This is a reliable
1950 indicator of Thumb-ness except for some ARM v4T pre-interworking
1951 Thumb code, which could get away with a clear low bit as long as
1952 the called function did not use bx. Guess that all other
1953 bits are unchanged; the condition flags are presumably lost,
1954 but the processor status is likely valid. */
1955 if (prev_regnum == ARM_PS_REGNUM)
1956 {
1957 CORE_ADDR lr, cpsr;
1958 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1959
1960 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1961 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1962 if (IS_THUMB_ADDR (lr))
1963 cpsr |= t_bit;
1964 else
1965 cpsr &= ~t_bit;
1966 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1967 }
1968
1969 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1970 prev_regnum);
1971 }
1972
1973 struct frame_unwind arm_prologue_unwind = {
1974 NORMAL_FRAME,
1975 arm_prologue_unwind_stop_reason,
1976 arm_prologue_this_id,
1977 arm_prologue_prev_register,
1978 NULL,
1979 default_frame_sniffer
1980 };
1981
1982 /* Maintain a list of ARM exception table entries per objfile, similar to the
1983 list of mapping symbols. We only cache entries for standard ARM-defined
1984 personality routines; the cache will contain only the frame unwinding
1985 instructions associated with the entry (not the descriptors). */
1986
1987 struct arm_exidx_entry
1988 {
1989 CORE_ADDR addr;
1990 gdb_byte *entry;
1991
1992 bool operator< (const arm_exidx_entry &other) const
1993 {
1994 return addr < other.addr;
1995 }
1996 };
1997
1998 struct arm_exidx_data
1999 {
2000 std::vector<std::vector<arm_exidx_entry>> section_maps;
2001 };
2002
2003 /* Per-BFD key to store exception handling information. */
2004 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2005
2006 static struct obj_section *
2007 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2008 {
2009 struct obj_section *osect;
2010
2011 ALL_OBJFILE_OSECTIONS (objfile, osect)
2012 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2013 {
2014 bfd_vma start, size;
2015 start = bfd_section_vma (osect->the_bfd_section);
2016 size = bfd_section_size (osect->the_bfd_section);
2017
2018 if (start <= vma && vma < start + size)
2019 return osect;
2020 }
2021
2022 return NULL;
2023 }
2024
2025 /* Parse contents of exception table and exception index sections
2026 of OBJFILE, and fill in the exception table entry cache.
2027
2028 For each entry that refers to a standard ARM-defined personality
2029 routine, extract the frame unwinding instructions (from either
2030 the index or the table section). The unwinding instructions
2031 are normalized by:
2032 - extracting them from the rest of the table data
2033 - converting to host endianness
2034 - appending the implicit 0xb0 ("Finish") code
2035
2036 The extracted and normalized instructions are stored for later
2037 retrieval by the arm_find_exidx_entry routine. */
2038
2039 static void
2040 arm_exidx_new_objfile (struct objfile *objfile)
2041 {
2042 struct arm_exidx_data *data;
2043 asection *exidx, *extab;
2044 bfd_vma exidx_vma = 0, extab_vma = 0;
2045 LONGEST i;
2046
2047 /* If we've already touched this file, do nothing. */
2048 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2049 return;
2050
2051 /* Read contents of exception table and index. */
2052 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2053 gdb::byte_vector exidx_data;
2054 if (exidx)
2055 {
2056 exidx_vma = bfd_section_vma (exidx);
2057 exidx_data.resize (bfd_section_size (exidx));
2058
2059 if (!bfd_get_section_contents (objfile->obfd, exidx,
2060 exidx_data.data (), 0,
2061 exidx_data.size ()))
2062 return;
2063 }
2064
2065 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2066 gdb::byte_vector extab_data;
2067 if (extab)
2068 {
2069 extab_vma = bfd_section_vma (extab);
2070 extab_data.resize (bfd_section_size (extab));
2071
2072 if (!bfd_get_section_contents (objfile->obfd, extab,
2073 extab_data.data (), 0,
2074 extab_data.size ()))
2075 return;
2076 }
2077
2078 /* Allocate exception table data structure. */
2079 data = arm_exidx_data_key.emplace (objfile->obfd);
2080 data->section_maps.resize (objfile->obfd->section_count);
2081
2082 /* Fill in exception table. */
2083 for (i = 0; i < exidx_data.size () / 8; i++)
2084 {
2085 struct arm_exidx_entry new_exidx_entry;
2086 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2087 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2088 exidx_data.data () + i * 8 + 4);
2089 bfd_vma addr = 0, word = 0;
2090 int n_bytes = 0, n_words = 0;
2091 struct obj_section *sec;
2092 gdb_byte *entry = NULL;
2093
2094 /* Extract address of start of function. */
2095 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2096 idx += exidx_vma + i * 8;
2097
2098 /* Find section containing function and compute section offset. */
2099 sec = arm_obj_section_from_vma (objfile, idx);
2100 if (sec == NULL)
2101 continue;
2102 idx -= bfd_section_vma (sec->the_bfd_section);
2103
2104 /* Determine address of exception table entry. */
2105 if (val == 1)
2106 {
2107 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2108 }
2109 else if ((val & 0xff000000) == 0x80000000)
2110 {
2111 /* Exception table entry embedded in .ARM.exidx
2112 -- must be short form. */
2113 word = val;
2114 n_bytes = 3;
2115 }
2116 else if (!(val & 0x80000000))
2117 {
2118 /* Exception table entry in .ARM.extab. */
2119 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2120 addr += exidx_vma + i * 8 + 4;
2121
2122 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2123 {
2124 word = bfd_h_get_32 (objfile->obfd,
2125 extab_data.data () + addr - extab_vma);
2126 addr += 4;
2127
2128 if ((word & 0xff000000) == 0x80000000)
2129 {
2130 /* Short form. */
2131 n_bytes = 3;
2132 }
2133 else if ((word & 0xff000000) == 0x81000000
2134 || (word & 0xff000000) == 0x82000000)
2135 {
2136 /* Long form. */
2137 n_bytes = 2;
2138 n_words = ((word >> 16) & 0xff);
2139 }
2140 else if (!(word & 0x80000000))
2141 {
2142 bfd_vma pers;
2143 struct obj_section *pers_sec;
2144 int gnu_personality = 0;
2145
2146 /* Custom personality routine. */
2147 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2148 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2149
2150 /* Check whether we've got one of the variants of the
2151 GNU personality routines. */
2152 pers_sec = arm_obj_section_from_vma (objfile, pers);
2153 if (pers_sec)
2154 {
2155 static const char *personality[] =
2156 {
2157 "__gcc_personality_v0",
2158 "__gxx_personality_v0",
2159 "__gcj_personality_v0",
2160 "__gnu_objc_personality_v0",
2161 NULL
2162 };
2163
2164 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2165 int k;
2166
2167 for (k = 0; personality[k]; k++)
2168 if (lookup_minimal_symbol_by_pc_name
2169 (pc, personality[k], objfile))
2170 {
2171 gnu_personality = 1;
2172 break;
2173 }
2174 }
2175
2176 /* If so, the next word contains a word count in the high
2177 byte, followed by the same unwind instructions as the
2178 pre-defined forms. */
2179 if (gnu_personality
2180 && addr + 4 <= extab_vma + extab_data.size ())
2181 {
2182 word = bfd_h_get_32 (objfile->obfd,
2183 (extab_data.data ()
2184 + addr - extab_vma));
2185 addr += 4;
2186 n_bytes = 3;
2187 n_words = ((word >> 24) & 0xff);
2188 }
2189 }
2190 }
2191 }
2192
2193 /* Sanity check address. */
2194 if (n_words)
2195 if (addr < extab_vma
2196 || addr + 4 * n_words > extab_vma + extab_data.size ())
2197 n_words = n_bytes = 0;
2198
2199 /* The unwind instructions reside in WORD (only the N_BYTES least
2200 significant bytes are valid), followed by N_WORDS words in the
2201 extab section starting at ADDR. */
2202 if (n_bytes || n_words)
2203 {
2204 gdb_byte *p = entry
2205 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2206 n_bytes + n_words * 4 + 1);
2207
2208 while (n_bytes--)
2209 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2210
2211 while (n_words--)
2212 {
2213 word = bfd_h_get_32 (objfile->obfd,
2214 extab_data.data () + addr - extab_vma);
2215 addr += 4;
2216
2217 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2218 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2219 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2220 *p++ = (gdb_byte) (word & 0xff);
2221 }
2222
2223 /* Implied "Finish" to terminate the list. */
2224 *p++ = 0xb0;
2225 }
2226
2227 /* Push entry onto vector. They are guaranteed to always
2228 appear in order of increasing addresses. */
2229 new_exidx_entry.addr = idx;
2230 new_exidx_entry.entry = entry;
2231 data->section_maps[sec->the_bfd_section->index].push_back
2232 (new_exidx_entry);
2233 }
2234 }
2235
2236 /* Search for the exception table entry covering MEMADDR. If one is found,
2237 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2238 set *START to the start of the region covered by this entry. */
2239
2240 static gdb_byte *
2241 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2242 {
2243 struct obj_section *sec;
2244
2245 sec = find_pc_section (memaddr);
2246 if (sec != NULL)
2247 {
2248 struct arm_exidx_data *data;
2249 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2250
2251 data = arm_exidx_data_key.get (sec->objfile->obfd);
2252 if (data != NULL)
2253 {
2254 std::vector<arm_exidx_entry> &map
2255 = data->section_maps[sec->the_bfd_section->index];
2256 if (!map.empty ())
2257 {
2258 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2259
2260 /* std::lower_bound finds the earliest ordered insertion
2261 point. If the following symbol starts at this exact
2262 address, we use that; otherwise, the preceding
2263 exception table entry covers this address. */
2264 if (idx < map.end ())
2265 {
2266 if (idx->addr == map_key.addr)
2267 {
2268 if (start)
2269 *start = idx->addr + obj_section_addr (sec);
2270 return idx->entry;
2271 }
2272 }
2273
2274 if (idx > map.begin ())
2275 {
2276 idx = idx - 1;
2277 if (start)
2278 *start = idx->addr + obj_section_addr (sec);
2279 return idx->entry;
2280 }
2281 }
2282 }
2283 }
2284
2285 return NULL;
2286 }
2287
2288 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2289 instruction list from the ARM exception table entry ENTRY, allocate and
2290 return a prologue cache structure describing how to unwind this frame.
2291
2292 Return NULL if the unwinding instruction list contains a "spare",
2293 "reserved" or "refuse to unwind" instruction as defined in section
2294 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2295 for the ARM Architecture" document. */
2296
2297 static struct arm_prologue_cache *
2298 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2299 {
2300 CORE_ADDR vsp = 0;
2301 int vsp_valid = 0;
2302
2303 struct arm_prologue_cache *cache;
2304 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2305 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2306
2307 for (;;)
2308 {
2309 gdb_byte insn;
2310
2311 /* Whenever we reload SP, we actually have to retrieve its
2312 actual value in the current frame. */
2313 if (!vsp_valid)
2314 {
2315 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2316 {
2317 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2318 vsp = get_frame_register_unsigned (this_frame, reg);
2319 }
2320 else
2321 {
2322 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2323 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2324 }
2325
2326 vsp_valid = 1;
2327 }
2328
2329 /* Decode next unwind instruction. */
2330 insn = *entry++;
2331
2332 if ((insn & 0xc0) == 0)
2333 {
2334 int offset = insn & 0x3f;
2335 vsp += (offset << 2) + 4;
2336 }
2337 else if ((insn & 0xc0) == 0x40)
2338 {
2339 int offset = insn & 0x3f;
2340 vsp -= (offset << 2) + 4;
2341 }
2342 else if ((insn & 0xf0) == 0x80)
2343 {
2344 int mask = ((insn & 0xf) << 8) | *entry++;
2345 int i;
2346
2347 /* The special case of an all-zero mask identifies
2348 "Refuse to unwind". We return NULL to fall back
2349 to the prologue analyzer. */
2350 if (mask == 0)
2351 return NULL;
2352
2353 /* Pop registers r4..r15 under mask. */
2354 for (i = 0; i < 12; i++)
2355 if (mask & (1 << i))
2356 {
2357 cache->saved_regs[4 + i].addr = vsp;
2358 vsp += 4;
2359 }
2360
2361 /* Special-case popping SP -- we need to reload vsp. */
2362 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2363 vsp_valid = 0;
2364 }
2365 else if ((insn & 0xf0) == 0x90)
2366 {
2367 int reg = insn & 0xf;
2368
2369 /* Reserved cases. */
2370 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2371 return NULL;
2372
2373 /* Set SP from another register and mark VSP for reload. */
2374 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2375 vsp_valid = 0;
2376 }
2377 else if ((insn & 0xf0) == 0xa0)
2378 {
2379 int count = insn & 0x7;
2380 int pop_lr = (insn & 0x8) != 0;
2381 int i;
2382
2383 /* Pop r4..r[4+count]. */
2384 for (i = 0; i <= count; i++)
2385 {
2386 cache->saved_regs[4 + i].addr = vsp;
2387 vsp += 4;
2388 }
2389
2390 /* If indicated by flag, pop LR as well. */
2391 if (pop_lr)
2392 {
2393 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2394 vsp += 4;
2395 }
2396 }
2397 else if (insn == 0xb0)
2398 {
2399 /* We could only have updated PC by popping into it; if so, it
2400 will show up as address. Otherwise, copy LR into PC. */
2401 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2402 cache->saved_regs[ARM_PC_REGNUM]
2403 = cache->saved_regs[ARM_LR_REGNUM];
2404
2405 /* We're done. */
2406 break;
2407 }
2408 else if (insn == 0xb1)
2409 {
2410 int mask = *entry++;
2411 int i;
2412
2413 /* All-zero mask and mask >= 16 is "spare". */
2414 if (mask == 0 || mask >= 16)
2415 return NULL;
2416
2417 /* Pop r0..r3 under mask. */
2418 for (i = 0; i < 4; i++)
2419 if (mask & (1 << i))
2420 {
2421 cache->saved_regs[i].addr = vsp;
2422 vsp += 4;
2423 }
2424 }
2425 else if (insn == 0xb2)
2426 {
2427 ULONGEST offset = 0;
2428 unsigned shift = 0;
2429
2430 do
2431 {
2432 offset |= (*entry & 0x7f) << shift;
2433 shift += 7;
2434 }
2435 while (*entry++ & 0x80);
2436
2437 vsp += 0x204 + (offset << 2);
2438 }
2439 else if (insn == 0xb3)
2440 {
2441 int start = *entry >> 4;
2442 int count = (*entry++) & 0xf;
2443 int i;
2444
2445 /* Only registers D0..D15 are valid here. */
2446 if (start + count >= 16)
2447 return NULL;
2448
2449 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2450 for (i = 0; i <= count; i++)
2451 {
2452 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2453 vsp += 8;
2454 }
2455
2456 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2457 vsp += 4;
2458 }
2459 else if ((insn & 0xf8) == 0xb8)
2460 {
2461 int count = insn & 0x7;
2462 int i;
2463
2464 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2465 for (i = 0; i <= count; i++)
2466 {
2467 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2468 vsp += 8;
2469 }
2470
2471 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2472 vsp += 4;
2473 }
2474 else if (insn == 0xc6)
2475 {
2476 int start = *entry >> 4;
2477 int count = (*entry++) & 0xf;
2478 int i;
2479
2480 /* Only registers WR0..WR15 are valid. */
2481 if (start + count >= 16)
2482 return NULL;
2483
2484 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2485 for (i = 0; i <= count; i++)
2486 {
2487 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2488 vsp += 8;
2489 }
2490 }
2491 else if (insn == 0xc7)
2492 {
2493 int mask = *entry++;
2494 int i;
2495
2496 /* All-zero mask and mask >= 16 is "spare". */
2497 if (mask == 0 || mask >= 16)
2498 return NULL;
2499
2500 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2501 for (i = 0; i < 4; i++)
2502 if (mask & (1 << i))
2503 {
2504 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2505 vsp += 4;
2506 }
2507 }
2508 else if ((insn & 0xf8) == 0xc0)
2509 {
2510 int count = insn & 0x7;
2511 int i;
2512
2513 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2514 for (i = 0; i <= count; i++)
2515 {
2516 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2517 vsp += 8;
2518 }
2519 }
2520 else if (insn == 0xc8)
2521 {
2522 int start = *entry >> 4;
2523 int count = (*entry++) & 0xf;
2524 int i;
2525
2526 /* Only registers D0..D31 are valid. */
2527 if (start + count >= 16)
2528 return NULL;
2529
2530 /* Pop VFP double-precision registers
2531 D[16+start]..D[16+start+count]. */
2532 for (i = 0; i <= count; i++)
2533 {
2534 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2535 vsp += 8;
2536 }
2537 }
2538 else if (insn == 0xc9)
2539 {
2540 int start = *entry >> 4;
2541 int count = (*entry++) & 0xf;
2542 int i;
2543
2544 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2545 for (i = 0; i <= count; i++)
2546 {
2547 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2548 vsp += 8;
2549 }
2550 }
2551 else if ((insn & 0xf8) == 0xd0)
2552 {
2553 int count = insn & 0x7;
2554 int i;
2555
2556 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2557 for (i = 0; i <= count; i++)
2558 {
2559 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2560 vsp += 8;
2561 }
2562 }
2563 else
2564 {
2565 /* Everything else is "spare". */
2566 return NULL;
2567 }
2568 }
2569
2570 /* If we restore SP from a register, assume this was the frame register.
2571 Otherwise just fall back to SP as frame register. */
2572 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2573 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2574 else
2575 cache->framereg = ARM_SP_REGNUM;
2576
2577 /* Determine offset to previous frame. */
2578 cache->framesize
2579 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2580
2581 /* We already got the previous SP. */
2582 cache->prev_sp = vsp;
2583
2584 return cache;
2585 }
2586
2587 /* Unwinding via ARM exception table entries. Note that the sniffer
2588 already computes a filled-in prologue cache, which is then used
2589 with the same arm_prologue_this_id and arm_prologue_prev_register
2590 routines also used for prologue-parsing based unwinding. */
2591
2592 static int
2593 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2594 struct frame_info *this_frame,
2595 void **this_prologue_cache)
2596 {
2597 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2598 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2599 CORE_ADDR addr_in_block, exidx_region, func_start;
2600 struct arm_prologue_cache *cache;
2601 gdb_byte *entry;
2602
2603 /* See if we have an ARM exception table entry covering this address. */
2604 addr_in_block = get_frame_address_in_block (this_frame);
2605 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2606 if (!entry)
2607 return 0;
2608
2609 /* The ARM exception table does not describe unwind information
2610 for arbitrary PC values, but is guaranteed to be correct only
2611 at call sites. We have to decide here whether we want to use
2612 ARM exception table information for this frame, or fall back
2613 to using prologue parsing. (Note that if we have DWARF CFI,
2614 this sniffer isn't even called -- CFI is always preferred.)
2615
2616 Before we make this decision, however, we check whether we
2617 actually have *symbol* information for the current frame.
2618 If not, prologue parsing would not work anyway, so we might
2619 as well use the exception table and hope for the best. */
2620 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2621 {
2622 int exc_valid = 0;
2623
2624 /* If the next frame is "normal", we are at a call site in this
2625 frame, so exception information is guaranteed to be valid. */
2626 if (get_next_frame (this_frame)
2627 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2628 exc_valid = 1;
2629
2630 /* We also assume exception information is valid if we're currently
2631 blocked in a system call. The system library is supposed to
2632 ensure this, so that e.g. pthread cancellation works. */
2633 if (arm_frame_is_thumb (this_frame))
2634 {
2635 ULONGEST insn;
2636
2637 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2638 2, byte_order_for_code, &insn)
2639 && (insn & 0xff00) == 0xdf00 /* svc */)
2640 exc_valid = 1;
2641 }
2642 else
2643 {
2644 ULONGEST insn;
2645
2646 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2647 4, byte_order_for_code, &insn)
2648 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2649 exc_valid = 1;
2650 }
2651
2652 /* Bail out if we don't know that exception information is valid. */
2653 if (!exc_valid)
2654 return 0;
2655
2656 /* The ARM exception index does not mark the *end* of the region
2657 covered by the entry, and some functions will not have any entry.
2658 To correctly recognize the end of the covered region, the linker
2659 should have inserted dummy records with a CANTUNWIND marker.
2660
2661 Unfortunately, current versions of GNU ld do not reliably do
2662 this, and thus we may have found an incorrect entry above.
2663 As a (temporary) sanity check, we only use the entry if it
2664 lies *within* the bounds of the function. Note that this check
2665 might reject perfectly valid entries that just happen to cover
2666 multiple functions; therefore this check ought to be removed
2667 once the linker is fixed. */
2668 if (func_start > exidx_region)
2669 return 0;
2670 }
2671
2672 /* Decode the list of unwinding instructions into a prologue cache.
2673 Note that this may fail due to e.g. a "refuse to unwind" code. */
2674 cache = arm_exidx_fill_cache (this_frame, entry);
2675 if (!cache)
2676 return 0;
2677
2678 *this_prologue_cache = cache;
2679 return 1;
2680 }
2681
2682 struct frame_unwind arm_exidx_unwind = {
2683 NORMAL_FRAME,
2684 default_frame_unwind_stop_reason,
2685 arm_prologue_this_id,
2686 arm_prologue_prev_register,
2687 NULL,
2688 arm_exidx_unwind_sniffer
2689 };
2690
2691 static struct arm_prologue_cache *
2692 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2693 {
2694 struct arm_prologue_cache *cache;
2695 int reg;
2696
2697 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2698 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2699
2700 /* Still rely on the offset calculated from prologue. */
2701 arm_scan_prologue (this_frame, cache);
2702
2703 /* Since we are in epilogue, the SP has been restored. */
2704 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2705
2706 /* Calculate actual addresses of saved registers using offsets
2707 determined by arm_scan_prologue. */
2708 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2709 if (trad_frame_addr_p (cache->saved_regs, reg))
2710 cache->saved_regs[reg].addr += cache->prev_sp;
2711
2712 return cache;
2713 }
2714
2715 /* Implementation of function hook 'this_id' in
2716 'struct frame_uwnind' for epilogue unwinder. */
2717
2718 static void
2719 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2720 void **this_cache,
2721 struct frame_id *this_id)
2722 {
2723 struct arm_prologue_cache *cache;
2724 CORE_ADDR pc, func;
2725
2726 if (*this_cache == NULL)
2727 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2728 cache = (struct arm_prologue_cache *) *this_cache;
2729
2730 /* Use function start address as part of the frame ID. If we cannot
2731 identify the start address (due to missing symbol information),
2732 fall back to just using the current PC. */
2733 pc = get_frame_pc (this_frame);
2734 func = get_frame_func (this_frame);
2735 if (func == 0)
2736 func = pc;
2737
2738 (*this_id) = frame_id_build (cache->prev_sp, pc);
2739 }
2740
2741 /* Implementation of function hook 'prev_register' in
2742 'struct frame_uwnind' for epilogue unwinder. */
2743
2744 static struct value *
2745 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2746 void **this_cache, int regnum)
2747 {
2748 if (*this_cache == NULL)
2749 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2750
2751 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2752 }
2753
2754 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2755 CORE_ADDR pc);
2756 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2757 CORE_ADDR pc);
2758
2759 /* Implementation of function hook 'sniffer' in
2760 'struct frame_uwnind' for epilogue unwinder. */
2761
2762 static int
2763 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2764 struct frame_info *this_frame,
2765 void **this_prologue_cache)
2766 {
2767 if (frame_relative_level (this_frame) == 0)
2768 {
2769 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2770 CORE_ADDR pc = get_frame_pc (this_frame);
2771
2772 if (arm_frame_is_thumb (this_frame))
2773 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2774 else
2775 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2776 }
2777 else
2778 return 0;
2779 }
2780
2781 /* Frame unwinder from epilogue. */
2782
2783 static const struct frame_unwind arm_epilogue_frame_unwind =
2784 {
2785 NORMAL_FRAME,
2786 default_frame_unwind_stop_reason,
2787 arm_epilogue_frame_this_id,
2788 arm_epilogue_frame_prev_register,
2789 NULL,
2790 arm_epilogue_frame_sniffer,
2791 };
2792
2793 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2794 trampoline, return the target PC. Otherwise return 0.
2795
2796 void call0a (char c, short s, int i, long l) {}
2797
2798 int main (void)
2799 {
2800 (*pointer_to_call0a) (c, s, i, l);
2801 }
2802
2803 Instead of calling a stub library function _call_via_xx (xx is
2804 the register name), GCC may inline the trampoline in the object
2805 file as below (register r2 has the address of call0a).
2806
2807 .global main
2808 .type main, %function
2809 ...
2810 bl .L1
2811 ...
2812 .size main, .-main
2813
2814 .L1:
2815 bx r2
2816
2817 The trampoline 'bx r2' doesn't belong to main. */
2818
2819 static CORE_ADDR
2820 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2821 {
2822 /* The heuristics of recognizing such trampoline is that FRAME is
2823 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2824 if (arm_frame_is_thumb (frame))
2825 {
2826 gdb_byte buf[2];
2827
2828 if (target_read_memory (pc, buf, 2) == 0)
2829 {
2830 struct gdbarch *gdbarch = get_frame_arch (frame);
2831 enum bfd_endian byte_order_for_code
2832 = gdbarch_byte_order_for_code (gdbarch);
2833 uint16_t insn
2834 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2835
2836 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2837 {
2838 CORE_ADDR dest
2839 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2840
2841 /* Clear the LSB so that gdb core sets step-resume
2842 breakpoint at the right address. */
2843 return UNMAKE_THUMB_ADDR (dest);
2844 }
2845 }
2846 }
2847
2848 return 0;
2849 }
2850
2851 static struct arm_prologue_cache *
2852 arm_make_stub_cache (struct frame_info *this_frame)
2853 {
2854 struct arm_prologue_cache *cache;
2855
2856 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2857 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2858
2859 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2860
2861 return cache;
2862 }
2863
2864 /* Our frame ID for a stub frame is the current SP and LR. */
2865
2866 static void
2867 arm_stub_this_id (struct frame_info *this_frame,
2868 void **this_cache,
2869 struct frame_id *this_id)
2870 {
2871 struct arm_prologue_cache *cache;
2872
2873 if (*this_cache == NULL)
2874 *this_cache = arm_make_stub_cache (this_frame);
2875 cache = (struct arm_prologue_cache *) *this_cache;
2876
2877 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2878 }
2879
2880 static int
2881 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2882 struct frame_info *this_frame,
2883 void **this_prologue_cache)
2884 {
2885 CORE_ADDR addr_in_block;
2886 gdb_byte dummy[4];
2887 CORE_ADDR pc, start_addr;
2888 const char *name;
2889
2890 addr_in_block = get_frame_address_in_block (this_frame);
2891 pc = get_frame_pc (this_frame);
2892 if (in_plt_section (addr_in_block)
2893 /* We also use the stub winder if the target memory is unreadable
2894 to avoid having the prologue unwinder trying to read it. */
2895 || target_read_memory (pc, dummy, 4) != 0)
2896 return 1;
2897
2898 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2899 && arm_skip_bx_reg (this_frame, pc) != 0)
2900 return 1;
2901
2902 return 0;
2903 }
2904
2905 struct frame_unwind arm_stub_unwind = {
2906 NORMAL_FRAME,
2907 default_frame_unwind_stop_reason,
2908 arm_stub_this_id,
2909 arm_prologue_prev_register,
2910 NULL,
2911 arm_stub_unwind_sniffer
2912 };
2913
2914 /* Put here the code to store, into CACHE->saved_regs, the addresses
2915 of the saved registers of frame described by THIS_FRAME. CACHE is
2916 returned. */
2917
2918 static struct arm_prologue_cache *
2919 arm_m_exception_cache (struct frame_info *this_frame)
2920 {
2921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2922 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2923 struct arm_prologue_cache *cache;
2924 CORE_ADDR unwound_sp;
2925 LONGEST xpsr;
2926
2927 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2928 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2929
2930 unwound_sp = get_frame_register_unsigned (this_frame,
2931 ARM_SP_REGNUM);
2932
2933 /* The hardware saves eight 32-bit words, comprising xPSR,
2934 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2935 "B1.5.6 Exception entry behavior" in
2936 "ARMv7-M Architecture Reference Manual". */
2937 cache->saved_regs[0].addr = unwound_sp;
2938 cache->saved_regs[1].addr = unwound_sp + 4;
2939 cache->saved_regs[2].addr = unwound_sp + 8;
2940 cache->saved_regs[3].addr = unwound_sp + 12;
2941 cache->saved_regs[12].addr = unwound_sp + 16;
2942 cache->saved_regs[14].addr = unwound_sp + 20;
2943 cache->saved_regs[15].addr = unwound_sp + 24;
2944 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2945
2946 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2947 aligner between the top of the 32-byte stack frame and the
2948 previous context's stack pointer. */
2949 cache->prev_sp = unwound_sp + 32;
2950 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2951 && (xpsr & (1 << 9)) != 0)
2952 cache->prev_sp += 4;
2953
2954 return cache;
2955 }
2956
2957 /* Implementation of function hook 'this_id' in
2958 'struct frame_uwnind'. */
2959
2960 static void
2961 arm_m_exception_this_id (struct frame_info *this_frame,
2962 void **this_cache,
2963 struct frame_id *this_id)
2964 {
2965 struct arm_prologue_cache *cache;
2966
2967 if (*this_cache == NULL)
2968 *this_cache = arm_m_exception_cache (this_frame);
2969 cache = (struct arm_prologue_cache *) *this_cache;
2970
2971 /* Our frame ID for a stub frame is the current SP and LR. */
2972 *this_id = frame_id_build (cache->prev_sp,
2973 get_frame_pc (this_frame));
2974 }
2975
2976 /* Implementation of function hook 'prev_register' in
2977 'struct frame_uwnind'. */
2978
2979 static struct value *
2980 arm_m_exception_prev_register (struct frame_info *this_frame,
2981 void **this_cache,
2982 int prev_regnum)
2983 {
2984 struct arm_prologue_cache *cache;
2985
2986 if (*this_cache == NULL)
2987 *this_cache = arm_m_exception_cache (this_frame);
2988 cache = (struct arm_prologue_cache *) *this_cache;
2989
2990 /* The value was already reconstructed into PREV_SP. */
2991 if (prev_regnum == ARM_SP_REGNUM)
2992 return frame_unwind_got_constant (this_frame, prev_regnum,
2993 cache->prev_sp);
2994
2995 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2996 prev_regnum);
2997 }
2998
2999 /* Implementation of function hook 'sniffer' in
3000 'struct frame_uwnind'. */
3001
3002 static int
3003 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3004 struct frame_info *this_frame,
3005 void **this_prologue_cache)
3006 {
3007 CORE_ADDR this_pc = get_frame_pc (this_frame);
3008
3009 /* No need to check is_m; this sniffer is only registered for
3010 M-profile architectures. */
3011
3012 /* Check if exception frame returns to a magic PC value. */
3013 return arm_m_addr_is_magic (this_pc);
3014 }
3015
3016 /* Frame unwinder for M-profile exceptions. */
3017
3018 struct frame_unwind arm_m_exception_unwind =
3019 {
3020 SIGTRAMP_FRAME,
3021 default_frame_unwind_stop_reason,
3022 arm_m_exception_this_id,
3023 arm_m_exception_prev_register,
3024 NULL,
3025 arm_m_exception_unwind_sniffer
3026 };
3027
3028 static CORE_ADDR
3029 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3030 {
3031 struct arm_prologue_cache *cache;
3032
3033 if (*this_cache == NULL)
3034 *this_cache = arm_make_prologue_cache (this_frame);
3035 cache = (struct arm_prologue_cache *) *this_cache;
3036
3037 return cache->prev_sp - cache->framesize;
3038 }
3039
3040 struct frame_base arm_normal_base = {
3041 &arm_prologue_unwind,
3042 arm_normal_frame_base,
3043 arm_normal_frame_base,
3044 arm_normal_frame_base
3045 };
3046
3047 static struct value *
3048 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3049 int regnum)
3050 {
3051 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3052 CORE_ADDR lr, cpsr;
3053 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3054
3055 switch (regnum)
3056 {
3057 case ARM_PC_REGNUM:
3058 /* The PC is normally copied from the return column, which
3059 describes saves of LR. However, that version may have an
3060 extra bit set to indicate Thumb state. The bit is not
3061 part of the PC. */
3062 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3063 return frame_unwind_got_constant (this_frame, regnum,
3064 arm_addr_bits_remove (gdbarch, lr));
3065
3066 case ARM_PS_REGNUM:
3067 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3068 cpsr = get_frame_register_unsigned (this_frame, regnum);
3069 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3070 if (IS_THUMB_ADDR (lr))
3071 cpsr |= t_bit;
3072 else
3073 cpsr &= ~t_bit;
3074 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3075
3076 default:
3077 internal_error (__FILE__, __LINE__,
3078 _("Unexpected register %d"), regnum);
3079 }
3080 }
3081
3082 static void
3083 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3084 struct dwarf2_frame_state_reg *reg,
3085 struct frame_info *this_frame)
3086 {
3087 switch (regnum)
3088 {
3089 case ARM_PC_REGNUM:
3090 case ARM_PS_REGNUM:
3091 reg->how = DWARF2_FRAME_REG_FN;
3092 reg->loc.fn = arm_dwarf2_prev_register;
3093 break;
3094 case ARM_SP_REGNUM:
3095 reg->how = DWARF2_FRAME_REG_CFA;
3096 break;
3097 }
3098 }
3099
3100 /* Implement the stack_frame_destroyed_p gdbarch method. */
3101
3102 static int
3103 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3104 {
3105 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3106 unsigned int insn, insn2;
3107 int found_return = 0, found_stack_adjust = 0;
3108 CORE_ADDR func_start, func_end;
3109 CORE_ADDR scan_pc;
3110 gdb_byte buf[4];
3111
3112 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3113 return 0;
3114
3115 /* The epilogue is a sequence of instructions along the following lines:
3116
3117 - add stack frame size to SP or FP
3118 - [if frame pointer used] restore SP from FP
3119 - restore registers from SP [may include PC]
3120 - a return-type instruction [if PC wasn't already restored]
3121
3122 In a first pass, we scan forward from the current PC and verify the
3123 instructions we find as compatible with this sequence, ending in a
3124 return instruction.
3125
3126 However, this is not sufficient to distinguish indirect function calls
3127 within a function from indirect tail calls in the epilogue in some cases.
3128 Therefore, if we didn't already find any SP-changing instruction during
3129 forward scan, we add a backward scanning heuristic to ensure we actually
3130 are in the epilogue. */
3131
3132 scan_pc = pc;
3133 while (scan_pc < func_end && !found_return)
3134 {
3135 if (target_read_memory (scan_pc, buf, 2))
3136 break;
3137
3138 scan_pc += 2;
3139 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3140
3141 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3142 found_return = 1;
3143 else if (insn == 0x46f7) /* mov pc, lr */
3144 found_return = 1;
3145 else if (thumb_instruction_restores_sp (insn))
3146 {
3147 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3148 found_return = 1;
3149 }
3150 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3151 {
3152 if (target_read_memory (scan_pc, buf, 2))
3153 break;
3154
3155 scan_pc += 2;
3156 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3157
3158 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3159 {
3160 if (insn2 & 0x8000) /* <registers> include PC. */
3161 found_return = 1;
3162 }
3163 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3164 && (insn2 & 0x0fff) == 0x0b04)
3165 {
3166 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3167 found_return = 1;
3168 }
3169 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3170 && (insn2 & 0x0e00) == 0x0a00)
3171 ;
3172 else
3173 break;
3174 }
3175 else
3176 break;
3177 }
3178
3179 if (!found_return)
3180 return 0;
3181
3182 /* Since any instruction in the epilogue sequence, with the possible
3183 exception of return itself, updates the stack pointer, we need to
3184 scan backwards for at most one instruction. Try either a 16-bit or
3185 a 32-bit instruction. This is just a heuristic, so we do not worry
3186 too much about false positives. */
3187
3188 if (pc - 4 < func_start)
3189 return 0;
3190 if (target_read_memory (pc - 4, buf, 4))
3191 return 0;
3192
3193 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3194 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3195
3196 if (thumb_instruction_restores_sp (insn2))
3197 found_stack_adjust = 1;
3198 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3199 found_stack_adjust = 1;
3200 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3201 && (insn2 & 0x0fff) == 0x0b04)
3202 found_stack_adjust = 1;
3203 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3204 && (insn2 & 0x0e00) == 0x0a00)
3205 found_stack_adjust = 1;
3206
3207 return found_stack_adjust;
3208 }
3209
3210 static int
3211 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3212 {
3213 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3214 unsigned int insn;
3215 int found_return;
3216 CORE_ADDR func_start, func_end;
3217
3218 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3219 return 0;
3220
3221 /* We are in the epilogue if the previous instruction was a stack
3222 adjustment and the next instruction is a possible return (bx, mov
3223 pc, or pop). We could have to scan backwards to find the stack
3224 adjustment, or forwards to find the return, but this is a decent
3225 approximation. First scan forwards. */
3226
3227 found_return = 0;
3228 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3229 if (bits (insn, 28, 31) != INST_NV)
3230 {
3231 if ((insn & 0x0ffffff0) == 0x012fff10)
3232 /* BX. */
3233 found_return = 1;
3234 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3235 /* MOV PC. */
3236 found_return = 1;
3237 else if ((insn & 0x0fff0000) == 0x08bd0000
3238 && (insn & 0x0000c000) != 0)
3239 /* POP (LDMIA), including PC or LR. */
3240 found_return = 1;
3241 }
3242
3243 if (!found_return)
3244 return 0;
3245
3246 /* Scan backwards. This is just a heuristic, so do not worry about
3247 false positives from mode changes. */
3248
3249 if (pc < func_start + 4)
3250 return 0;
3251
3252 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3253 if (arm_instruction_restores_sp (insn))
3254 return 1;
3255
3256 return 0;
3257 }
3258
3259 /* Implement the stack_frame_destroyed_p gdbarch method. */
3260
3261 static int
3262 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3263 {
3264 if (arm_pc_is_thumb (gdbarch, pc))
3265 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3266 else
3267 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3268 }
3269
3270 /* When arguments must be pushed onto the stack, they go on in reverse
3271 order. The code below implements a FILO (stack) to do this. */
3272
3273 struct stack_item
3274 {
3275 int len;
3276 struct stack_item *prev;
3277 gdb_byte *data;
3278 };
3279
3280 static struct stack_item *
3281 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3282 {
3283 struct stack_item *si;
3284 si = XNEW (struct stack_item);
3285 si->data = (gdb_byte *) xmalloc (len);
3286 si->len = len;
3287 si->prev = prev;
3288 memcpy (si->data, contents, len);
3289 return si;
3290 }
3291
3292 static struct stack_item *
3293 pop_stack_item (struct stack_item *si)
3294 {
3295 struct stack_item *dead = si;
3296 si = si->prev;
3297 xfree (dead->data);
3298 xfree (dead);
3299 return si;
3300 }
3301
3302 /* Implement the gdbarch type alignment method, overrides the generic
3303 alignment algorithm for anything that is arm specific. */
3304
3305 static ULONGEST
3306 arm_type_align (gdbarch *gdbarch, struct type *t)
3307 {
3308 t = check_typedef (t);
3309 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3310 {
3311 /* Use the natural alignment for vector types (the same for
3312 scalar type), but the maximum alignment is 64-bit. */
3313 if (TYPE_LENGTH (t) > 8)
3314 return 8;
3315 else
3316 return TYPE_LENGTH (t);
3317 }
3318
3319 /* Allow the common code to calculate the alignment. */
3320 return 0;
3321 }
3322
3323 /* Possible base types for a candidate for passing and returning in
3324 VFP registers. */
3325
3326 enum arm_vfp_cprc_base_type
3327 {
3328 VFP_CPRC_UNKNOWN,
3329 VFP_CPRC_SINGLE,
3330 VFP_CPRC_DOUBLE,
3331 VFP_CPRC_VEC64,
3332 VFP_CPRC_VEC128
3333 };
3334
3335 /* The length of one element of base type B. */
3336
3337 static unsigned
3338 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3339 {
3340 switch (b)
3341 {
3342 case VFP_CPRC_SINGLE:
3343 return 4;
3344 case VFP_CPRC_DOUBLE:
3345 return 8;
3346 case VFP_CPRC_VEC64:
3347 return 8;
3348 case VFP_CPRC_VEC128:
3349 return 16;
3350 default:
3351 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3352 (int) b);
3353 }
3354 }
3355
3356 /* The character ('s', 'd' or 'q') for the type of VFP register used
3357 for passing base type B. */
3358
3359 static int
3360 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3361 {
3362 switch (b)
3363 {
3364 case VFP_CPRC_SINGLE:
3365 return 's';
3366 case VFP_CPRC_DOUBLE:
3367 return 'd';
3368 case VFP_CPRC_VEC64:
3369 return 'd';
3370 case VFP_CPRC_VEC128:
3371 return 'q';
3372 default:
3373 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3374 (int) b);
3375 }
3376 }
3377
3378 /* Determine whether T may be part of a candidate for passing and
3379 returning in VFP registers, ignoring the limit on the total number
3380 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3381 classification of the first valid component found; if it is not
3382 VFP_CPRC_UNKNOWN, all components must have the same classification
3383 as *BASE_TYPE. If it is found that T contains a type not permitted
3384 for passing and returning in VFP registers, a type differently
3385 classified from *BASE_TYPE, or two types differently classified
3386 from each other, return -1, otherwise return the total number of
3387 base-type elements found (possibly 0 in an empty structure or
3388 array). Vector types are not currently supported, matching the
3389 generic AAPCS support. */
3390
3391 static int
3392 arm_vfp_cprc_sub_candidate (struct type *t,
3393 enum arm_vfp_cprc_base_type *base_type)
3394 {
3395 t = check_typedef (t);
3396 switch (TYPE_CODE (t))
3397 {
3398 case TYPE_CODE_FLT:
3399 switch (TYPE_LENGTH (t))
3400 {
3401 case 4:
3402 if (*base_type == VFP_CPRC_UNKNOWN)
3403 *base_type = VFP_CPRC_SINGLE;
3404 else if (*base_type != VFP_CPRC_SINGLE)
3405 return -1;
3406 return 1;
3407
3408 case 8:
3409 if (*base_type == VFP_CPRC_UNKNOWN)
3410 *base_type = VFP_CPRC_DOUBLE;
3411 else if (*base_type != VFP_CPRC_DOUBLE)
3412 return -1;
3413 return 1;
3414
3415 default:
3416 return -1;
3417 }
3418 break;
3419
3420 case TYPE_CODE_COMPLEX:
3421 /* Arguments of complex T where T is one of the types float or
3422 double get treated as if they are implemented as:
3423
3424 struct complexT
3425 {
3426 T real;
3427 T imag;
3428 };
3429
3430 */
3431 switch (TYPE_LENGTH (t))
3432 {
3433 case 8:
3434 if (*base_type == VFP_CPRC_UNKNOWN)
3435 *base_type = VFP_CPRC_SINGLE;
3436 else if (*base_type != VFP_CPRC_SINGLE)
3437 return -1;
3438 return 2;
3439
3440 case 16:
3441 if (*base_type == VFP_CPRC_UNKNOWN)
3442 *base_type = VFP_CPRC_DOUBLE;
3443 else if (*base_type != VFP_CPRC_DOUBLE)
3444 return -1;
3445 return 2;
3446
3447 default:
3448 return -1;
3449 }
3450 break;
3451
3452 case TYPE_CODE_ARRAY:
3453 {
3454 if (TYPE_VECTOR (t))
3455 {
3456 /* A 64-bit or 128-bit containerized vector type are VFP
3457 CPRCs. */
3458 switch (TYPE_LENGTH (t))
3459 {
3460 case 8:
3461 if (*base_type == VFP_CPRC_UNKNOWN)
3462 *base_type = VFP_CPRC_VEC64;
3463 return 1;
3464 case 16:
3465 if (*base_type == VFP_CPRC_UNKNOWN)
3466 *base_type = VFP_CPRC_VEC128;
3467 return 1;
3468 default:
3469 return -1;
3470 }
3471 }
3472 else
3473 {
3474 int count;
3475 unsigned unitlen;
3476
3477 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3478 base_type);
3479 if (count == -1)
3480 return -1;
3481 if (TYPE_LENGTH (t) == 0)
3482 {
3483 gdb_assert (count == 0);
3484 return 0;
3485 }
3486 else if (count == 0)
3487 return -1;
3488 unitlen = arm_vfp_cprc_unit_length (*base_type);
3489 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3490 return TYPE_LENGTH (t) / unitlen;
3491 }
3492 }
3493 break;
3494
3495 case TYPE_CODE_STRUCT:
3496 {
3497 int count = 0;
3498 unsigned unitlen;
3499 int i;
3500 for (i = 0; i < TYPE_NFIELDS (t); i++)
3501 {
3502 int sub_count = 0;
3503
3504 if (!field_is_static (&TYPE_FIELD (t, i)))
3505 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3506 base_type);
3507 if (sub_count == -1)
3508 return -1;
3509 count += sub_count;
3510 }
3511 if (TYPE_LENGTH (t) == 0)
3512 {
3513 gdb_assert (count == 0);
3514 return 0;
3515 }
3516 else if (count == 0)
3517 return -1;
3518 unitlen = arm_vfp_cprc_unit_length (*base_type);
3519 if (TYPE_LENGTH (t) != unitlen * count)
3520 return -1;
3521 return count;
3522 }
3523
3524 case TYPE_CODE_UNION:
3525 {
3526 int count = 0;
3527 unsigned unitlen;
3528 int i;
3529 for (i = 0; i < TYPE_NFIELDS (t); i++)
3530 {
3531 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3532 base_type);
3533 if (sub_count == -1)
3534 return -1;
3535 count = (count > sub_count ? count : sub_count);
3536 }
3537 if (TYPE_LENGTH (t) == 0)
3538 {
3539 gdb_assert (count == 0);
3540 return 0;
3541 }
3542 else if (count == 0)
3543 return -1;
3544 unitlen = arm_vfp_cprc_unit_length (*base_type);
3545 if (TYPE_LENGTH (t) != unitlen * count)
3546 return -1;
3547 return count;
3548 }
3549
3550 default:
3551 break;
3552 }
3553
3554 return -1;
3555 }
3556
3557 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3558 if passed to or returned from a non-variadic function with the VFP
3559 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3560 *BASE_TYPE to the base type for T and *COUNT to the number of
3561 elements of that base type before returning. */
3562
3563 static int
3564 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3565 int *count)
3566 {
3567 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3568 int c = arm_vfp_cprc_sub_candidate (t, &b);
3569 if (c <= 0 || c > 4)
3570 return 0;
3571 *base_type = b;
3572 *count = c;
3573 return 1;
3574 }
3575
3576 /* Return 1 if the VFP ABI should be used for passing arguments to and
3577 returning values from a function of type FUNC_TYPE, 0
3578 otherwise. */
3579
3580 static int
3581 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3582 {
3583 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3584 /* Variadic functions always use the base ABI. Assume that functions
3585 without debug info are not variadic. */
3586 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3587 return 0;
3588 /* The VFP ABI is only supported as a variant of AAPCS. */
3589 if (tdep->arm_abi != ARM_ABI_AAPCS)
3590 return 0;
3591 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3592 }
3593
3594 /* We currently only support passing parameters in integer registers, which
3595 conforms with GCC's default model, and VFP argument passing following
3596 the VFP variant of AAPCS. Several other variants exist and
3597 we should probably support some of them based on the selected ABI. */
3598
3599 static CORE_ADDR
3600 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3601 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3602 struct value **args, CORE_ADDR sp,
3603 function_call_return_method return_method,
3604 CORE_ADDR struct_addr)
3605 {
3606 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3607 int argnum;
3608 int argreg;
3609 int nstack;
3610 struct stack_item *si = NULL;
3611 int use_vfp_abi;
3612 struct type *ftype;
3613 unsigned vfp_regs_free = (1 << 16) - 1;
3614
3615 /* Determine the type of this function and whether the VFP ABI
3616 applies. */
3617 ftype = check_typedef (value_type (function));
3618 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3619 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3620 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3621
3622 /* Set the return address. For the ARM, the return breakpoint is
3623 always at BP_ADDR. */
3624 if (arm_pc_is_thumb (gdbarch, bp_addr))
3625 bp_addr |= 1;
3626 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3627
3628 /* Walk through the list of args and determine how large a temporary
3629 stack is required. Need to take care here as structs may be
3630 passed on the stack, and we have to push them. */
3631 nstack = 0;
3632
3633 argreg = ARM_A1_REGNUM;
3634 nstack = 0;
3635
3636 /* The struct_return pointer occupies the first parameter
3637 passing register. */
3638 if (return_method == return_method_struct)
3639 {
3640 if (arm_debug)
3641 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3642 gdbarch_register_name (gdbarch, argreg),
3643 paddress (gdbarch, struct_addr));
3644 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3645 argreg++;
3646 }
3647
3648 for (argnum = 0; argnum < nargs; argnum++)
3649 {
3650 int len;
3651 struct type *arg_type;
3652 struct type *target_type;
3653 enum type_code typecode;
3654 const bfd_byte *val;
3655 int align;
3656 enum arm_vfp_cprc_base_type vfp_base_type;
3657 int vfp_base_count;
3658 int may_use_core_reg = 1;
3659
3660 arg_type = check_typedef (value_type (args[argnum]));
3661 len = TYPE_LENGTH (arg_type);
3662 target_type = TYPE_TARGET_TYPE (arg_type);
3663 typecode = TYPE_CODE (arg_type);
3664 val = value_contents (args[argnum]);
3665
3666 align = type_align (arg_type);
3667 /* Round alignment up to a whole number of words. */
3668 align = (align + ARM_INT_REGISTER_SIZE - 1)
3669 & ~(ARM_INT_REGISTER_SIZE - 1);
3670 /* Different ABIs have different maximum alignments. */
3671 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3672 {
3673 /* The APCS ABI only requires word alignment. */
3674 align = ARM_INT_REGISTER_SIZE;
3675 }
3676 else
3677 {
3678 /* The AAPCS requires at most doubleword alignment. */
3679 if (align > ARM_INT_REGISTER_SIZE * 2)
3680 align = ARM_INT_REGISTER_SIZE * 2;
3681 }
3682
3683 if (use_vfp_abi
3684 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3685 &vfp_base_count))
3686 {
3687 int regno;
3688 int unit_length;
3689 int shift;
3690 unsigned mask;
3691
3692 /* Because this is a CPRC it cannot go in a core register or
3693 cause a core register to be skipped for alignment.
3694 Either it goes in VFP registers and the rest of this loop
3695 iteration is skipped for this argument, or it goes on the
3696 stack (and the stack alignment code is correct for this
3697 case). */
3698 may_use_core_reg = 0;
3699
3700 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3701 shift = unit_length / 4;
3702 mask = (1 << (shift * vfp_base_count)) - 1;
3703 for (regno = 0; regno < 16; regno += shift)
3704 if (((vfp_regs_free >> regno) & mask) == mask)
3705 break;
3706
3707 if (regno < 16)
3708 {
3709 int reg_char;
3710 int reg_scaled;
3711 int i;
3712
3713 vfp_regs_free &= ~(mask << regno);
3714 reg_scaled = regno / shift;
3715 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3716 for (i = 0; i < vfp_base_count; i++)
3717 {
3718 char name_buf[4];
3719 int regnum;
3720 if (reg_char == 'q')
3721 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3722 val + i * unit_length);
3723 else
3724 {
3725 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3726 reg_char, reg_scaled + i);
3727 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3728 strlen (name_buf));
3729 regcache->cooked_write (regnum, val + i * unit_length);
3730 }
3731 }
3732 continue;
3733 }
3734 else
3735 {
3736 /* This CPRC could not go in VFP registers, so all VFP
3737 registers are now marked as used. */
3738 vfp_regs_free = 0;
3739 }
3740 }
3741
3742 /* Push stack padding for doubleword alignment. */
3743 if (nstack & (align - 1))
3744 {
3745 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3746 nstack += ARM_INT_REGISTER_SIZE;
3747 }
3748
3749 /* Doubleword aligned quantities must go in even register pairs. */
3750 if (may_use_core_reg
3751 && argreg <= ARM_LAST_ARG_REGNUM
3752 && align > ARM_INT_REGISTER_SIZE
3753 && argreg & 1)
3754 argreg++;
3755
3756 /* If the argument is a pointer to a function, and it is a
3757 Thumb function, create a LOCAL copy of the value and set
3758 the THUMB bit in it. */
3759 if (TYPE_CODE_PTR == typecode
3760 && target_type != NULL
3761 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3762 {
3763 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3764 if (arm_pc_is_thumb (gdbarch, regval))
3765 {
3766 bfd_byte *copy = (bfd_byte *) alloca (len);
3767 store_unsigned_integer (copy, len, byte_order,
3768 MAKE_THUMB_ADDR (regval));
3769 val = copy;
3770 }
3771 }
3772
3773 /* Copy the argument to general registers or the stack in
3774 register-sized pieces. Large arguments are split between
3775 registers and stack. */
3776 while (len > 0)
3777 {
3778 int partial_len = len < ARM_INT_REGISTER_SIZE
3779 ? len : ARM_INT_REGISTER_SIZE;
3780 CORE_ADDR regval
3781 = extract_unsigned_integer (val, partial_len, byte_order);
3782
3783 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3784 {
3785 /* The argument is being passed in a general purpose
3786 register. */
3787 if (byte_order == BFD_ENDIAN_BIG)
3788 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3789 if (arm_debug)
3790 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3791 argnum,
3792 gdbarch_register_name
3793 (gdbarch, argreg),
3794 phex (regval, ARM_INT_REGISTER_SIZE));
3795 regcache_cooked_write_unsigned (regcache, argreg, regval);
3796 argreg++;
3797 }
3798 else
3799 {
3800 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3801
3802 memset (buf, 0, sizeof (buf));
3803 store_unsigned_integer (buf, partial_len, byte_order, regval);
3804
3805 /* Push the arguments onto the stack. */
3806 if (arm_debug)
3807 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3808 argnum, nstack);
3809 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3810 nstack += ARM_INT_REGISTER_SIZE;
3811 }
3812
3813 len -= partial_len;
3814 val += partial_len;
3815 }
3816 }
3817 /* If we have an odd number of words to push, then decrement the stack
3818 by one word now, so first stack argument will be dword aligned. */
3819 if (nstack & 4)
3820 sp -= 4;
3821
3822 while (si)
3823 {
3824 sp -= si->len;
3825 write_memory (sp, si->data, si->len);
3826 si = pop_stack_item (si);
3827 }
3828
3829 /* Finally, update teh SP register. */
3830 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3831
3832 return sp;
3833 }
3834
3835
3836 /* Always align the frame to an 8-byte boundary. This is required on
3837 some platforms and harmless on the rest. */
3838
3839 static CORE_ADDR
3840 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3841 {
3842 /* Align the stack to eight bytes. */
3843 return sp & ~ (CORE_ADDR) 7;
3844 }
3845
3846 static void
3847 print_fpu_flags (struct ui_file *file, int flags)
3848 {
3849 if (flags & (1 << 0))
3850 fputs_filtered ("IVO ", file);
3851 if (flags & (1 << 1))
3852 fputs_filtered ("DVZ ", file);
3853 if (flags & (1 << 2))
3854 fputs_filtered ("OFL ", file);
3855 if (flags & (1 << 3))
3856 fputs_filtered ("UFL ", file);
3857 if (flags & (1 << 4))
3858 fputs_filtered ("INX ", file);
3859 fputc_filtered ('\n', file);
3860 }
3861
3862 /* Print interesting information about the floating point processor
3863 (if present) or emulator. */
3864 static void
3865 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3866 struct frame_info *frame, const char *args)
3867 {
3868 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3869 int type;
3870
3871 type = (status >> 24) & 127;
3872 if (status & (1 << 31))
3873 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3874 else
3875 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3876 /* i18n: [floating point unit] mask */
3877 fputs_filtered (_("mask: "), file);
3878 print_fpu_flags (file, status >> 16);
3879 /* i18n: [floating point unit] flags */
3880 fputs_filtered (_("flags: "), file);
3881 print_fpu_flags (file, status);
3882 }
3883
3884 /* Construct the ARM extended floating point type. */
3885 static struct type *
3886 arm_ext_type (struct gdbarch *gdbarch)
3887 {
3888 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3889
3890 if (!tdep->arm_ext_type)
3891 tdep->arm_ext_type
3892 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3893 floatformats_arm_ext);
3894
3895 return tdep->arm_ext_type;
3896 }
3897
3898 static struct type *
3899 arm_neon_double_type (struct gdbarch *gdbarch)
3900 {
3901 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3902
3903 if (tdep->neon_double_type == NULL)
3904 {
3905 struct type *t, *elem;
3906
3907 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3908 TYPE_CODE_UNION);
3909 elem = builtin_type (gdbarch)->builtin_uint8;
3910 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3911 elem = builtin_type (gdbarch)->builtin_uint16;
3912 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3913 elem = builtin_type (gdbarch)->builtin_uint32;
3914 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3915 elem = builtin_type (gdbarch)->builtin_uint64;
3916 append_composite_type_field (t, "u64", elem);
3917 elem = builtin_type (gdbarch)->builtin_float;
3918 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3919 elem = builtin_type (gdbarch)->builtin_double;
3920 append_composite_type_field (t, "f64", elem);
3921
3922 TYPE_VECTOR (t) = 1;
3923 TYPE_NAME (t) = "neon_d";
3924 tdep->neon_double_type = t;
3925 }
3926
3927 return tdep->neon_double_type;
3928 }
3929
3930 /* FIXME: The vector types are not correctly ordered on big-endian
3931 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3932 bits of d0 - regardless of what unit size is being held in d0. So
3933 the offset of the first uint8 in d0 is 7, but the offset of the
3934 first float is 4. This code works as-is for little-endian
3935 targets. */
3936
3937 static struct type *
3938 arm_neon_quad_type (struct gdbarch *gdbarch)
3939 {
3940 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3941
3942 if (tdep->neon_quad_type == NULL)
3943 {
3944 struct type *t, *elem;
3945
3946 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3947 TYPE_CODE_UNION);
3948 elem = builtin_type (gdbarch)->builtin_uint8;
3949 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3950 elem = builtin_type (gdbarch)->builtin_uint16;
3951 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3952 elem = builtin_type (gdbarch)->builtin_uint32;
3953 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3954 elem = builtin_type (gdbarch)->builtin_uint64;
3955 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3956 elem = builtin_type (gdbarch)->builtin_float;
3957 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3958 elem = builtin_type (gdbarch)->builtin_double;
3959 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3960
3961 TYPE_VECTOR (t) = 1;
3962 TYPE_NAME (t) = "neon_q";
3963 tdep->neon_quad_type = t;
3964 }
3965
3966 return tdep->neon_quad_type;
3967 }
3968
3969 /* Return the GDB type object for the "standard" data type of data in
3970 register N. */
3971
3972 static struct type *
3973 arm_register_type (struct gdbarch *gdbarch, int regnum)
3974 {
3975 int num_regs = gdbarch_num_regs (gdbarch);
3976
3977 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3978 && regnum >= num_regs && regnum < num_regs + 32)
3979 return builtin_type (gdbarch)->builtin_float;
3980
3981 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3982 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3983 return arm_neon_quad_type (gdbarch);
3984
3985 /* If the target description has register information, we are only
3986 in this function so that we can override the types of
3987 double-precision registers for NEON. */
3988 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3989 {
3990 struct type *t = tdesc_register_type (gdbarch, regnum);
3991
3992 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3993 && TYPE_CODE (t) == TYPE_CODE_FLT
3994 && gdbarch_tdep (gdbarch)->have_neon)
3995 return arm_neon_double_type (gdbarch);
3996 else
3997 return t;
3998 }
3999
4000 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4001 {
4002 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4003 return builtin_type (gdbarch)->builtin_void;
4004
4005 return arm_ext_type (gdbarch);
4006 }
4007 else if (regnum == ARM_SP_REGNUM)
4008 return builtin_type (gdbarch)->builtin_data_ptr;
4009 else if (regnum == ARM_PC_REGNUM)
4010 return builtin_type (gdbarch)->builtin_func_ptr;
4011 else if (regnum >= ARRAY_SIZE (arm_register_names))
4012 /* These registers are only supported on targets which supply
4013 an XML description. */
4014 return builtin_type (gdbarch)->builtin_int0;
4015 else
4016 return builtin_type (gdbarch)->builtin_uint32;
4017 }
4018
4019 /* Map a DWARF register REGNUM onto the appropriate GDB register
4020 number. */
4021
4022 static int
4023 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4024 {
4025 /* Core integer regs. */
4026 if (reg >= 0 && reg <= 15)
4027 return reg;
4028
4029 /* Legacy FPA encoding. These were once used in a way which
4030 overlapped with VFP register numbering, so their use is
4031 discouraged, but GDB doesn't support the ARM toolchain
4032 which used them for VFP. */
4033 if (reg >= 16 && reg <= 23)
4034 return ARM_F0_REGNUM + reg - 16;
4035
4036 /* New assignments for the FPA registers. */
4037 if (reg >= 96 && reg <= 103)
4038 return ARM_F0_REGNUM + reg - 96;
4039
4040 /* WMMX register assignments. */
4041 if (reg >= 104 && reg <= 111)
4042 return ARM_WCGR0_REGNUM + reg - 104;
4043
4044 if (reg >= 112 && reg <= 127)
4045 return ARM_WR0_REGNUM + reg - 112;
4046
4047 if (reg >= 192 && reg <= 199)
4048 return ARM_WC0_REGNUM + reg - 192;
4049
4050 /* VFP v2 registers. A double precision value is actually
4051 in d1 rather than s2, but the ABI only defines numbering
4052 for the single precision registers. This will "just work"
4053 in GDB for little endian targets (we'll read eight bytes,
4054 starting in s0 and then progressing to s1), but will be
4055 reversed on big endian targets with VFP. This won't
4056 be a problem for the new Neon quad registers; you're supposed
4057 to use DW_OP_piece for those. */
4058 if (reg >= 64 && reg <= 95)
4059 {
4060 char name_buf[4];
4061
4062 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4063 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4064 strlen (name_buf));
4065 }
4066
4067 /* VFP v3 / Neon registers. This range is also used for VFP v2
4068 registers, except that it now describes d0 instead of s0. */
4069 if (reg >= 256 && reg <= 287)
4070 {
4071 char name_buf[4];
4072
4073 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4074 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4075 strlen (name_buf));
4076 }
4077
4078 return -1;
4079 }
4080
4081 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4082 static int
4083 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4084 {
4085 int reg = regnum;
4086 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4087
4088 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4089 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4090
4091 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4092 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4093
4094 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4095 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4096
4097 if (reg < NUM_GREGS)
4098 return SIM_ARM_R0_REGNUM + reg;
4099 reg -= NUM_GREGS;
4100
4101 if (reg < NUM_FREGS)
4102 return SIM_ARM_FP0_REGNUM + reg;
4103 reg -= NUM_FREGS;
4104
4105 if (reg < NUM_SREGS)
4106 return SIM_ARM_FPS_REGNUM + reg;
4107 reg -= NUM_SREGS;
4108
4109 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4110 }
4111
4112 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4113 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4114 NULL if an error occurs. BUF is freed. */
4115
4116 static gdb_byte *
4117 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4118 int old_len, int new_len)
4119 {
4120 gdb_byte *new_buf;
4121 int bytes_to_read = new_len - old_len;
4122
4123 new_buf = (gdb_byte *) xmalloc (new_len);
4124 memcpy (new_buf + bytes_to_read, buf, old_len);
4125 xfree (buf);
4126 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4127 {
4128 xfree (new_buf);
4129 return NULL;
4130 }
4131 return new_buf;
4132 }
4133
4134 /* An IT block is at most the 2-byte IT instruction followed by
4135 four 4-byte instructions. The furthest back we must search to
4136 find an IT block that affects the current instruction is thus
4137 2 + 3 * 4 == 14 bytes. */
4138 #define MAX_IT_BLOCK_PREFIX 14
4139
4140 /* Use a quick scan if there are more than this many bytes of
4141 code. */
4142 #define IT_SCAN_THRESHOLD 32
4143
4144 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4145 A breakpoint in an IT block may not be hit, depending on the
4146 condition flags. */
4147 static CORE_ADDR
4148 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4149 {
4150 gdb_byte *buf;
4151 char map_type;
4152 CORE_ADDR boundary, func_start;
4153 int buf_len;
4154 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4155 int i, any, last_it, last_it_count;
4156
4157 /* If we are using BKPT breakpoints, none of this is necessary. */
4158 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4159 return bpaddr;
4160
4161 /* ARM mode does not have this problem. */
4162 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4163 return bpaddr;
4164
4165 /* We are setting a breakpoint in Thumb code that could potentially
4166 contain an IT block. The first step is to find how much Thumb
4167 code there is; we do not need to read outside of known Thumb
4168 sequences. */
4169 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4170 if (map_type == 0)
4171 /* Thumb-2 code must have mapping symbols to have a chance. */
4172 return bpaddr;
4173
4174 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4175
4176 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4177 && func_start > boundary)
4178 boundary = func_start;
4179
4180 /* Search for a candidate IT instruction. We have to do some fancy
4181 footwork to distinguish a real IT instruction from the second
4182 half of a 32-bit instruction, but there is no need for that if
4183 there's no candidate. */
4184 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4185 if (buf_len == 0)
4186 /* No room for an IT instruction. */
4187 return bpaddr;
4188
4189 buf = (gdb_byte *) xmalloc (buf_len);
4190 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4191 return bpaddr;
4192 any = 0;
4193 for (i = 0; i < buf_len; i += 2)
4194 {
4195 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4196 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4197 {
4198 any = 1;
4199 break;
4200 }
4201 }
4202
4203 if (any == 0)
4204 {
4205 xfree (buf);
4206 return bpaddr;
4207 }
4208
4209 /* OK, the code bytes before this instruction contain at least one
4210 halfword which resembles an IT instruction. We know that it's
4211 Thumb code, but there are still two possibilities. Either the
4212 halfword really is an IT instruction, or it is the second half of
4213 a 32-bit Thumb instruction. The only way we can tell is to
4214 scan forwards from a known instruction boundary. */
4215 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4216 {
4217 int definite;
4218
4219 /* There's a lot of code before this instruction. Start with an
4220 optimistic search; it's easy to recognize halfwords that can
4221 not be the start of a 32-bit instruction, and use that to
4222 lock on to the instruction boundaries. */
4223 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4224 if (buf == NULL)
4225 return bpaddr;
4226 buf_len = IT_SCAN_THRESHOLD;
4227
4228 definite = 0;
4229 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4230 {
4231 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4232 if (thumb_insn_size (inst1) == 2)
4233 {
4234 definite = 1;
4235 break;
4236 }
4237 }
4238
4239 /* At this point, if DEFINITE, BUF[I] is the first place we
4240 are sure that we know the instruction boundaries, and it is far
4241 enough from BPADDR that we could not miss an IT instruction
4242 affecting BPADDR. If ! DEFINITE, give up - start from a
4243 known boundary. */
4244 if (! definite)
4245 {
4246 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4247 bpaddr - boundary);
4248 if (buf == NULL)
4249 return bpaddr;
4250 buf_len = bpaddr - boundary;
4251 i = 0;
4252 }
4253 }
4254 else
4255 {
4256 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4257 if (buf == NULL)
4258 return bpaddr;
4259 buf_len = bpaddr - boundary;
4260 i = 0;
4261 }
4262
4263 /* Scan forwards. Find the last IT instruction before BPADDR. */
4264 last_it = -1;
4265 last_it_count = 0;
4266 while (i < buf_len)
4267 {
4268 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4269 last_it_count--;
4270 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4271 {
4272 last_it = i;
4273 if (inst1 & 0x0001)
4274 last_it_count = 4;
4275 else if (inst1 & 0x0002)
4276 last_it_count = 3;
4277 else if (inst1 & 0x0004)
4278 last_it_count = 2;
4279 else
4280 last_it_count = 1;
4281 }
4282 i += thumb_insn_size (inst1);
4283 }
4284
4285 xfree (buf);
4286
4287 if (last_it == -1)
4288 /* There wasn't really an IT instruction after all. */
4289 return bpaddr;
4290
4291 if (last_it_count < 1)
4292 /* It was too far away. */
4293 return bpaddr;
4294
4295 /* This really is a trouble spot. Move the breakpoint to the IT
4296 instruction. */
4297 return bpaddr - buf_len + last_it;
4298 }
4299
4300 /* ARM displaced stepping support.
4301
4302 Generally ARM displaced stepping works as follows:
4303
4304 1. When an instruction is to be single-stepped, it is first decoded by
4305 arm_process_displaced_insn. Depending on the type of instruction, it is
4306 then copied to a scratch location, possibly in a modified form. The
4307 copy_* set of functions performs such modification, as necessary. A
4308 breakpoint is placed after the modified instruction in the scratch space
4309 to return control to GDB. Note in particular that instructions which
4310 modify the PC will no longer do so after modification.
4311
4312 2. The instruction is single-stepped, by setting the PC to the scratch
4313 location address, and resuming. Control returns to GDB when the
4314 breakpoint is hit.
4315
4316 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4317 function used for the current instruction. This function's job is to
4318 put the CPU/memory state back to what it would have been if the
4319 instruction had been executed unmodified in its original location. */
4320
4321 /* NOP instruction (mov r0, r0). */
4322 #define ARM_NOP 0xe1a00000
4323 #define THUMB_NOP 0x4600
4324
4325 /* Helper for register reads for displaced stepping. In particular, this
4326 returns the PC as it would be seen by the instruction at its original
4327 location. */
4328
4329 ULONGEST
4330 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4331 int regno)
4332 {
4333 ULONGEST ret;
4334 CORE_ADDR from = dsc->insn_addr;
4335
4336 if (regno == ARM_PC_REGNUM)
4337 {
4338 /* Compute pipeline offset:
4339 - When executing an ARM instruction, PC reads as the address of the
4340 current instruction plus 8.
4341 - When executing a Thumb instruction, PC reads as the address of the
4342 current instruction plus 4. */
4343
4344 if (!dsc->is_thumb)
4345 from += 8;
4346 else
4347 from += 4;
4348
4349 if (debug_displaced)
4350 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4351 (unsigned long) from);
4352 return (ULONGEST) from;
4353 }
4354 else
4355 {
4356 regcache_cooked_read_unsigned (regs, regno, &ret);
4357 if (debug_displaced)
4358 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4359 regno, (unsigned long) ret);
4360 return ret;
4361 }
4362 }
4363
4364 static int
4365 displaced_in_arm_mode (struct regcache *regs)
4366 {
4367 ULONGEST ps;
4368 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4369
4370 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4371
4372 return (ps & t_bit) == 0;
4373 }
4374
4375 /* Write to the PC as from a branch instruction. */
4376
4377 static void
4378 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4379 ULONGEST val)
4380 {
4381 if (!dsc->is_thumb)
4382 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4383 architecture versions < 6. */
4384 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4385 val & ~(ULONGEST) 0x3);
4386 else
4387 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4388 val & ~(ULONGEST) 0x1);
4389 }
4390
4391 /* Write to the PC as from a branch-exchange instruction. */
4392
4393 static void
4394 bx_write_pc (struct regcache *regs, ULONGEST val)
4395 {
4396 ULONGEST ps;
4397 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4398
4399 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4400
4401 if ((val & 1) == 1)
4402 {
4403 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4404 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4405 }
4406 else if ((val & 2) == 0)
4407 {
4408 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4409 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4410 }
4411 else
4412 {
4413 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4414 mode, align dest to 4 bytes). */
4415 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4416 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4417 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4418 }
4419 }
4420
4421 /* Write to the PC as if from a load instruction. */
4422
4423 static void
4424 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4425 ULONGEST val)
4426 {
4427 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4428 bx_write_pc (regs, val);
4429 else
4430 branch_write_pc (regs, dsc, val);
4431 }
4432
4433 /* Write to the PC as if from an ALU instruction. */
4434
4435 static void
4436 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4437 ULONGEST val)
4438 {
4439 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4440 bx_write_pc (regs, val);
4441 else
4442 branch_write_pc (regs, dsc, val);
4443 }
4444
4445 /* Helper for writing to registers for displaced stepping. Writing to the PC
4446 has a varying effects depending on the instruction which does the write:
4447 this is controlled by the WRITE_PC argument. */
4448
4449 void
4450 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4451 int regno, ULONGEST val, enum pc_write_style write_pc)
4452 {
4453 if (regno == ARM_PC_REGNUM)
4454 {
4455 if (debug_displaced)
4456 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4457 (unsigned long) val);
4458 switch (write_pc)
4459 {
4460 case BRANCH_WRITE_PC:
4461 branch_write_pc (regs, dsc, val);
4462 break;
4463
4464 case BX_WRITE_PC:
4465 bx_write_pc (regs, val);
4466 break;
4467
4468 case LOAD_WRITE_PC:
4469 load_write_pc (regs, dsc, val);
4470 break;
4471
4472 case ALU_WRITE_PC:
4473 alu_write_pc (regs, dsc, val);
4474 break;
4475
4476 case CANNOT_WRITE_PC:
4477 warning (_("Instruction wrote to PC in an unexpected way when "
4478 "single-stepping"));
4479 break;
4480
4481 default:
4482 internal_error (__FILE__, __LINE__,
4483 _("Invalid argument to displaced_write_reg"));
4484 }
4485
4486 dsc->wrote_to_pc = 1;
4487 }
4488 else
4489 {
4490 if (debug_displaced)
4491 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4492 regno, (unsigned long) val);
4493 regcache_cooked_write_unsigned (regs, regno, val);
4494 }
4495 }
4496
4497 /* This function is used to concisely determine if an instruction INSN
4498 references PC. Register fields of interest in INSN should have the
4499 corresponding fields of BITMASK set to 0b1111. The function
4500 returns return 1 if any of these fields in INSN reference the PC
4501 (also 0b1111, r15), else it returns 0. */
4502
4503 static int
4504 insn_references_pc (uint32_t insn, uint32_t bitmask)
4505 {
4506 uint32_t lowbit = 1;
4507
4508 while (bitmask != 0)
4509 {
4510 uint32_t mask;
4511
4512 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4513 ;
4514
4515 if (!lowbit)
4516 break;
4517
4518 mask = lowbit * 0xf;
4519
4520 if ((insn & mask) == mask)
4521 return 1;
4522
4523 bitmask &= ~mask;
4524 }
4525
4526 return 0;
4527 }
4528
4529 /* The simplest copy function. Many instructions have the same effect no
4530 matter what address they are executed at: in those cases, use this. */
4531
4532 static int
4533 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4534 const char *iname, arm_displaced_step_closure *dsc)
4535 {
4536 if (debug_displaced)
4537 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4538 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4539 iname);
4540
4541 dsc->modinsn[0] = insn;
4542
4543 return 0;
4544 }
4545
4546 static int
4547 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4548 uint16_t insn2, const char *iname,
4549 arm_displaced_step_closure *dsc)
4550 {
4551 if (debug_displaced)
4552 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4553 "opcode/class '%s' unmodified\n", insn1, insn2,
4554 iname);
4555
4556 dsc->modinsn[0] = insn1;
4557 dsc->modinsn[1] = insn2;
4558 dsc->numinsns = 2;
4559
4560 return 0;
4561 }
4562
4563 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4564 modification. */
4565 static int
4566 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4567 const char *iname,
4568 arm_displaced_step_closure *dsc)
4569 {
4570 if (debug_displaced)
4571 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4572 "opcode/class '%s' unmodified\n", insn,
4573 iname);
4574
4575 dsc->modinsn[0] = insn;
4576
4577 return 0;
4578 }
4579
4580 /* Preload instructions with immediate offset. */
4581
4582 static void
4583 cleanup_preload (struct gdbarch *gdbarch,
4584 struct regcache *regs, arm_displaced_step_closure *dsc)
4585 {
4586 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4587 if (!dsc->u.preload.immed)
4588 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4589 }
4590
4591 static void
4592 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4593 arm_displaced_step_closure *dsc, unsigned int rn)
4594 {
4595 ULONGEST rn_val;
4596 /* Preload instructions:
4597
4598 {pli/pld} [rn, #+/-imm]
4599 ->
4600 {pli/pld} [r0, #+/-imm]. */
4601
4602 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4603 rn_val = displaced_read_reg (regs, dsc, rn);
4604 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4605 dsc->u.preload.immed = 1;
4606
4607 dsc->cleanup = &cleanup_preload;
4608 }
4609
4610 static int
4611 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4612 arm_displaced_step_closure *dsc)
4613 {
4614 unsigned int rn = bits (insn, 16, 19);
4615
4616 if (!insn_references_pc (insn, 0x000f0000ul))
4617 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4618
4619 if (debug_displaced)
4620 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4621 (unsigned long) insn);
4622
4623 dsc->modinsn[0] = insn & 0xfff0ffff;
4624
4625 install_preload (gdbarch, regs, dsc, rn);
4626
4627 return 0;
4628 }
4629
4630 static int
4631 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4632 struct regcache *regs, arm_displaced_step_closure *dsc)
4633 {
4634 unsigned int rn = bits (insn1, 0, 3);
4635 unsigned int u_bit = bit (insn1, 7);
4636 int imm12 = bits (insn2, 0, 11);
4637 ULONGEST pc_val;
4638
4639 if (rn != ARM_PC_REGNUM)
4640 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4641
4642 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4643 PLD (literal) Encoding T1. */
4644 if (debug_displaced)
4645 fprintf_unfiltered (gdb_stdlog,
4646 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4647 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4648 imm12);
4649
4650 if (!u_bit)
4651 imm12 = -1 * imm12;
4652
4653 /* Rewrite instruction {pli/pld} PC imm12 into:
4654 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4655
4656 {pli/pld} [r0, r1]
4657
4658 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4659
4660 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4661 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4662
4663 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4664
4665 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4666 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4667 dsc->u.preload.immed = 0;
4668
4669 /* {pli/pld} [r0, r1] */
4670 dsc->modinsn[0] = insn1 & 0xfff0;
4671 dsc->modinsn[1] = 0xf001;
4672 dsc->numinsns = 2;
4673
4674 dsc->cleanup = &cleanup_preload;
4675 return 0;
4676 }
4677
4678 /* Preload instructions with register offset. */
4679
4680 static void
4681 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4682 arm_displaced_step_closure *dsc, unsigned int rn,
4683 unsigned int rm)
4684 {
4685 ULONGEST rn_val, rm_val;
4686
4687 /* Preload register-offset instructions:
4688
4689 {pli/pld} [rn, rm {, shift}]
4690 ->
4691 {pli/pld} [r0, r1 {, shift}]. */
4692
4693 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4694 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4695 rn_val = displaced_read_reg (regs, dsc, rn);
4696 rm_val = displaced_read_reg (regs, dsc, rm);
4697 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4698 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4699 dsc->u.preload.immed = 0;
4700
4701 dsc->cleanup = &cleanup_preload;
4702 }
4703
4704 static int
4705 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4706 struct regcache *regs,
4707 arm_displaced_step_closure *dsc)
4708 {
4709 unsigned int rn = bits (insn, 16, 19);
4710 unsigned int rm = bits (insn, 0, 3);
4711
4712
4713 if (!insn_references_pc (insn, 0x000f000ful))
4714 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4715
4716 if (debug_displaced)
4717 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4718 (unsigned long) insn);
4719
4720 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4721
4722 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4723 return 0;
4724 }
4725
4726 /* Copy/cleanup coprocessor load and store instructions. */
4727
4728 static void
4729 cleanup_copro_load_store (struct gdbarch *gdbarch,
4730 struct regcache *regs,
4731 arm_displaced_step_closure *dsc)
4732 {
4733 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4734
4735 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4736
4737 if (dsc->u.ldst.writeback)
4738 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4739 }
4740
4741 static void
4742 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4743 arm_displaced_step_closure *dsc,
4744 int writeback, unsigned int rn)
4745 {
4746 ULONGEST rn_val;
4747
4748 /* Coprocessor load/store instructions:
4749
4750 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4751 ->
4752 {stc/stc2} [r0, #+/-imm].
4753
4754 ldc/ldc2 are handled identically. */
4755
4756 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4757 rn_val = displaced_read_reg (regs, dsc, rn);
4758 /* PC should be 4-byte aligned. */
4759 rn_val = rn_val & 0xfffffffc;
4760 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4761
4762 dsc->u.ldst.writeback = writeback;
4763 dsc->u.ldst.rn = rn;
4764
4765 dsc->cleanup = &cleanup_copro_load_store;
4766 }
4767
4768 static int
4769 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4770 struct regcache *regs,
4771 arm_displaced_step_closure *dsc)
4772 {
4773 unsigned int rn = bits (insn, 16, 19);
4774
4775 if (!insn_references_pc (insn, 0x000f0000ul))
4776 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4777
4778 if (debug_displaced)
4779 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4780 "load/store insn %.8lx\n", (unsigned long) insn);
4781
4782 dsc->modinsn[0] = insn & 0xfff0ffff;
4783
4784 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4785
4786 return 0;
4787 }
4788
4789 static int
4790 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4791 uint16_t insn2, struct regcache *regs,
4792 arm_displaced_step_closure *dsc)
4793 {
4794 unsigned int rn = bits (insn1, 0, 3);
4795
4796 if (rn != ARM_PC_REGNUM)
4797 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4798 "copro load/store", dsc);
4799
4800 if (debug_displaced)
4801 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4802 "load/store insn %.4x%.4x\n", insn1, insn2);
4803
4804 dsc->modinsn[0] = insn1 & 0xfff0;
4805 dsc->modinsn[1] = insn2;
4806 dsc->numinsns = 2;
4807
4808 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4809 doesn't support writeback, so pass 0. */
4810 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4811
4812 return 0;
4813 }
4814
4815 /* Clean up branch instructions (actually perform the branch, by setting
4816 PC). */
4817
4818 static void
4819 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4820 arm_displaced_step_closure *dsc)
4821 {
4822 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4823 int branch_taken = condition_true (dsc->u.branch.cond, status);
4824 enum pc_write_style write_pc = dsc->u.branch.exchange
4825 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4826
4827 if (!branch_taken)
4828 return;
4829
4830 if (dsc->u.branch.link)
4831 {
4832 /* The value of LR should be the next insn of current one. In order
4833 not to confuse logic handling later insn `bx lr', if current insn mode
4834 is Thumb, the bit 0 of LR value should be set to 1. */
4835 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4836
4837 if (dsc->is_thumb)
4838 next_insn_addr |= 0x1;
4839
4840 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4841 CANNOT_WRITE_PC);
4842 }
4843
4844 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4845 }
4846
4847 /* Copy B/BL/BLX instructions with immediate destinations. */
4848
4849 static void
4850 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4851 arm_displaced_step_closure *dsc,
4852 unsigned int cond, int exchange, int link, long offset)
4853 {
4854 /* Implement "BL<cond> <label>" as:
4855
4856 Preparation: cond <- instruction condition
4857 Insn: mov r0, r0 (nop)
4858 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4859
4860 B<cond> similar, but don't set r14 in cleanup. */
4861
4862 dsc->u.branch.cond = cond;
4863 dsc->u.branch.link = link;
4864 dsc->u.branch.exchange = exchange;
4865
4866 dsc->u.branch.dest = dsc->insn_addr;
4867 if (link && exchange)
4868 /* For BLX, offset is computed from the Align (PC, 4). */
4869 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4870
4871 if (dsc->is_thumb)
4872 dsc->u.branch.dest += 4 + offset;
4873 else
4874 dsc->u.branch.dest += 8 + offset;
4875
4876 dsc->cleanup = &cleanup_branch;
4877 }
4878 static int
4879 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4880 struct regcache *regs, arm_displaced_step_closure *dsc)
4881 {
4882 unsigned int cond = bits (insn, 28, 31);
4883 int exchange = (cond == 0xf);
4884 int link = exchange || bit (insn, 24);
4885 long offset;
4886
4887 if (debug_displaced)
4888 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4889 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4890 (unsigned long) insn);
4891 if (exchange)
4892 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4893 then arrange the switch into Thumb mode. */
4894 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4895 else
4896 offset = bits (insn, 0, 23) << 2;
4897
4898 if (bit (offset, 25))
4899 offset = offset | ~0x3ffffff;
4900
4901 dsc->modinsn[0] = ARM_NOP;
4902
4903 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4904 return 0;
4905 }
4906
4907 static int
4908 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4909 uint16_t insn2, struct regcache *regs,
4910 arm_displaced_step_closure *dsc)
4911 {
4912 int link = bit (insn2, 14);
4913 int exchange = link && !bit (insn2, 12);
4914 int cond = INST_AL;
4915 long offset = 0;
4916 int j1 = bit (insn2, 13);
4917 int j2 = bit (insn2, 11);
4918 int s = sbits (insn1, 10, 10);
4919 int i1 = !(j1 ^ bit (insn1, 10));
4920 int i2 = !(j2 ^ bit (insn1, 10));
4921
4922 if (!link && !exchange) /* B */
4923 {
4924 offset = (bits (insn2, 0, 10) << 1);
4925 if (bit (insn2, 12)) /* Encoding T4 */
4926 {
4927 offset |= (bits (insn1, 0, 9) << 12)
4928 | (i2 << 22)
4929 | (i1 << 23)
4930 | (s << 24);
4931 cond = INST_AL;
4932 }
4933 else /* Encoding T3 */
4934 {
4935 offset |= (bits (insn1, 0, 5) << 12)
4936 | (j1 << 18)
4937 | (j2 << 19)
4938 | (s << 20);
4939 cond = bits (insn1, 6, 9);
4940 }
4941 }
4942 else
4943 {
4944 offset = (bits (insn1, 0, 9) << 12);
4945 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4946 offset |= exchange ?
4947 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4948 }
4949
4950 if (debug_displaced)
4951 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4952 "%.4x %.4x with offset %.8lx\n",
4953 link ? (exchange) ? "blx" : "bl" : "b",
4954 insn1, insn2, offset);
4955
4956 dsc->modinsn[0] = THUMB_NOP;
4957
4958 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4959 return 0;
4960 }
4961
4962 /* Copy B Thumb instructions. */
4963 static int
4964 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4965 arm_displaced_step_closure *dsc)
4966 {
4967 unsigned int cond = 0;
4968 int offset = 0;
4969 unsigned short bit_12_15 = bits (insn, 12, 15);
4970 CORE_ADDR from = dsc->insn_addr;
4971
4972 if (bit_12_15 == 0xd)
4973 {
4974 /* offset = SignExtend (imm8:0, 32) */
4975 offset = sbits ((insn << 1), 0, 8);
4976 cond = bits (insn, 8, 11);
4977 }
4978 else if (bit_12_15 == 0xe) /* Encoding T2 */
4979 {
4980 offset = sbits ((insn << 1), 0, 11);
4981 cond = INST_AL;
4982 }
4983
4984 if (debug_displaced)
4985 fprintf_unfiltered (gdb_stdlog,
4986 "displaced: copying b immediate insn %.4x "
4987 "with offset %d\n", insn, offset);
4988
4989 dsc->u.branch.cond = cond;
4990 dsc->u.branch.link = 0;
4991 dsc->u.branch.exchange = 0;
4992 dsc->u.branch.dest = from + 4 + offset;
4993
4994 dsc->modinsn[0] = THUMB_NOP;
4995
4996 dsc->cleanup = &cleanup_branch;
4997
4998 return 0;
4999 }
5000
5001 /* Copy BX/BLX with register-specified destinations. */
5002
5003 static void
5004 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5005 arm_displaced_step_closure *dsc, int link,
5006 unsigned int cond, unsigned int rm)
5007 {
5008 /* Implement {BX,BLX}<cond> <reg>" as:
5009
5010 Preparation: cond <- instruction condition
5011 Insn: mov r0, r0 (nop)
5012 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5013
5014 Don't set r14 in cleanup for BX. */
5015
5016 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5017
5018 dsc->u.branch.cond = cond;
5019 dsc->u.branch.link = link;
5020
5021 dsc->u.branch.exchange = 1;
5022
5023 dsc->cleanup = &cleanup_branch;
5024 }
5025
5026 static int
5027 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5028 struct regcache *regs, arm_displaced_step_closure *dsc)
5029 {
5030 unsigned int cond = bits (insn, 28, 31);
5031 /* BX: x12xxx1x
5032 BLX: x12xxx3x. */
5033 int link = bit (insn, 5);
5034 unsigned int rm = bits (insn, 0, 3);
5035
5036 if (debug_displaced)
5037 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5038 (unsigned long) insn);
5039
5040 dsc->modinsn[0] = ARM_NOP;
5041
5042 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5043 return 0;
5044 }
5045
5046 static int
5047 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5048 struct regcache *regs,
5049 arm_displaced_step_closure *dsc)
5050 {
5051 int link = bit (insn, 7);
5052 unsigned int rm = bits (insn, 3, 6);
5053
5054 if (debug_displaced)
5055 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5056 (unsigned short) insn);
5057
5058 dsc->modinsn[0] = THUMB_NOP;
5059
5060 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5061
5062 return 0;
5063 }
5064
5065
5066 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5067
5068 static void
5069 cleanup_alu_imm (struct gdbarch *gdbarch,
5070 struct regcache *regs, arm_displaced_step_closure *dsc)
5071 {
5072 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5073 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5074 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5075 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5076 }
5077
5078 static int
5079 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5080 arm_displaced_step_closure *dsc)
5081 {
5082 unsigned int rn = bits (insn, 16, 19);
5083 unsigned int rd = bits (insn, 12, 15);
5084 unsigned int op = bits (insn, 21, 24);
5085 int is_mov = (op == 0xd);
5086 ULONGEST rd_val, rn_val;
5087
5088 if (!insn_references_pc (insn, 0x000ff000ul))
5089 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5090
5091 if (debug_displaced)
5092 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5093 "%.8lx\n", is_mov ? "move" : "ALU",
5094 (unsigned long) insn);
5095
5096 /* Instruction is of form:
5097
5098 <op><cond> rd, [rn,] #imm
5099
5100 Rewrite as:
5101
5102 Preparation: tmp1, tmp2 <- r0, r1;
5103 r0, r1 <- rd, rn
5104 Insn: <op><cond> r0, r1, #imm
5105 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5106 */
5107
5108 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5109 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5110 rn_val = displaced_read_reg (regs, dsc, rn);
5111 rd_val = displaced_read_reg (regs, dsc, rd);
5112 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5113 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5114 dsc->rd = rd;
5115
5116 if (is_mov)
5117 dsc->modinsn[0] = insn & 0xfff00fff;
5118 else
5119 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5120
5121 dsc->cleanup = &cleanup_alu_imm;
5122
5123 return 0;
5124 }
5125
5126 static int
5127 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5128 uint16_t insn2, struct regcache *regs,
5129 arm_displaced_step_closure *dsc)
5130 {
5131 unsigned int op = bits (insn1, 5, 8);
5132 unsigned int rn, rm, rd;
5133 ULONGEST rd_val, rn_val;
5134
5135 rn = bits (insn1, 0, 3); /* Rn */
5136 rm = bits (insn2, 0, 3); /* Rm */
5137 rd = bits (insn2, 8, 11); /* Rd */
5138
5139 /* This routine is only called for instruction MOV. */
5140 gdb_assert (op == 0x2 && rn == 0xf);
5141
5142 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5143 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5144
5145 if (debug_displaced)
5146 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5147 "ALU", insn1, insn2);
5148
5149 /* Instruction is of form:
5150
5151 <op><cond> rd, [rn,] #imm
5152
5153 Rewrite as:
5154
5155 Preparation: tmp1, tmp2 <- r0, r1;
5156 r0, r1 <- rd, rn
5157 Insn: <op><cond> r0, r1, #imm
5158 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5159 */
5160
5161 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5162 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5163 rn_val = displaced_read_reg (regs, dsc, rn);
5164 rd_val = displaced_read_reg (regs, dsc, rd);
5165 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5166 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5167 dsc->rd = rd;
5168
5169 dsc->modinsn[0] = insn1;
5170 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5171 dsc->numinsns = 2;
5172
5173 dsc->cleanup = &cleanup_alu_imm;
5174
5175 return 0;
5176 }
5177
5178 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5179
5180 static void
5181 cleanup_alu_reg (struct gdbarch *gdbarch,
5182 struct regcache *regs, arm_displaced_step_closure *dsc)
5183 {
5184 ULONGEST rd_val;
5185 int i;
5186
5187 rd_val = displaced_read_reg (regs, dsc, 0);
5188
5189 for (i = 0; i < 3; i++)
5190 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5191
5192 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5193 }
5194
5195 static void
5196 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5197 arm_displaced_step_closure *dsc,
5198 unsigned int rd, unsigned int rn, unsigned int rm)
5199 {
5200 ULONGEST rd_val, rn_val, rm_val;
5201
5202 /* Instruction is of form:
5203
5204 <op><cond> rd, [rn,] rm [, <shift>]
5205
5206 Rewrite as:
5207
5208 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5209 r0, r1, r2 <- rd, rn, rm
5210 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5211 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5212 */
5213
5214 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5215 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5216 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5217 rd_val = displaced_read_reg (regs, dsc, rd);
5218 rn_val = displaced_read_reg (regs, dsc, rn);
5219 rm_val = displaced_read_reg (regs, dsc, rm);
5220 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5221 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5222 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5223 dsc->rd = rd;
5224
5225 dsc->cleanup = &cleanup_alu_reg;
5226 }
5227
5228 static int
5229 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5230 arm_displaced_step_closure *dsc)
5231 {
5232 unsigned int op = bits (insn, 21, 24);
5233 int is_mov = (op == 0xd);
5234
5235 if (!insn_references_pc (insn, 0x000ff00ful))
5236 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5237
5238 if (debug_displaced)
5239 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5240 is_mov ? "move" : "ALU", (unsigned long) insn);
5241
5242 if (is_mov)
5243 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5244 else
5245 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5246
5247 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5248 bits (insn, 0, 3));
5249 return 0;
5250 }
5251
5252 static int
5253 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5254 struct regcache *regs,
5255 arm_displaced_step_closure *dsc)
5256 {
5257 unsigned rm, rd;
5258
5259 rm = bits (insn, 3, 6);
5260 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5261
5262 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5263 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5264
5265 if (debug_displaced)
5266 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5267 (unsigned short) insn);
5268
5269 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5270
5271 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5272
5273 return 0;
5274 }
5275
5276 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5277
5278 static void
5279 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5280 struct regcache *regs,
5281 arm_displaced_step_closure *dsc)
5282 {
5283 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5284 int i;
5285
5286 for (i = 0; i < 4; i++)
5287 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5288
5289 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5290 }
5291
5292 static void
5293 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5294 arm_displaced_step_closure *dsc,
5295 unsigned int rd, unsigned int rn, unsigned int rm,
5296 unsigned rs)
5297 {
5298 int i;
5299 ULONGEST rd_val, rn_val, rm_val, rs_val;
5300
5301 /* Instruction is of form:
5302
5303 <op><cond> rd, [rn,] rm, <shift> rs
5304
5305 Rewrite as:
5306
5307 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5308 r0, r1, r2, r3 <- rd, rn, rm, rs
5309 Insn: <op><cond> r0, r1, r2, <shift> r3
5310 Cleanup: tmp5 <- r0
5311 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5312 rd <- tmp5
5313 */
5314
5315 for (i = 0; i < 4; i++)
5316 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5317
5318 rd_val = displaced_read_reg (regs, dsc, rd);
5319 rn_val = displaced_read_reg (regs, dsc, rn);
5320 rm_val = displaced_read_reg (regs, dsc, rm);
5321 rs_val = displaced_read_reg (regs, dsc, rs);
5322 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5323 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5324 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5325 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5326 dsc->rd = rd;
5327 dsc->cleanup = &cleanup_alu_shifted_reg;
5328 }
5329
5330 static int
5331 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5332 struct regcache *regs,
5333 arm_displaced_step_closure *dsc)
5334 {
5335 unsigned int op = bits (insn, 21, 24);
5336 int is_mov = (op == 0xd);
5337 unsigned int rd, rn, rm, rs;
5338
5339 if (!insn_references_pc (insn, 0x000fff0ful))
5340 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5341
5342 if (debug_displaced)
5343 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5344 "%.8lx\n", is_mov ? "move" : "ALU",
5345 (unsigned long) insn);
5346
5347 rn = bits (insn, 16, 19);
5348 rm = bits (insn, 0, 3);
5349 rs = bits (insn, 8, 11);
5350 rd = bits (insn, 12, 15);
5351
5352 if (is_mov)
5353 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5354 else
5355 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5356
5357 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5358
5359 return 0;
5360 }
5361
5362 /* Clean up load instructions. */
5363
5364 static void
5365 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5366 arm_displaced_step_closure *dsc)
5367 {
5368 ULONGEST rt_val, rt_val2 = 0, rn_val;
5369
5370 rt_val = displaced_read_reg (regs, dsc, 0);
5371 if (dsc->u.ldst.xfersize == 8)
5372 rt_val2 = displaced_read_reg (regs, dsc, 1);
5373 rn_val = displaced_read_reg (regs, dsc, 2);
5374
5375 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5376 if (dsc->u.ldst.xfersize > 4)
5377 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5378 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5379 if (!dsc->u.ldst.immed)
5380 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5381
5382 /* Handle register writeback. */
5383 if (dsc->u.ldst.writeback)
5384 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5385 /* Put result in right place. */
5386 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5387 if (dsc->u.ldst.xfersize == 8)
5388 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5389 }
5390
5391 /* Clean up store instructions. */
5392
5393 static void
5394 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5395 arm_displaced_step_closure *dsc)
5396 {
5397 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5398
5399 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5400 if (dsc->u.ldst.xfersize > 4)
5401 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5402 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5403 if (!dsc->u.ldst.immed)
5404 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5405 if (!dsc->u.ldst.restore_r4)
5406 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5407
5408 /* Writeback. */
5409 if (dsc->u.ldst.writeback)
5410 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5411 }
5412
5413 /* Copy "extra" load/store instructions. These are halfword/doubleword
5414 transfers, which have a different encoding to byte/word transfers. */
5415
5416 static int
5417 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5418 struct regcache *regs, arm_displaced_step_closure *dsc)
5419 {
5420 unsigned int op1 = bits (insn, 20, 24);
5421 unsigned int op2 = bits (insn, 5, 6);
5422 unsigned int rt = bits (insn, 12, 15);
5423 unsigned int rn = bits (insn, 16, 19);
5424 unsigned int rm = bits (insn, 0, 3);
5425 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5426 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5427 int immed = (op1 & 0x4) != 0;
5428 int opcode;
5429 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5430
5431 if (!insn_references_pc (insn, 0x000ff00ful))
5432 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5433
5434 if (debug_displaced)
5435 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5436 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5437 (unsigned long) insn);
5438
5439 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5440
5441 if (opcode < 0)
5442 internal_error (__FILE__, __LINE__,
5443 _("copy_extra_ld_st: instruction decode error"));
5444
5445 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5446 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5447 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5448 if (!immed)
5449 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5450
5451 rt_val = displaced_read_reg (regs, dsc, rt);
5452 if (bytesize[opcode] == 8)
5453 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5454 rn_val = displaced_read_reg (regs, dsc, rn);
5455 if (!immed)
5456 rm_val = displaced_read_reg (regs, dsc, rm);
5457
5458 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5459 if (bytesize[opcode] == 8)
5460 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5461 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5462 if (!immed)
5463 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5464
5465 dsc->rd = rt;
5466 dsc->u.ldst.xfersize = bytesize[opcode];
5467 dsc->u.ldst.rn = rn;
5468 dsc->u.ldst.immed = immed;
5469 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5470 dsc->u.ldst.restore_r4 = 0;
5471
5472 if (immed)
5473 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5474 ->
5475 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5476 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5477 else
5478 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5479 ->
5480 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5481 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5482
5483 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5484
5485 return 0;
5486 }
5487
5488 /* Copy byte/half word/word loads and stores. */
5489
5490 static void
5491 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5492 arm_displaced_step_closure *dsc, int load,
5493 int immed, int writeback, int size, int usermode,
5494 int rt, int rm, int rn)
5495 {
5496 ULONGEST rt_val, rn_val, rm_val = 0;
5497
5498 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5499 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5500 if (!immed)
5501 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5502 if (!load)
5503 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5504
5505 rt_val = displaced_read_reg (regs, dsc, rt);
5506 rn_val = displaced_read_reg (regs, dsc, rn);
5507 if (!immed)
5508 rm_val = displaced_read_reg (regs, dsc, rm);
5509
5510 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5511 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5512 if (!immed)
5513 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5514 dsc->rd = rt;
5515 dsc->u.ldst.xfersize = size;
5516 dsc->u.ldst.rn = rn;
5517 dsc->u.ldst.immed = immed;
5518 dsc->u.ldst.writeback = writeback;
5519
5520 /* To write PC we can do:
5521
5522 Before this sequence of instructions:
5523 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5524 r2 is the Rn value got from displaced_read_reg.
5525
5526 Insn1: push {pc} Write address of STR instruction + offset on stack
5527 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5528 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5529 = addr(Insn1) + offset - addr(Insn3) - 8
5530 = offset - 16
5531 Insn4: add r4, r4, #8 r4 = offset - 8
5532 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5533 = from + offset
5534 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5535
5536 Otherwise we don't know what value to write for PC, since the offset is
5537 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5538 of this can be found in Section "Saving from r15" in
5539 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5540
5541 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5542 }
5543
5544
5545 static int
5546 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5547 uint16_t insn2, struct regcache *regs,
5548 arm_displaced_step_closure *dsc, int size)
5549 {
5550 unsigned int u_bit = bit (insn1, 7);
5551 unsigned int rt = bits (insn2, 12, 15);
5552 int imm12 = bits (insn2, 0, 11);
5553 ULONGEST pc_val;
5554
5555 if (debug_displaced)
5556 fprintf_unfiltered (gdb_stdlog,
5557 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5558 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5559 imm12);
5560
5561 if (!u_bit)
5562 imm12 = -1 * imm12;
5563
5564 /* Rewrite instruction LDR Rt imm12 into:
5565
5566 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5567
5568 LDR R0, R2, R3,
5569
5570 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5571
5572
5573 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5574 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5575 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5576
5577 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5578
5579 pc_val = pc_val & 0xfffffffc;
5580
5581 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5582 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5583
5584 dsc->rd = rt;
5585
5586 dsc->u.ldst.xfersize = size;
5587 dsc->u.ldst.immed = 0;
5588 dsc->u.ldst.writeback = 0;
5589 dsc->u.ldst.restore_r4 = 0;
5590
5591 /* LDR R0, R2, R3 */
5592 dsc->modinsn[0] = 0xf852;
5593 dsc->modinsn[1] = 0x3;
5594 dsc->numinsns = 2;
5595
5596 dsc->cleanup = &cleanup_load;
5597
5598 return 0;
5599 }
5600
5601 static int
5602 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5603 uint16_t insn2, struct regcache *regs,
5604 arm_displaced_step_closure *dsc,
5605 int writeback, int immed)
5606 {
5607 unsigned int rt = bits (insn2, 12, 15);
5608 unsigned int rn = bits (insn1, 0, 3);
5609 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5610 /* In LDR (register), there is also a register Rm, which is not allowed to
5611 be PC, so we don't have to check it. */
5612
5613 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5614 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5615 dsc);
5616
5617 if (debug_displaced)
5618 fprintf_unfiltered (gdb_stdlog,
5619 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5620 rt, rn, insn1, insn2);
5621
5622 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5623 0, rt, rm, rn);
5624
5625 dsc->u.ldst.restore_r4 = 0;
5626
5627 if (immed)
5628 /* ldr[b]<cond> rt, [rn, #imm], etc.
5629 ->
5630 ldr[b]<cond> r0, [r2, #imm]. */
5631 {
5632 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5633 dsc->modinsn[1] = insn2 & 0x0fff;
5634 }
5635 else
5636 /* ldr[b]<cond> rt, [rn, rm], etc.
5637 ->
5638 ldr[b]<cond> r0, [r2, r3]. */
5639 {
5640 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5641 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5642 }
5643
5644 dsc->numinsns = 2;
5645
5646 return 0;
5647 }
5648
5649
5650 static int
5651 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5652 struct regcache *regs,
5653 arm_displaced_step_closure *dsc,
5654 int load, int size, int usermode)
5655 {
5656 int immed = !bit (insn, 25);
5657 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5658 unsigned int rt = bits (insn, 12, 15);
5659 unsigned int rn = bits (insn, 16, 19);
5660 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5661
5662 if (!insn_references_pc (insn, 0x000ff00ful))
5663 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5664
5665 if (debug_displaced)
5666 fprintf_unfiltered (gdb_stdlog,
5667 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5668 load ? (size == 1 ? "ldrb" : "ldr")
5669 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5670 rt, rn,
5671 (unsigned long) insn);
5672
5673 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5674 usermode, rt, rm, rn);
5675
5676 if (load || rt != ARM_PC_REGNUM)
5677 {
5678 dsc->u.ldst.restore_r4 = 0;
5679
5680 if (immed)
5681 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5682 ->
5683 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5684 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5685 else
5686 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5687 ->
5688 {ldr,str}[b]<cond> r0, [r2, r3]. */
5689 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5690 }
5691 else
5692 {
5693 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5694 dsc->u.ldst.restore_r4 = 1;
5695 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5696 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5697 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5698 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5699 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5700
5701 /* As above. */
5702 if (immed)
5703 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5704 else
5705 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5706
5707 dsc->numinsns = 6;
5708 }
5709
5710 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5711
5712 return 0;
5713 }
5714
5715 /* Cleanup LDM instructions with fully-populated register list. This is an
5716 unfortunate corner case: it's impossible to implement correctly by modifying
5717 the instruction. The issue is as follows: we have an instruction,
5718
5719 ldm rN, {r0-r15}
5720
5721 which we must rewrite to avoid loading PC. A possible solution would be to
5722 do the load in two halves, something like (with suitable cleanup
5723 afterwards):
5724
5725 mov r8, rN
5726 ldm[id][ab] r8!, {r0-r7}
5727 str r7, <temp>
5728 ldm[id][ab] r8, {r7-r14}
5729 <bkpt>
5730
5731 but at present there's no suitable place for <temp>, since the scratch space
5732 is overwritten before the cleanup routine is called. For now, we simply
5733 emulate the instruction. */
5734
5735 static void
5736 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5737 arm_displaced_step_closure *dsc)
5738 {
5739 int inc = dsc->u.block.increment;
5740 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5741 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5742 uint32_t regmask = dsc->u.block.regmask;
5743 int regno = inc ? 0 : 15;
5744 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5745 int exception_return = dsc->u.block.load && dsc->u.block.user
5746 && (regmask & 0x8000) != 0;
5747 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5748 int do_transfer = condition_true (dsc->u.block.cond, status);
5749 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5750
5751 if (!do_transfer)
5752 return;
5753
5754 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5755 sensible we can do here. Complain loudly. */
5756 if (exception_return)
5757 error (_("Cannot single-step exception return"));
5758
5759 /* We don't handle any stores here for now. */
5760 gdb_assert (dsc->u.block.load != 0);
5761
5762 if (debug_displaced)
5763 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5764 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5765 dsc->u.block.increment ? "inc" : "dec",
5766 dsc->u.block.before ? "before" : "after");
5767
5768 while (regmask)
5769 {
5770 uint32_t memword;
5771
5772 if (inc)
5773 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5774 regno++;
5775 else
5776 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5777 regno--;
5778
5779 xfer_addr += bump_before;
5780
5781 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5782 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5783
5784 xfer_addr += bump_after;
5785
5786 regmask &= ~(1 << regno);
5787 }
5788
5789 if (dsc->u.block.writeback)
5790 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5791 CANNOT_WRITE_PC);
5792 }
5793
5794 /* Clean up an STM which included the PC in the register list. */
5795
5796 static void
5797 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5798 arm_displaced_step_closure *dsc)
5799 {
5800 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5801 int store_executed = condition_true (dsc->u.block.cond, status);
5802 CORE_ADDR pc_stored_at, transferred_regs
5803 = count_one_bits (dsc->u.block.regmask);
5804 CORE_ADDR stm_insn_addr;
5805 uint32_t pc_val;
5806 long offset;
5807 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5808
5809 /* If condition code fails, there's nothing else to do. */
5810 if (!store_executed)
5811 return;
5812
5813 if (dsc->u.block.increment)
5814 {
5815 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5816
5817 if (dsc->u.block.before)
5818 pc_stored_at += 4;
5819 }
5820 else
5821 {
5822 pc_stored_at = dsc->u.block.xfer_addr;
5823
5824 if (dsc->u.block.before)
5825 pc_stored_at -= 4;
5826 }
5827
5828 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5829 stm_insn_addr = dsc->scratch_base;
5830 offset = pc_val - stm_insn_addr;
5831
5832 if (debug_displaced)
5833 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5834 "STM instruction\n", offset);
5835
5836 /* Rewrite the stored PC to the proper value for the non-displaced original
5837 instruction. */
5838 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5839 dsc->insn_addr + offset);
5840 }
5841
5842 /* Clean up an LDM which includes the PC in the register list. We clumped all
5843 the registers in the transferred list into a contiguous range r0...rX (to
5844 avoid loading PC directly and losing control of the debugged program), so we
5845 must undo that here. */
5846
5847 static void
5848 cleanup_block_load_pc (struct gdbarch *gdbarch,
5849 struct regcache *regs,
5850 arm_displaced_step_closure *dsc)
5851 {
5852 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5853 int load_executed = condition_true (dsc->u.block.cond, status);
5854 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5855 unsigned int regs_loaded = count_one_bits (mask);
5856 unsigned int num_to_shuffle = regs_loaded, clobbered;
5857
5858 /* The method employed here will fail if the register list is fully populated
5859 (we need to avoid loading PC directly). */
5860 gdb_assert (num_to_shuffle < 16);
5861
5862 if (!load_executed)
5863 return;
5864
5865 clobbered = (1 << num_to_shuffle) - 1;
5866
5867 while (num_to_shuffle > 0)
5868 {
5869 if ((mask & (1 << write_reg)) != 0)
5870 {
5871 unsigned int read_reg = num_to_shuffle - 1;
5872
5873 if (read_reg != write_reg)
5874 {
5875 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5876 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5877 if (debug_displaced)
5878 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5879 "loaded register r%d to r%d\n"), read_reg,
5880 write_reg);
5881 }
5882 else if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5884 "r%d already in the right place\n"),
5885 write_reg);
5886
5887 clobbered &= ~(1 << write_reg);
5888
5889 num_to_shuffle--;
5890 }
5891
5892 write_reg--;
5893 }
5894
5895 /* Restore any registers we scribbled over. */
5896 for (write_reg = 0; clobbered != 0; write_reg++)
5897 {
5898 if ((clobbered & (1 << write_reg)) != 0)
5899 {
5900 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5901 CANNOT_WRITE_PC);
5902 if (debug_displaced)
5903 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5904 "clobbered register r%d\n"), write_reg);
5905 clobbered &= ~(1 << write_reg);
5906 }
5907 }
5908
5909 /* Perform register writeback manually. */
5910 if (dsc->u.block.writeback)
5911 {
5912 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5913
5914 if (dsc->u.block.increment)
5915 new_rn_val += regs_loaded * 4;
5916 else
5917 new_rn_val -= regs_loaded * 4;
5918
5919 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5920 CANNOT_WRITE_PC);
5921 }
5922 }
5923
5924 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5925 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5926
5927 static int
5928 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5929 struct regcache *regs,
5930 arm_displaced_step_closure *dsc)
5931 {
5932 int load = bit (insn, 20);
5933 int user = bit (insn, 22);
5934 int increment = bit (insn, 23);
5935 int before = bit (insn, 24);
5936 int writeback = bit (insn, 21);
5937 int rn = bits (insn, 16, 19);
5938
5939 /* Block transfers which don't mention PC can be run directly
5940 out-of-line. */
5941 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5942 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5943
5944 if (rn == ARM_PC_REGNUM)
5945 {
5946 warning (_("displaced: Unpredictable LDM or STM with "
5947 "base register r15"));
5948 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5949 }
5950
5951 if (debug_displaced)
5952 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5953 "%.8lx\n", (unsigned long) insn);
5954
5955 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5956 dsc->u.block.rn = rn;
5957
5958 dsc->u.block.load = load;
5959 dsc->u.block.user = user;
5960 dsc->u.block.increment = increment;
5961 dsc->u.block.before = before;
5962 dsc->u.block.writeback = writeback;
5963 dsc->u.block.cond = bits (insn, 28, 31);
5964
5965 dsc->u.block.regmask = insn & 0xffff;
5966
5967 if (load)
5968 {
5969 if ((insn & 0xffff) == 0xffff)
5970 {
5971 /* LDM with a fully-populated register list. This case is
5972 particularly tricky. Implement for now by fully emulating the
5973 instruction (which might not behave perfectly in all cases, but
5974 these instructions should be rare enough for that not to matter
5975 too much). */
5976 dsc->modinsn[0] = ARM_NOP;
5977
5978 dsc->cleanup = &cleanup_block_load_all;
5979 }
5980 else
5981 {
5982 /* LDM of a list of registers which includes PC. Implement by
5983 rewriting the list of registers to be transferred into a
5984 contiguous chunk r0...rX before doing the transfer, then shuffling
5985 registers into the correct places in the cleanup routine. */
5986 unsigned int regmask = insn & 0xffff;
5987 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
5988 unsigned int i;
5989
5990 for (i = 0; i < num_in_list; i++)
5991 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5992
5993 /* Writeback makes things complicated. We need to avoid clobbering
5994 the base register with one of the registers in our modified
5995 register list, but just using a different register can't work in
5996 all cases, e.g.:
5997
5998 ldm r14!, {r0-r13,pc}
5999
6000 which would need to be rewritten as:
6001
6002 ldm rN!, {r0-r14}
6003
6004 but that can't work, because there's no free register for N.
6005
6006 Solve this by turning off the writeback bit, and emulating
6007 writeback manually in the cleanup routine. */
6008
6009 if (writeback)
6010 insn &= ~(1 << 21);
6011
6012 new_regmask = (1 << num_in_list) - 1;
6013
6014 if (debug_displaced)
6015 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6016 "{..., pc}: original reg list %.4x, modified "
6017 "list %.4x\n"), rn, writeback ? "!" : "",
6018 (int) insn & 0xffff, new_regmask);
6019
6020 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6021
6022 dsc->cleanup = &cleanup_block_load_pc;
6023 }
6024 }
6025 else
6026 {
6027 /* STM of a list of registers which includes PC. Run the instruction
6028 as-is, but out of line: this will store the wrong value for the PC,
6029 so we must manually fix up the memory in the cleanup routine.
6030 Doing things this way has the advantage that we can auto-detect
6031 the offset of the PC write (which is architecture-dependent) in
6032 the cleanup routine. */
6033 dsc->modinsn[0] = insn;
6034
6035 dsc->cleanup = &cleanup_block_store_pc;
6036 }
6037
6038 return 0;
6039 }
6040
6041 static int
6042 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6043 struct regcache *regs,
6044 arm_displaced_step_closure *dsc)
6045 {
6046 int rn = bits (insn1, 0, 3);
6047 int load = bit (insn1, 4);
6048 int writeback = bit (insn1, 5);
6049
6050 /* Block transfers which don't mention PC can be run directly
6051 out-of-line. */
6052 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6053 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6054
6055 if (rn == ARM_PC_REGNUM)
6056 {
6057 warning (_("displaced: Unpredictable LDM or STM with "
6058 "base register r15"));
6059 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6060 "unpredictable ldm/stm", dsc);
6061 }
6062
6063 if (debug_displaced)
6064 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6065 "%.4x%.4x\n", insn1, insn2);
6066
6067 /* Clear bit 13, since it should be always zero. */
6068 dsc->u.block.regmask = (insn2 & 0xdfff);
6069 dsc->u.block.rn = rn;
6070
6071 dsc->u.block.load = load;
6072 dsc->u.block.user = 0;
6073 dsc->u.block.increment = bit (insn1, 7);
6074 dsc->u.block.before = bit (insn1, 8);
6075 dsc->u.block.writeback = writeback;
6076 dsc->u.block.cond = INST_AL;
6077 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6078
6079 if (load)
6080 {
6081 if (dsc->u.block.regmask == 0xffff)
6082 {
6083 /* This branch is impossible to happen. */
6084 gdb_assert (0);
6085 }
6086 else
6087 {
6088 unsigned int regmask = dsc->u.block.regmask;
6089 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6090 unsigned int i;
6091
6092 for (i = 0; i < num_in_list; i++)
6093 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6094
6095 if (writeback)
6096 insn1 &= ~(1 << 5);
6097
6098 new_regmask = (1 << num_in_list) - 1;
6099
6100 if (debug_displaced)
6101 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6102 "{..., pc}: original reg list %.4x, modified "
6103 "list %.4x\n"), rn, writeback ? "!" : "",
6104 (int) dsc->u.block.regmask, new_regmask);
6105
6106 dsc->modinsn[0] = insn1;
6107 dsc->modinsn[1] = (new_regmask & 0xffff);
6108 dsc->numinsns = 2;
6109
6110 dsc->cleanup = &cleanup_block_load_pc;
6111 }
6112 }
6113 else
6114 {
6115 dsc->modinsn[0] = insn1;
6116 dsc->modinsn[1] = insn2;
6117 dsc->numinsns = 2;
6118 dsc->cleanup = &cleanup_block_store_pc;
6119 }
6120 return 0;
6121 }
6122
6123 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6124 This is used to avoid a dependency on BFD's bfd_endian enum. */
6125
6126 ULONGEST
6127 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6128 int byte_order)
6129 {
6130 return read_memory_unsigned_integer (memaddr, len,
6131 (enum bfd_endian) byte_order);
6132 }
6133
6134 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6135
6136 CORE_ADDR
6137 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6138 CORE_ADDR val)
6139 {
6140 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6141 }
6142
6143 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6144
6145 static CORE_ADDR
6146 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6147 {
6148 return 0;
6149 }
6150
6151 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6152
6153 int
6154 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6155 {
6156 return arm_is_thumb (self->regcache);
6157 }
6158
6159 /* single_step() is called just before we want to resume the inferior,
6160 if we want to single-step it but there is no hardware or kernel
6161 single-step support. We find the target of the coming instructions
6162 and breakpoint them. */
6163
6164 std::vector<CORE_ADDR>
6165 arm_software_single_step (struct regcache *regcache)
6166 {
6167 struct gdbarch *gdbarch = regcache->arch ();
6168 struct arm_get_next_pcs next_pcs_ctx;
6169
6170 arm_get_next_pcs_ctor (&next_pcs_ctx,
6171 &arm_get_next_pcs_ops,
6172 gdbarch_byte_order (gdbarch),
6173 gdbarch_byte_order_for_code (gdbarch),
6174 0,
6175 regcache);
6176
6177 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6178
6179 for (CORE_ADDR &pc_ref : next_pcs)
6180 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6181
6182 return next_pcs;
6183 }
6184
6185 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6186 for Linux, where some SVC instructions must be treated specially. */
6187
6188 static void
6189 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6190 arm_displaced_step_closure *dsc)
6191 {
6192 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6193
6194 if (debug_displaced)
6195 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6196 "%.8lx\n", (unsigned long) resume_addr);
6197
6198 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6199 }
6200
6201
6202 /* Common copy routine for svc instruction. */
6203
6204 static int
6205 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6206 arm_displaced_step_closure *dsc)
6207 {
6208 /* Preparation: none.
6209 Insn: unmodified svc.
6210 Cleanup: pc <- insn_addr + insn_size. */
6211
6212 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6213 instruction. */
6214 dsc->wrote_to_pc = 1;
6215
6216 /* Allow OS-specific code to override SVC handling. */
6217 if (dsc->u.svc.copy_svc_os)
6218 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6219 else
6220 {
6221 dsc->cleanup = &cleanup_svc;
6222 return 0;
6223 }
6224 }
6225
6226 static int
6227 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6228 struct regcache *regs, arm_displaced_step_closure *dsc)
6229 {
6230
6231 if (debug_displaced)
6232 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6233 (unsigned long) insn);
6234
6235 dsc->modinsn[0] = insn;
6236
6237 return install_svc (gdbarch, regs, dsc);
6238 }
6239
6240 static int
6241 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6242 struct regcache *regs, arm_displaced_step_closure *dsc)
6243 {
6244
6245 if (debug_displaced)
6246 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6247 insn);
6248
6249 dsc->modinsn[0] = insn;
6250
6251 return install_svc (gdbarch, regs, dsc);
6252 }
6253
6254 /* Copy undefined instructions. */
6255
6256 static int
6257 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6258 arm_displaced_step_closure *dsc)
6259 {
6260 if (debug_displaced)
6261 fprintf_unfiltered (gdb_stdlog,
6262 "displaced: copying undefined insn %.8lx\n",
6263 (unsigned long) insn);
6264
6265 dsc->modinsn[0] = insn;
6266
6267 return 0;
6268 }
6269
6270 static int
6271 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6272 arm_displaced_step_closure *dsc)
6273 {
6274
6275 if (debug_displaced)
6276 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6277 "%.4x %.4x\n", (unsigned short) insn1,
6278 (unsigned short) insn2);
6279
6280 dsc->modinsn[0] = insn1;
6281 dsc->modinsn[1] = insn2;
6282 dsc->numinsns = 2;
6283
6284 return 0;
6285 }
6286
6287 /* Copy unpredictable instructions. */
6288
6289 static int
6290 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6291 arm_displaced_step_closure *dsc)
6292 {
6293 if (debug_displaced)
6294 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6295 "%.8lx\n", (unsigned long) insn);
6296
6297 dsc->modinsn[0] = insn;
6298
6299 return 0;
6300 }
6301
6302 /* The decode_* functions are instruction decoding helpers. They mostly follow
6303 the presentation in the ARM ARM. */
6304
6305 static int
6306 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6307 struct regcache *regs,
6308 arm_displaced_step_closure *dsc)
6309 {
6310 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6311 unsigned int rn = bits (insn, 16, 19);
6312
6313 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6314 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6315 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6316 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6317 else if ((op1 & 0x60) == 0x20)
6318 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6319 else if ((op1 & 0x71) == 0x40)
6320 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6321 dsc);
6322 else if ((op1 & 0x77) == 0x41)
6323 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6324 else if ((op1 & 0x77) == 0x45)
6325 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6326 else if ((op1 & 0x77) == 0x51)
6327 {
6328 if (rn != 0xf)
6329 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6330 else
6331 return arm_copy_unpred (gdbarch, insn, dsc);
6332 }
6333 else if ((op1 & 0x77) == 0x55)
6334 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6335 else if (op1 == 0x57)
6336 switch (op2)
6337 {
6338 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6339 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6340 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6341 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6342 default: return arm_copy_unpred (gdbarch, insn, dsc);
6343 }
6344 else if ((op1 & 0x63) == 0x43)
6345 return arm_copy_unpred (gdbarch, insn, dsc);
6346 else if ((op2 & 0x1) == 0x0)
6347 switch (op1 & ~0x80)
6348 {
6349 case 0x61:
6350 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6351 case 0x65:
6352 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6353 case 0x71: case 0x75:
6354 /* pld/pldw reg. */
6355 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6356 case 0x63: case 0x67: case 0x73: case 0x77:
6357 return arm_copy_unpred (gdbarch, insn, dsc);
6358 default:
6359 return arm_copy_undef (gdbarch, insn, dsc);
6360 }
6361 else
6362 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6363 }
6364
6365 static int
6366 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6367 struct regcache *regs,
6368 arm_displaced_step_closure *dsc)
6369 {
6370 if (bit (insn, 27) == 0)
6371 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6372 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6373 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6374 {
6375 case 0x0: case 0x2:
6376 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6377
6378 case 0x1: case 0x3:
6379 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6380
6381 case 0x4: case 0x5: case 0x6: case 0x7:
6382 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6383
6384 case 0x8:
6385 switch ((insn & 0xe00000) >> 21)
6386 {
6387 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6388 /* stc/stc2. */
6389 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6390
6391 case 0x2:
6392 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6393
6394 default:
6395 return arm_copy_undef (gdbarch, insn, dsc);
6396 }
6397
6398 case 0x9:
6399 {
6400 int rn_f = (bits (insn, 16, 19) == 0xf);
6401 switch ((insn & 0xe00000) >> 21)
6402 {
6403 case 0x1: case 0x3:
6404 /* ldc/ldc2 imm (undefined for rn == pc). */
6405 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6406 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6407
6408 case 0x2:
6409 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6410
6411 case 0x4: case 0x5: case 0x6: case 0x7:
6412 /* ldc/ldc2 lit (undefined for rn != pc). */
6413 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6414 : arm_copy_undef (gdbarch, insn, dsc);
6415
6416 default:
6417 return arm_copy_undef (gdbarch, insn, dsc);
6418 }
6419 }
6420
6421 case 0xa:
6422 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6423
6424 case 0xb:
6425 if (bits (insn, 16, 19) == 0xf)
6426 /* ldc/ldc2 lit. */
6427 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6428 else
6429 return arm_copy_undef (gdbarch, insn, dsc);
6430
6431 case 0xc:
6432 if (bit (insn, 4))
6433 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6434 else
6435 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6436
6437 case 0xd:
6438 if (bit (insn, 4))
6439 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6440 else
6441 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6442
6443 default:
6444 return arm_copy_undef (gdbarch, insn, dsc);
6445 }
6446 }
6447
6448 /* Decode miscellaneous instructions in dp/misc encoding space. */
6449
6450 static int
6451 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6452 struct regcache *regs,
6453 arm_displaced_step_closure *dsc)
6454 {
6455 unsigned int op2 = bits (insn, 4, 6);
6456 unsigned int op = bits (insn, 21, 22);
6457
6458 switch (op2)
6459 {
6460 case 0x0:
6461 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6462
6463 case 0x1:
6464 if (op == 0x1) /* bx. */
6465 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6466 else if (op == 0x3)
6467 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6468 else
6469 return arm_copy_undef (gdbarch, insn, dsc);
6470
6471 case 0x2:
6472 if (op == 0x1)
6473 /* Not really supported. */
6474 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6475 else
6476 return arm_copy_undef (gdbarch, insn, dsc);
6477
6478 case 0x3:
6479 if (op == 0x1)
6480 return arm_copy_bx_blx_reg (gdbarch, insn,
6481 regs, dsc); /* blx register. */
6482 else
6483 return arm_copy_undef (gdbarch, insn, dsc);
6484
6485 case 0x5:
6486 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6487
6488 case 0x7:
6489 if (op == 0x1)
6490 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6491 else if (op == 0x3)
6492 /* Not really supported. */
6493 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6494 /* Fall through. */
6495
6496 default:
6497 return arm_copy_undef (gdbarch, insn, dsc);
6498 }
6499 }
6500
6501 static int
6502 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6503 struct regcache *regs,
6504 arm_displaced_step_closure *dsc)
6505 {
6506 if (bit (insn, 25))
6507 switch (bits (insn, 20, 24))
6508 {
6509 case 0x10:
6510 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6511
6512 case 0x14:
6513 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6514
6515 case 0x12: case 0x16:
6516 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6517
6518 default:
6519 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6520 }
6521 else
6522 {
6523 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6524
6525 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6526 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6527 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6528 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6529 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6530 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6531 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6532 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6533 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6534 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6535 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6536 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6537 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6538 /* 2nd arg means "unprivileged". */
6539 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6540 dsc);
6541 }
6542
6543 /* Should be unreachable. */
6544 return 1;
6545 }
6546
6547 static int
6548 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6549 struct regcache *regs,
6550 arm_displaced_step_closure *dsc)
6551 {
6552 int a = bit (insn, 25), b = bit (insn, 4);
6553 uint32_t op1 = bits (insn, 20, 24);
6554
6555 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6556 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6557 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6558 else if ((!a && (op1 & 0x17) == 0x02)
6559 || (a && (op1 & 0x17) == 0x02 && !b))
6560 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6561 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6562 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6563 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6564 else if ((!a && (op1 & 0x17) == 0x03)
6565 || (a && (op1 & 0x17) == 0x03 && !b))
6566 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6567 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6568 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6569 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6570 else if ((!a && (op1 & 0x17) == 0x06)
6571 || (a && (op1 & 0x17) == 0x06 && !b))
6572 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6573 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6574 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6575 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6576 else if ((!a && (op1 & 0x17) == 0x07)
6577 || (a && (op1 & 0x17) == 0x07 && !b))
6578 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6579
6580 /* Should be unreachable. */
6581 return 1;
6582 }
6583
6584 static int
6585 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6586 arm_displaced_step_closure *dsc)
6587 {
6588 switch (bits (insn, 20, 24))
6589 {
6590 case 0x00: case 0x01: case 0x02: case 0x03:
6591 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6592
6593 case 0x04: case 0x05: case 0x06: case 0x07:
6594 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6595
6596 case 0x08: case 0x09: case 0x0a: case 0x0b:
6597 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6598 return arm_copy_unmodified (gdbarch, insn,
6599 "decode/pack/unpack/saturate/reverse", dsc);
6600
6601 case 0x18:
6602 if (bits (insn, 5, 7) == 0) /* op2. */
6603 {
6604 if (bits (insn, 12, 15) == 0xf)
6605 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6606 else
6607 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6608 }
6609 else
6610 return arm_copy_undef (gdbarch, insn, dsc);
6611
6612 case 0x1a: case 0x1b:
6613 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6614 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6615 else
6616 return arm_copy_undef (gdbarch, insn, dsc);
6617
6618 case 0x1c: case 0x1d:
6619 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6620 {
6621 if (bits (insn, 0, 3) == 0xf)
6622 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6623 else
6624 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6625 }
6626 else
6627 return arm_copy_undef (gdbarch, insn, dsc);
6628
6629 case 0x1e: case 0x1f:
6630 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6631 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6632 else
6633 return arm_copy_undef (gdbarch, insn, dsc);
6634 }
6635
6636 /* Should be unreachable. */
6637 return 1;
6638 }
6639
6640 static int
6641 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6642 struct regcache *regs,
6643 arm_displaced_step_closure *dsc)
6644 {
6645 if (bit (insn, 25))
6646 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6647 else
6648 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6649 }
6650
6651 static int
6652 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6653 struct regcache *regs,
6654 arm_displaced_step_closure *dsc)
6655 {
6656 unsigned int opcode = bits (insn, 20, 24);
6657
6658 switch (opcode)
6659 {
6660 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6661 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6662
6663 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6664 case 0x12: case 0x16:
6665 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6666
6667 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6668 case 0x13: case 0x17:
6669 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6670
6671 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6672 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6673 /* Note: no writeback for these instructions. Bit 25 will always be
6674 zero though (via caller), so the following works OK. */
6675 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6676 }
6677
6678 /* Should be unreachable. */
6679 return 1;
6680 }
6681
6682 /* Decode shifted register instructions. */
6683
6684 static int
6685 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6686 uint16_t insn2, struct regcache *regs,
6687 arm_displaced_step_closure *dsc)
6688 {
6689 /* PC is only allowed to be used in instruction MOV. */
6690
6691 unsigned int op = bits (insn1, 5, 8);
6692 unsigned int rn = bits (insn1, 0, 3);
6693
6694 if (op == 0x2 && rn == 0xf) /* MOV */
6695 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6696 else
6697 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6698 "dp (shift reg)", dsc);
6699 }
6700
6701
6702 /* Decode extension register load/store. Exactly the same as
6703 arm_decode_ext_reg_ld_st. */
6704
6705 static int
6706 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6707 uint16_t insn2, struct regcache *regs,
6708 arm_displaced_step_closure *dsc)
6709 {
6710 unsigned int opcode = bits (insn1, 4, 8);
6711
6712 switch (opcode)
6713 {
6714 case 0x04: case 0x05:
6715 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6716 "vfp/neon vmov", dsc);
6717
6718 case 0x08: case 0x0c: /* 01x00 */
6719 case 0x0a: case 0x0e: /* 01x10 */
6720 case 0x12: case 0x16: /* 10x10 */
6721 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6722 "vfp/neon vstm/vpush", dsc);
6723
6724 case 0x09: case 0x0d: /* 01x01 */
6725 case 0x0b: case 0x0f: /* 01x11 */
6726 case 0x13: case 0x17: /* 10x11 */
6727 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6728 "vfp/neon vldm/vpop", dsc);
6729
6730 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6731 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6732 "vstr", dsc);
6733 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6734 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6735 }
6736
6737 /* Should be unreachable. */
6738 return 1;
6739 }
6740
6741 static int
6742 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6743 struct regcache *regs, arm_displaced_step_closure *dsc)
6744 {
6745 unsigned int op1 = bits (insn, 20, 25);
6746 int op = bit (insn, 4);
6747 unsigned int coproc = bits (insn, 8, 11);
6748
6749 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6750 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6751 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6752 && (coproc & 0xe) != 0xa)
6753 /* stc/stc2. */
6754 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6755 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6756 && (coproc & 0xe) != 0xa)
6757 /* ldc/ldc2 imm/lit. */
6758 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6759 else if ((op1 & 0x3e) == 0x00)
6760 return arm_copy_undef (gdbarch, insn, dsc);
6761 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6762 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6763 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6764 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6765 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6766 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6767 else if ((op1 & 0x30) == 0x20 && !op)
6768 {
6769 if ((coproc & 0xe) == 0xa)
6770 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6771 else
6772 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6773 }
6774 else if ((op1 & 0x30) == 0x20 && op)
6775 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6776 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6777 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6778 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6779 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6780 else if ((op1 & 0x30) == 0x30)
6781 return arm_copy_svc (gdbarch, insn, regs, dsc);
6782 else
6783 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6784 }
6785
6786 static int
6787 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6788 uint16_t insn2, struct regcache *regs,
6789 arm_displaced_step_closure *dsc)
6790 {
6791 unsigned int coproc = bits (insn2, 8, 11);
6792 unsigned int bit_5_8 = bits (insn1, 5, 8);
6793 unsigned int bit_9 = bit (insn1, 9);
6794 unsigned int bit_4 = bit (insn1, 4);
6795
6796 if (bit_9 == 0)
6797 {
6798 if (bit_5_8 == 2)
6799 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6800 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6801 dsc);
6802 else if (bit_5_8 == 0) /* UNDEFINED. */
6803 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6804 else
6805 {
6806 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6807 if ((coproc & 0xe) == 0xa)
6808 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6809 dsc);
6810 else /* coproc is not 101x. */
6811 {
6812 if (bit_4 == 0) /* STC/STC2. */
6813 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6814 "stc/stc2", dsc);
6815 else /* LDC/LDC2 {literal, immediate}. */
6816 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6817 regs, dsc);
6818 }
6819 }
6820 }
6821 else
6822 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6823
6824 return 0;
6825 }
6826
6827 static void
6828 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6829 arm_displaced_step_closure *dsc, int rd)
6830 {
6831 /* ADR Rd, #imm
6832
6833 Rewrite as:
6834
6835 Preparation: Rd <- PC
6836 Insn: ADD Rd, #imm
6837 Cleanup: Null.
6838 */
6839
6840 /* Rd <- PC */
6841 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6842 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6843 }
6844
6845 static int
6846 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6847 arm_displaced_step_closure *dsc,
6848 int rd, unsigned int imm)
6849 {
6850
6851 /* Encoding T2: ADDS Rd, #imm */
6852 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6853
6854 install_pc_relative (gdbarch, regs, dsc, rd);
6855
6856 return 0;
6857 }
6858
6859 static int
6860 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6861 struct regcache *regs,
6862 arm_displaced_step_closure *dsc)
6863 {
6864 unsigned int rd = bits (insn, 8, 10);
6865 unsigned int imm8 = bits (insn, 0, 7);
6866
6867 if (debug_displaced)
6868 fprintf_unfiltered (gdb_stdlog,
6869 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6870 rd, imm8, insn);
6871
6872 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6873 }
6874
6875 static int
6876 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6877 uint16_t insn2, struct regcache *regs,
6878 arm_displaced_step_closure *dsc)
6879 {
6880 unsigned int rd = bits (insn2, 8, 11);
6881 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6882 extract raw immediate encoding rather than computing immediate. When
6883 generating ADD or SUB instruction, we can simply perform OR operation to
6884 set immediate into ADD. */
6885 unsigned int imm_3_8 = insn2 & 0x70ff;
6886 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6887
6888 if (debug_displaced)
6889 fprintf_unfiltered (gdb_stdlog,
6890 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6891 rd, imm_i, imm_3_8, insn1, insn2);
6892
6893 if (bit (insn1, 7)) /* Encoding T2 */
6894 {
6895 /* Encoding T3: SUB Rd, Rd, #imm */
6896 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6897 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6898 }
6899 else /* Encoding T3 */
6900 {
6901 /* Encoding T3: ADD Rd, Rd, #imm */
6902 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6903 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6904 }
6905 dsc->numinsns = 2;
6906
6907 install_pc_relative (gdbarch, regs, dsc, rd);
6908
6909 return 0;
6910 }
6911
6912 static int
6913 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6914 struct regcache *regs,
6915 arm_displaced_step_closure *dsc)
6916 {
6917 unsigned int rt = bits (insn1, 8, 10);
6918 unsigned int pc;
6919 int imm8 = (bits (insn1, 0, 7) << 2);
6920
6921 /* LDR Rd, #imm8
6922
6923 Rwrite as:
6924
6925 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6926
6927 Insn: LDR R0, [R2, R3];
6928 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6929
6930 if (debug_displaced)
6931 fprintf_unfiltered (gdb_stdlog,
6932 "displaced: copying thumb ldr r%d [pc #%d]\n"
6933 , rt, imm8);
6934
6935 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6936 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6937 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6938 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6939 /* The assembler calculates the required value of the offset from the
6940 Align(PC,4) value of this instruction to the label. */
6941 pc = pc & 0xfffffffc;
6942
6943 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6944 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6945
6946 dsc->rd = rt;
6947 dsc->u.ldst.xfersize = 4;
6948 dsc->u.ldst.rn = 0;
6949 dsc->u.ldst.immed = 0;
6950 dsc->u.ldst.writeback = 0;
6951 dsc->u.ldst.restore_r4 = 0;
6952
6953 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6954
6955 dsc->cleanup = &cleanup_load;
6956
6957 return 0;
6958 }
6959
6960 /* Copy Thumb cbnz/cbz instruction. */
6961
6962 static int
6963 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6964 struct regcache *regs,
6965 arm_displaced_step_closure *dsc)
6966 {
6967 int non_zero = bit (insn1, 11);
6968 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6969 CORE_ADDR from = dsc->insn_addr;
6970 int rn = bits (insn1, 0, 2);
6971 int rn_val = displaced_read_reg (regs, dsc, rn);
6972
6973 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6974 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
6975 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
6976 condition is false, let it be, cleanup_branch will do nothing. */
6977 if (dsc->u.branch.cond)
6978 {
6979 dsc->u.branch.cond = INST_AL;
6980 dsc->u.branch.dest = from + 4 + imm5;
6981 }
6982 else
6983 dsc->u.branch.dest = from + 2;
6984
6985 dsc->u.branch.link = 0;
6986 dsc->u.branch.exchange = 0;
6987
6988 if (debug_displaced)
6989 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6990 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6991 rn, rn_val, insn1, dsc->u.branch.dest);
6992
6993 dsc->modinsn[0] = THUMB_NOP;
6994
6995 dsc->cleanup = &cleanup_branch;
6996 return 0;
6997 }
6998
6999 /* Copy Table Branch Byte/Halfword */
7000 static int
7001 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7002 uint16_t insn2, struct regcache *regs,
7003 arm_displaced_step_closure *dsc)
7004 {
7005 ULONGEST rn_val, rm_val;
7006 int is_tbh = bit (insn2, 4);
7007 CORE_ADDR halfwords = 0;
7008 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7009
7010 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7011 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7012
7013 if (is_tbh)
7014 {
7015 gdb_byte buf[2];
7016
7017 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7018 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7019 }
7020 else
7021 {
7022 gdb_byte buf[1];
7023
7024 target_read_memory (rn_val + rm_val, buf, 1);
7025 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7026 }
7027
7028 if (debug_displaced)
7029 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7030 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7031 (unsigned int) rn_val, (unsigned int) rm_val,
7032 (unsigned int) halfwords);
7033
7034 dsc->u.branch.cond = INST_AL;
7035 dsc->u.branch.link = 0;
7036 dsc->u.branch.exchange = 0;
7037 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7038
7039 dsc->cleanup = &cleanup_branch;
7040
7041 return 0;
7042 }
7043
7044 static void
7045 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7046 arm_displaced_step_closure *dsc)
7047 {
7048 /* PC <- r7 */
7049 int val = displaced_read_reg (regs, dsc, 7);
7050 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7051
7052 /* r7 <- r8 */
7053 val = displaced_read_reg (regs, dsc, 8);
7054 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7055
7056 /* r8 <- tmp[0] */
7057 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7058
7059 }
7060
7061 static int
7062 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7063 struct regcache *regs,
7064 arm_displaced_step_closure *dsc)
7065 {
7066 dsc->u.block.regmask = insn1 & 0x00ff;
7067
7068 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7069 to :
7070
7071 (1) register list is full, that is, r0-r7 are used.
7072 Prepare: tmp[0] <- r8
7073
7074 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7075 MOV r8, r7; Move value of r7 to r8;
7076 POP {r7}; Store PC value into r7.
7077
7078 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7079
7080 (2) register list is not full, supposing there are N registers in
7081 register list (except PC, 0 <= N <= 7).
7082 Prepare: for each i, 0 - N, tmp[i] <- ri.
7083
7084 POP {r0, r1, ...., rN};
7085
7086 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7087 from tmp[] properly.
7088 */
7089 if (debug_displaced)
7090 fprintf_unfiltered (gdb_stdlog,
7091 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7092 dsc->u.block.regmask, insn1);
7093
7094 if (dsc->u.block.regmask == 0xff)
7095 {
7096 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7097
7098 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7099 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7100 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7101
7102 dsc->numinsns = 3;
7103 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7104 }
7105 else
7106 {
7107 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7108 unsigned int i;
7109 unsigned int new_regmask;
7110
7111 for (i = 0; i < num_in_list + 1; i++)
7112 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7113
7114 new_regmask = (1 << (num_in_list + 1)) - 1;
7115
7116 if (debug_displaced)
7117 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7118 "{..., pc}: original reg list %.4x,"
7119 " modified list %.4x\n"),
7120 (int) dsc->u.block.regmask, new_regmask);
7121
7122 dsc->u.block.regmask |= 0x8000;
7123 dsc->u.block.writeback = 0;
7124 dsc->u.block.cond = INST_AL;
7125
7126 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7127
7128 dsc->cleanup = &cleanup_block_load_pc;
7129 }
7130
7131 return 0;
7132 }
7133
7134 static void
7135 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7136 struct regcache *regs,
7137 arm_displaced_step_closure *dsc)
7138 {
7139 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7140 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7141 int err = 0;
7142
7143 /* 16-bit thumb instructions. */
7144 switch (op_bit_12_15)
7145 {
7146 /* Shift (imme), add, subtract, move and compare. */
7147 case 0: case 1: case 2: case 3:
7148 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7149 "shift/add/sub/mov/cmp",
7150 dsc);
7151 break;
7152 case 4:
7153 switch (op_bit_10_11)
7154 {
7155 case 0: /* Data-processing */
7156 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7157 "data-processing",
7158 dsc);
7159 break;
7160 case 1: /* Special data instructions and branch and exchange. */
7161 {
7162 unsigned short op = bits (insn1, 7, 9);
7163 if (op == 6 || op == 7) /* BX or BLX */
7164 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7165 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7166 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7167 else
7168 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7169 dsc);
7170 }
7171 break;
7172 default: /* LDR (literal) */
7173 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7174 }
7175 break;
7176 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7177 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7178 break;
7179 case 10:
7180 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7181 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7182 else /* Generate SP-relative address */
7183 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7184 break;
7185 case 11: /* Misc 16-bit instructions */
7186 {
7187 switch (bits (insn1, 8, 11))
7188 {
7189 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7190 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7191 break;
7192 case 12: case 13: /* POP */
7193 if (bit (insn1, 8)) /* PC is in register list. */
7194 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7195 else
7196 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7197 break;
7198 case 15: /* If-Then, and hints */
7199 if (bits (insn1, 0, 3))
7200 /* If-Then makes up to four following instructions conditional.
7201 IT instruction itself is not conditional, so handle it as a
7202 common unmodified instruction. */
7203 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7204 dsc);
7205 else
7206 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7207 break;
7208 default:
7209 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7210 }
7211 }
7212 break;
7213 case 12:
7214 if (op_bit_10_11 < 2) /* Store multiple registers */
7215 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7216 else /* Load multiple registers */
7217 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7218 break;
7219 case 13: /* Conditional branch and supervisor call */
7220 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7221 err = thumb_copy_b (gdbarch, insn1, dsc);
7222 else
7223 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7224 break;
7225 case 14: /* Unconditional branch */
7226 err = thumb_copy_b (gdbarch, insn1, dsc);
7227 break;
7228 default:
7229 err = 1;
7230 }
7231
7232 if (err)
7233 internal_error (__FILE__, __LINE__,
7234 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7235 }
7236
7237 static int
7238 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7239 uint16_t insn1, uint16_t insn2,
7240 struct regcache *regs,
7241 arm_displaced_step_closure *dsc)
7242 {
7243 int rt = bits (insn2, 12, 15);
7244 int rn = bits (insn1, 0, 3);
7245 int op1 = bits (insn1, 7, 8);
7246
7247 switch (bits (insn1, 5, 6))
7248 {
7249 case 0: /* Load byte and memory hints */
7250 if (rt == 0xf) /* PLD/PLI */
7251 {
7252 if (rn == 0xf)
7253 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7254 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7255 else
7256 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7257 "pli/pld", dsc);
7258 }
7259 else
7260 {
7261 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7262 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7263 1);
7264 else
7265 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7266 "ldrb{reg, immediate}/ldrbt",
7267 dsc);
7268 }
7269
7270 break;
7271 case 1: /* Load halfword and memory hints. */
7272 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7273 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7274 "pld/unalloc memhint", dsc);
7275 else
7276 {
7277 if (rn == 0xf)
7278 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7279 2);
7280 else
7281 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7282 "ldrh/ldrht", dsc);
7283 }
7284 break;
7285 case 2: /* Load word */
7286 {
7287 int insn2_bit_8_11 = bits (insn2, 8, 11);
7288
7289 if (rn == 0xf)
7290 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7291 else if (op1 == 0x1) /* Encoding T3 */
7292 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7293 0, 1);
7294 else /* op1 == 0x0 */
7295 {
7296 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7297 /* LDR (immediate) */
7298 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7299 dsc, bit (insn2, 8), 1);
7300 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7301 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7302 "ldrt", dsc);
7303 else
7304 /* LDR (register) */
7305 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7306 dsc, 0, 0);
7307 }
7308 break;
7309 }
7310 default:
7311 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7312 break;
7313 }
7314 return 0;
7315 }
7316
7317 static void
7318 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7319 uint16_t insn2, struct regcache *regs,
7320 arm_displaced_step_closure *dsc)
7321 {
7322 int err = 0;
7323 unsigned short op = bit (insn2, 15);
7324 unsigned int op1 = bits (insn1, 11, 12);
7325
7326 switch (op1)
7327 {
7328 case 1:
7329 {
7330 switch (bits (insn1, 9, 10))
7331 {
7332 case 0:
7333 if (bit (insn1, 6))
7334 {
7335 /* Load/store {dual, exclusive}, table branch. */
7336 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7337 && bits (insn2, 5, 7) == 0)
7338 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7339 dsc);
7340 else
7341 /* PC is not allowed to use in load/store {dual, exclusive}
7342 instructions. */
7343 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7344 "load/store dual/ex", dsc);
7345 }
7346 else /* load/store multiple */
7347 {
7348 switch (bits (insn1, 7, 8))
7349 {
7350 case 0: case 3: /* SRS, RFE */
7351 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7352 "srs/rfe", dsc);
7353 break;
7354 case 1: case 2: /* LDM/STM/PUSH/POP */
7355 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7356 break;
7357 }
7358 }
7359 break;
7360
7361 case 1:
7362 /* Data-processing (shift register). */
7363 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7364 dsc);
7365 break;
7366 default: /* Coprocessor instructions. */
7367 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7368 break;
7369 }
7370 break;
7371 }
7372 case 2: /* op1 = 2 */
7373 if (op) /* Branch and misc control. */
7374 {
7375 if (bit (insn2, 14) /* BLX/BL */
7376 || bit (insn2, 12) /* Unconditional branch */
7377 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7378 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7379 else
7380 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7381 "misc ctrl", dsc);
7382 }
7383 else
7384 {
7385 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7386 {
7387 int dp_op = bits (insn1, 4, 8);
7388 int rn = bits (insn1, 0, 3);
7389 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7390 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7391 regs, dsc);
7392 else
7393 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7394 "dp/pb", dsc);
7395 }
7396 else /* Data processing (modified immediate) */
7397 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7398 "dp/mi", dsc);
7399 }
7400 break;
7401 case 3: /* op1 = 3 */
7402 switch (bits (insn1, 9, 10))
7403 {
7404 case 0:
7405 if (bit (insn1, 4))
7406 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7407 regs, dsc);
7408 else /* NEON Load/Store and Store single data item */
7409 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7410 "neon elt/struct load/store",
7411 dsc);
7412 break;
7413 case 1: /* op1 = 3, bits (9, 10) == 1 */
7414 switch (bits (insn1, 7, 8))
7415 {
7416 case 0: case 1: /* Data processing (register) */
7417 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7418 "dp(reg)", dsc);
7419 break;
7420 case 2: /* Multiply and absolute difference */
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 "mul/mua/diff", dsc);
7423 break;
7424 case 3: /* Long multiply and divide */
7425 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7426 "lmul/lmua", dsc);
7427 break;
7428 }
7429 break;
7430 default: /* Coprocessor instructions */
7431 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7432 break;
7433 }
7434 break;
7435 default:
7436 err = 1;
7437 }
7438
7439 if (err)
7440 internal_error (__FILE__, __LINE__,
7441 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7442
7443 }
7444
7445 static void
7446 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7447 struct regcache *regs,
7448 arm_displaced_step_closure *dsc)
7449 {
7450 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7451 uint16_t insn1
7452 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7453
7454 if (debug_displaced)
7455 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7456 "at %.8lx\n", insn1, (unsigned long) from);
7457
7458 dsc->is_thumb = 1;
7459 dsc->insn_size = thumb_insn_size (insn1);
7460 if (thumb_insn_size (insn1) == 4)
7461 {
7462 uint16_t insn2
7463 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7464 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7465 }
7466 else
7467 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7468 }
7469
7470 void
7471 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7472 CORE_ADDR to, struct regcache *regs,
7473 arm_displaced_step_closure *dsc)
7474 {
7475 int err = 0;
7476 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7477 uint32_t insn;
7478
7479 /* Most displaced instructions use a 1-instruction scratch space, so set this
7480 here and override below if/when necessary. */
7481 dsc->numinsns = 1;
7482 dsc->insn_addr = from;
7483 dsc->scratch_base = to;
7484 dsc->cleanup = NULL;
7485 dsc->wrote_to_pc = 0;
7486
7487 if (!displaced_in_arm_mode (regs))
7488 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7489
7490 dsc->is_thumb = 0;
7491 dsc->insn_size = 4;
7492 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7493 if (debug_displaced)
7494 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7495 "at %.8lx\n", (unsigned long) insn,
7496 (unsigned long) from);
7497
7498 if ((insn & 0xf0000000) == 0xf0000000)
7499 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7500 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7501 {
7502 case 0x0: case 0x1: case 0x2: case 0x3:
7503 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7504 break;
7505
7506 case 0x4: case 0x5: case 0x6:
7507 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7508 break;
7509
7510 case 0x7:
7511 err = arm_decode_media (gdbarch, insn, dsc);
7512 break;
7513
7514 case 0x8: case 0x9: case 0xa: case 0xb:
7515 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7516 break;
7517
7518 case 0xc: case 0xd: case 0xe: case 0xf:
7519 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7520 break;
7521 }
7522
7523 if (err)
7524 internal_error (__FILE__, __LINE__,
7525 _("arm_process_displaced_insn: Instruction decode error"));
7526 }
7527
7528 /* Actually set up the scratch space for a displaced instruction. */
7529
7530 void
7531 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7532 CORE_ADDR to, arm_displaced_step_closure *dsc)
7533 {
7534 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7535 unsigned int i, len, offset;
7536 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7537 int size = dsc->is_thumb? 2 : 4;
7538 const gdb_byte *bkp_insn;
7539
7540 offset = 0;
7541 /* Poke modified instruction(s). */
7542 for (i = 0; i < dsc->numinsns; i++)
7543 {
7544 if (debug_displaced)
7545 {
7546 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7547 if (size == 4)
7548 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7549 dsc->modinsn[i]);
7550 else if (size == 2)
7551 fprintf_unfiltered (gdb_stdlog, "%.4x",
7552 (unsigned short)dsc->modinsn[i]);
7553
7554 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7555 (unsigned long) to + offset);
7556
7557 }
7558 write_memory_unsigned_integer (to + offset, size,
7559 byte_order_for_code,
7560 dsc->modinsn[i]);
7561 offset += size;
7562 }
7563
7564 /* Choose the correct breakpoint instruction. */
7565 if (dsc->is_thumb)
7566 {
7567 bkp_insn = tdep->thumb_breakpoint;
7568 len = tdep->thumb_breakpoint_size;
7569 }
7570 else
7571 {
7572 bkp_insn = tdep->arm_breakpoint;
7573 len = tdep->arm_breakpoint_size;
7574 }
7575
7576 /* Put breakpoint afterwards. */
7577 write_memory (to + offset, bkp_insn, len);
7578
7579 if (debug_displaced)
7580 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7581 paddress (gdbarch, from), paddress (gdbarch, to));
7582 }
7583
7584 /* Entry point for cleaning things up after a displaced instruction has been
7585 single-stepped. */
7586
7587 void
7588 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7589 struct displaced_step_closure *dsc_,
7590 CORE_ADDR from, CORE_ADDR to,
7591 struct regcache *regs)
7592 {
7593 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7594
7595 if (dsc->cleanup)
7596 dsc->cleanup (gdbarch, regs, dsc);
7597
7598 if (!dsc->wrote_to_pc)
7599 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7600 dsc->insn_addr + dsc->insn_size);
7601
7602 }
7603
7604 #include "bfd-in2.h"
7605 #include "libcoff.h"
7606
7607 static int
7608 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7609 {
7610 gdb_disassembler *di
7611 = static_cast<gdb_disassembler *>(info->application_data);
7612 struct gdbarch *gdbarch = di->arch ();
7613
7614 if (arm_pc_is_thumb (gdbarch, memaddr))
7615 {
7616 static asymbol *asym;
7617 static combined_entry_type ce;
7618 static struct coff_symbol_struct csym;
7619 static struct bfd fake_bfd;
7620 static bfd_target fake_target;
7621
7622 if (csym.native == NULL)
7623 {
7624 /* Create a fake symbol vector containing a Thumb symbol.
7625 This is solely so that the code in print_insn_little_arm()
7626 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7627 the presence of a Thumb symbol and switch to decoding
7628 Thumb instructions. */
7629
7630 fake_target.flavour = bfd_target_coff_flavour;
7631 fake_bfd.xvec = &fake_target;
7632 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7633 csym.native = &ce;
7634 csym.symbol.the_bfd = &fake_bfd;
7635 csym.symbol.name = "fake";
7636 asym = (asymbol *) & csym;
7637 }
7638
7639 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7640 info->symbols = &asym;
7641 }
7642 else
7643 info->symbols = NULL;
7644
7645 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7646 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7647 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7648 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
7649 in default_print_insn. */
7650 if (exec_bfd != NULL)
7651 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7652
7653 return default_print_insn (memaddr, info);
7654 }
7655
7656 /* The following define instruction sequences that will cause ARM
7657 cpu's to take an undefined instruction trap. These are used to
7658 signal a breakpoint to GDB.
7659
7660 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7661 modes. A different instruction is required for each mode. The ARM
7662 cpu's can also be big or little endian. Thus four different
7663 instructions are needed to support all cases.
7664
7665 Note: ARMv4 defines several new instructions that will take the
7666 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7667 not in fact add the new instructions. The new undefined
7668 instructions in ARMv4 are all instructions that had no defined
7669 behaviour in earlier chips. There is no guarantee that they will
7670 raise an exception, but may be treated as NOP's. In practice, it
7671 may only safe to rely on instructions matching:
7672
7673 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7674 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7675 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7676
7677 Even this may only true if the condition predicate is true. The
7678 following use a condition predicate of ALWAYS so it is always TRUE.
7679
7680 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7681 and NetBSD all use a software interrupt rather than an undefined
7682 instruction to force a trap. This can be handled by by the
7683 abi-specific code during establishment of the gdbarch vector. */
7684
7685 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7686 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7687 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7688 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7689
7690 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7691 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7692 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7693 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7694
7695 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7696
7697 static int
7698 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7699 {
7700 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7701 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7702
7703 if (arm_pc_is_thumb (gdbarch, *pcptr))
7704 {
7705 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7706
7707 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7708 check whether we are replacing a 32-bit instruction. */
7709 if (tdep->thumb2_breakpoint != NULL)
7710 {
7711 gdb_byte buf[2];
7712
7713 if (target_read_memory (*pcptr, buf, 2) == 0)
7714 {
7715 unsigned short inst1;
7716
7717 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7718 if (thumb_insn_size (inst1) == 4)
7719 return ARM_BP_KIND_THUMB2;
7720 }
7721 }
7722
7723 return ARM_BP_KIND_THUMB;
7724 }
7725 else
7726 return ARM_BP_KIND_ARM;
7727
7728 }
7729
7730 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7731
7732 static const gdb_byte *
7733 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7734 {
7735 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7736
7737 switch (kind)
7738 {
7739 case ARM_BP_KIND_ARM:
7740 *size = tdep->arm_breakpoint_size;
7741 return tdep->arm_breakpoint;
7742 case ARM_BP_KIND_THUMB:
7743 *size = tdep->thumb_breakpoint_size;
7744 return tdep->thumb_breakpoint;
7745 case ARM_BP_KIND_THUMB2:
7746 *size = tdep->thumb2_breakpoint_size;
7747 return tdep->thumb2_breakpoint;
7748 default:
7749 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7750 }
7751 }
7752
7753 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7754
7755 static int
7756 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7757 struct regcache *regcache,
7758 CORE_ADDR *pcptr)
7759 {
7760 gdb_byte buf[4];
7761
7762 /* Check the memory pointed by PC is readable. */
7763 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7764 {
7765 struct arm_get_next_pcs next_pcs_ctx;
7766
7767 arm_get_next_pcs_ctor (&next_pcs_ctx,
7768 &arm_get_next_pcs_ops,
7769 gdbarch_byte_order (gdbarch),
7770 gdbarch_byte_order_for_code (gdbarch),
7771 0,
7772 regcache);
7773
7774 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7775
7776 /* If MEMADDR is the next instruction of current pc, do the
7777 software single step computation, and get the thumb mode by
7778 the destination address. */
7779 for (CORE_ADDR pc : next_pcs)
7780 {
7781 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7782 {
7783 if (IS_THUMB_ADDR (pc))
7784 {
7785 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7786 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7787 }
7788 else
7789 return ARM_BP_KIND_ARM;
7790 }
7791 }
7792 }
7793
7794 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7795 }
7796
7797 /* Extract from an array REGBUF containing the (raw) register state a
7798 function return value of type TYPE, and copy that, in virtual
7799 format, into VALBUF. */
7800
7801 static void
7802 arm_extract_return_value (struct type *type, struct regcache *regs,
7803 gdb_byte *valbuf)
7804 {
7805 struct gdbarch *gdbarch = regs->arch ();
7806 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7807
7808 if (TYPE_CODE_FLT == TYPE_CODE (type))
7809 {
7810 switch (gdbarch_tdep (gdbarch)->fp_model)
7811 {
7812 case ARM_FLOAT_FPA:
7813 {
7814 /* The value is in register F0 in internal format. We need to
7815 extract the raw value and then convert it to the desired
7816 internal type. */
7817 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7818
7819 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7820 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7821 valbuf, type);
7822 }
7823 break;
7824
7825 case ARM_FLOAT_SOFT_FPA:
7826 case ARM_FLOAT_SOFT_VFP:
7827 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7828 not using the VFP ABI code. */
7829 case ARM_FLOAT_VFP:
7830 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7831 if (TYPE_LENGTH (type) > 4)
7832 regs->cooked_read (ARM_A1_REGNUM + 1,
7833 valbuf + ARM_INT_REGISTER_SIZE);
7834 break;
7835
7836 default:
7837 internal_error (__FILE__, __LINE__,
7838 _("arm_extract_return_value: "
7839 "Floating point model not supported"));
7840 break;
7841 }
7842 }
7843 else if (TYPE_CODE (type) == TYPE_CODE_INT
7844 || TYPE_CODE (type) == TYPE_CODE_CHAR
7845 || TYPE_CODE (type) == TYPE_CODE_BOOL
7846 || TYPE_CODE (type) == TYPE_CODE_PTR
7847 || TYPE_IS_REFERENCE (type)
7848 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7849 {
7850 /* If the type is a plain integer, then the access is
7851 straight-forward. Otherwise we have to play around a bit
7852 more. */
7853 int len = TYPE_LENGTH (type);
7854 int regno = ARM_A1_REGNUM;
7855 ULONGEST tmp;
7856
7857 while (len > 0)
7858 {
7859 /* By using store_unsigned_integer we avoid having to do
7860 anything special for small big-endian values. */
7861 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7862 store_unsigned_integer (valbuf,
7863 (len > ARM_INT_REGISTER_SIZE
7864 ? ARM_INT_REGISTER_SIZE : len),
7865 byte_order, tmp);
7866 len -= ARM_INT_REGISTER_SIZE;
7867 valbuf += ARM_INT_REGISTER_SIZE;
7868 }
7869 }
7870 else
7871 {
7872 /* For a structure or union the behaviour is as if the value had
7873 been stored to word-aligned memory and then loaded into
7874 registers with 32-bit load instruction(s). */
7875 int len = TYPE_LENGTH (type);
7876 int regno = ARM_A1_REGNUM;
7877 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7878
7879 while (len > 0)
7880 {
7881 regs->cooked_read (regno++, tmpbuf);
7882 memcpy (valbuf, tmpbuf,
7883 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7884 len -= ARM_INT_REGISTER_SIZE;
7885 valbuf += ARM_INT_REGISTER_SIZE;
7886 }
7887 }
7888 }
7889
7890
7891 /* Will a function return an aggregate type in memory or in a
7892 register? Return 0 if an aggregate type can be returned in a
7893 register, 1 if it must be returned in memory. */
7894
7895 static int
7896 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7897 {
7898 enum type_code code;
7899
7900 type = check_typedef (type);
7901
7902 /* Simple, non-aggregate types (ie not including vectors and
7903 complex) are always returned in a register (or registers). */
7904 code = TYPE_CODE (type);
7905 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7906 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7907 return 0;
7908
7909 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7910 {
7911 /* Vector values should be returned using ARM registers if they
7912 are not over 16 bytes. */
7913 return (TYPE_LENGTH (type) > 16);
7914 }
7915
7916 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7917 {
7918 /* The AAPCS says all aggregates not larger than a word are returned
7919 in a register. */
7920 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7921 return 0;
7922
7923 return 1;
7924 }
7925 else
7926 {
7927 int nRc;
7928
7929 /* All aggregate types that won't fit in a register must be returned
7930 in memory. */
7931 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
7932 return 1;
7933
7934 /* In the ARM ABI, "integer" like aggregate types are returned in
7935 registers. For an aggregate type to be integer like, its size
7936 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7937 offset of each addressable subfield must be zero. Note that bit
7938 fields are not addressable, and all addressable subfields of
7939 unions always start at offset zero.
7940
7941 This function is based on the behaviour of GCC 2.95.1.
7942 See: gcc/arm.c: arm_return_in_memory() for details.
7943
7944 Note: All versions of GCC before GCC 2.95.2 do not set up the
7945 parameters correctly for a function returning the following
7946 structure: struct { float f;}; This should be returned in memory,
7947 not a register. Richard Earnshaw sent me a patch, but I do not
7948 know of any way to detect if a function like the above has been
7949 compiled with the correct calling convention. */
7950
7951 /* Assume all other aggregate types can be returned in a register.
7952 Run a check for structures, unions and arrays. */
7953 nRc = 0;
7954
7955 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7956 {
7957 int i;
7958 /* Need to check if this struct/union is "integer" like. For
7959 this to be true, its size must be less than or equal to
7960 ARM_INT_REGISTER_SIZE and the offset of each addressable
7961 subfield must be zero. Note that bit fields are not
7962 addressable, and unions always start at offset zero. If any
7963 of the subfields is a floating point type, the struct/union
7964 cannot be an integer type. */
7965
7966 /* For each field in the object, check:
7967 1) Is it FP? --> yes, nRc = 1;
7968 2) Is it addressable (bitpos != 0) and
7969 not packed (bitsize == 0)?
7970 --> yes, nRc = 1
7971 */
7972
7973 for (i = 0; i < TYPE_NFIELDS (type); i++)
7974 {
7975 enum type_code field_type_code;
7976
7977 field_type_code
7978 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7979 i)));
7980
7981 /* Is it a floating point type field? */
7982 if (field_type_code == TYPE_CODE_FLT)
7983 {
7984 nRc = 1;
7985 break;
7986 }
7987
7988 /* If bitpos != 0, then we have to care about it. */
7989 if (TYPE_FIELD_BITPOS (type, i) != 0)
7990 {
7991 /* Bitfields are not addressable. If the field bitsize is
7992 zero, then the field is not packed. Hence it cannot be
7993 a bitfield or any other packed type. */
7994 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7995 {
7996 nRc = 1;
7997 break;
7998 }
7999 }
8000 }
8001 }
8002
8003 return nRc;
8004 }
8005 }
8006
8007 /* Write into appropriate registers a function return value of type
8008 TYPE, given in virtual format. */
8009
8010 static void
8011 arm_store_return_value (struct type *type, struct regcache *regs,
8012 const gdb_byte *valbuf)
8013 {
8014 struct gdbarch *gdbarch = regs->arch ();
8015 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8016
8017 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8018 {
8019 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8020
8021 switch (gdbarch_tdep (gdbarch)->fp_model)
8022 {
8023 case ARM_FLOAT_FPA:
8024
8025 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8026 regs->cooked_write (ARM_F0_REGNUM, buf);
8027 break;
8028
8029 case ARM_FLOAT_SOFT_FPA:
8030 case ARM_FLOAT_SOFT_VFP:
8031 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8032 not using the VFP ABI code. */
8033 case ARM_FLOAT_VFP:
8034 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8035 if (TYPE_LENGTH (type) > 4)
8036 regs->cooked_write (ARM_A1_REGNUM + 1,
8037 valbuf + ARM_INT_REGISTER_SIZE);
8038 break;
8039
8040 default:
8041 internal_error (__FILE__, __LINE__,
8042 _("arm_store_return_value: Floating "
8043 "point model not supported"));
8044 break;
8045 }
8046 }
8047 else if (TYPE_CODE (type) == TYPE_CODE_INT
8048 || TYPE_CODE (type) == TYPE_CODE_CHAR
8049 || TYPE_CODE (type) == TYPE_CODE_BOOL
8050 || TYPE_CODE (type) == TYPE_CODE_PTR
8051 || TYPE_IS_REFERENCE (type)
8052 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8053 {
8054 if (TYPE_LENGTH (type) <= 4)
8055 {
8056 /* Values of one word or less are zero/sign-extended and
8057 returned in r0. */
8058 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8059 LONGEST val = unpack_long (type, valbuf);
8060
8061 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8062 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8063 }
8064 else
8065 {
8066 /* Integral values greater than one word are stored in consecutive
8067 registers starting with r0. This will always be a multiple of
8068 the regiser size. */
8069 int len = TYPE_LENGTH (type);
8070 int regno = ARM_A1_REGNUM;
8071
8072 while (len > 0)
8073 {
8074 regs->cooked_write (regno++, valbuf);
8075 len -= ARM_INT_REGISTER_SIZE;
8076 valbuf += ARM_INT_REGISTER_SIZE;
8077 }
8078 }
8079 }
8080 else
8081 {
8082 /* For a structure or union the behaviour is as if the value had
8083 been stored to word-aligned memory and then loaded into
8084 registers with 32-bit load instruction(s). */
8085 int len = TYPE_LENGTH (type);
8086 int regno = ARM_A1_REGNUM;
8087 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8088
8089 while (len > 0)
8090 {
8091 memcpy (tmpbuf, valbuf,
8092 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8093 regs->cooked_write (regno++, tmpbuf);
8094 len -= ARM_INT_REGISTER_SIZE;
8095 valbuf += ARM_INT_REGISTER_SIZE;
8096 }
8097 }
8098 }
8099
8100
8101 /* Handle function return values. */
8102
8103 static enum return_value_convention
8104 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8105 struct type *valtype, struct regcache *regcache,
8106 gdb_byte *readbuf, const gdb_byte *writebuf)
8107 {
8108 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8109 struct type *func_type = function ? value_type (function) : NULL;
8110 enum arm_vfp_cprc_base_type vfp_base_type;
8111 int vfp_base_count;
8112
8113 if (arm_vfp_abi_for_function (gdbarch, func_type)
8114 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8115 {
8116 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8117 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8118 int i;
8119 for (i = 0; i < vfp_base_count; i++)
8120 {
8121 if (reg_char == 'q')
8122 {
8123 if (writebuf)
8124 arm_neon_quad_write (gdbarch, regcache, i,
8125 writebuf + i * unit_length);
8126
8127 if (readbuf)
8128 arm_neon_quad_read (gdbarch, regcache, i,
8129 readbuf + i * unit_length);
8130 }
8131 else
8132 {
8133 char name_buf[4];
8134 int regnum;
8135
8136 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8137 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8138 strlen (name_buf));
8139 if (writebuf)
8140 regcache->cooked_write (regnum, writebuf + i * unit_length);
8141 if (readbuf)
8142 regcache->cooked_read (regnum, readbuf + i * unit_length);
8143 }
8144 }
8145 return RETURN_VALUE_REGISTER_CONVENTION;
8146 }
8147
8148 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8149 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8150 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8151 {
8152 if (tdep->struct_return == pcc_struct_return
8153 || arm_return_in_memory (gdbarch, valtype))
8154 return RETURN_VALUE_STRUCT_CONVENTION;
8155 }
8156 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8157 {
8158 if (arm_return_in_memory (gdbarch, valtype))
8159 return RETURN_VALUE_STRUCT_CONVENTION;
8160 }
8161
8162 if (writebuf)
8163 arm_store_return_value (valtype, regcache, writebuf);
8164
8165 if (readbuf)
8166 arm_extract_return_value (valtype, regcache, readbuf);
8167
8168 return RETURN_VALUE_REGISTER_CONVENTION;
8169 }
8170
8171
8172 static int
8173 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8174 {
8175 struct gdbarch *gdbarch = get_frame_arch (frame);
8176 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8177 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8178 CORE_ADDR jb_addr;
8179 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8180
8181 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8182
8183 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8184 ARM_INT_REGISTER_SIZE))
8185 return 0;
8186
8187 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8188 return 1;
8189 }
8190 /* A call to cmse secure entry function "foo" at "a" is modified by
8191 GNU ld as "b".
8192 a) bl xxxx <foo>
8193
8194 <foo>
8195 xxxx:
8196
8197 b) bl yyyy <__acle_se_foo>
8198
8199 section .gnu.sgstubs:
8200 <foo>
8201 yyyy: sg // secure gateway
8202 b.w xxxx <__acle_se_foo> // original_branch_dest
8203
8204 <__acle_se_foo>
8205 xxxx:
8206
8207 When the control at "b", the pc contains "yyyy" (sg address) which is a
8208 trampoline and does not exist in source code. This function returns the
8209 target pc "xxxx". For more details please refer to section 5.4
8210 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8211 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8212 document on www.developer.arm.com. */
8213
8214 static CORE_ADDR
8215 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8216 {
8217 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8218 char *target_name = (char *) alloca (target_len);
8219 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8220
8221 struct bound_minimal_symbol minsym
8222 = lookup_minimal_symbol (target_name, NULL, objfile);
8223
8224 if (minsym.minsym != nullptr)
8225 return BMSYMBOL_VALUE_ADDRESS (minsym);
8226
8227 return 0;
8228 }
8229
8230 /* Return true when SEC points to ".gnu.sgstubs" section. */
8231
8232 static bool
8233 arm_is_sgstubs_section (struct obj_section *sec)
8234 {
8235 return (sec != nullptr
8236 && sec->the_bfd_section != nullptr
8237 && sec->the_bfd_section->name != nullptr
8238 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8239 }
8240
8241 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8242 return the target PC. Otherwise return 0. */
8243
8244 CORE_ADDR
8245 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8246 {
8247 const char *name;
8248 int namelen;
8249 CORE_ADDR start_addr;
8250
8251 /* Find the starting address and name of the function containing the PC. */
8252 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8253 {
8254 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8255 check here. */
8256 start_addr = arm_skip_bx_reg (frame, pc);
8257 if (start_addr != 0)
8258 return start_addr;
8259
8260 return 0;
8261 }
8262
8263 /* If PC is in a Thumb call or return stub, return the address of the
8264 target PC, which is in a register. The thunk functions are called
8265 _call_via_xx, where x is the register name. The possible names
8266 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8267 functions, named __ARM_call_via_r[0-7]. */
8268 if (startswith (name, "_call_via_")
8269 || startswith (name, "__ARM_call_via_"))
8270 {
8271 /* Use the name suffix to determine which register contains the
8272 target PC. */
8273 static const char *table[15] =
8274 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8275 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8276 };
8277 int regno;
8278 int offset = strlen (name) - 2;
8279
8280 for (regno = 0; regno <= 14; regno++)
8281 if (strcmp (&name[offset], table[regno]) == 0)
8282 return get_frame_register_unsigned (frame, regno);
8283 }
8284
8285 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8286 non-interworking calls to foo. We could decode the stubs
8287 to find the target but it's easier to use the symbol table. */
8288 namelen = strlen (name);
8289 if (name[0] == '_' && name[1] == '_'
8290 && ((namelen > 2 + strlen ("_from_thumb")
8291 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8292 || (namelen > 2 + strlen ("_from_arm")
8293 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8294 {
8295 char *target_name;
8296 int target_len = namelen - 2;
8297 struct bound_minimal_symbol minsym;
8298 struct objfile *objfile;
8299 struct obj_section *sec;
8300
8301 if (name[namelen - 1] == 'b')
8302 target_len -= strlen ("_from_thumb");
8303 else
8304 target_len -= strlen ("_from_arm");
8305
8306 target_name = (char *) alloca (target_len + 1);
8307 memcpy (target_name, name + 2, target_len);
8308 target_name[target_len] = '\0';
8309
8310 sec = find_pc_section (pc);
8311 objfile = (sec == NULL) ? NULL : sec->objfile;
8312 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8313 if (minsym.minsym != NULL)
8314 return BMSYMBOL_VALUE_ADDRESS (minsym);
8315 else
8316 return 0;
8317 }
8318
8319 struct obj_section *section = find_pc_section (pc);
8320
8321 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8322 if (arm_is_sgstubs_section (section))
8323 return arm_skip_cmse_entry (pc, name, section->objfile);
8324
8325 return 0; /* not a stub */
8326 }
8327
8328 static void
8329 set_arm_command (const char *args, int from_tty)
8330 {
8331 printf_unfiltered (_("\
8332 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8333 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8334 }
8335
8336 static void
8337 show_arm_command (const char *args, int from_tty)
8338 {
8339 cmd_show_list (showarmcmdlist, from_tty, "");
8340 }
8341
8342 static void
8343 arm_update_current_architecture (void)
8344 {
8345 struct gdbarch_info info;
8346
8347 /* If the current architecture is not ARM, we have nothing to do. */
8348 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8349 return;
8350
8351 /* Update the architecture. */
8352 gdbarch_info_init (&info);
8353
8354 if (!gdbarch_update_p (info))
8355 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8356 }
8357
8358 static void
8359 set_fp_model_sfunc (const char *args, int from_tty,
8360 struct cmd_list_element *c)
8361 {
8362 int fp_model;
8363
8364 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8365 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8366 {
8367 arm_fp_model = (enum arm_float_model) fp_model;
8368 break;
8369 }
8370
8371 if (fp_model == ARM_FLOAT_LAST)
8372 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8373 current_fp_model);
8374
8375 arm_update_current_architecture ();
8376 }
8377
8378 static void
8379 show_fp_model (struct ui_file *file, int from_tty,
8380 struct cmd_list_element *c, const char *value)
8381 {
8382 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8383
8384 if (arm_fp_model == ARM_FLOAT_AUTO
8385 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8386 fprintf_filtered (file, _("\
8387 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8388 fp_model_strings[tdep->fp_model]);
8389 else
8390 fprintf_filtered (file, _("\
8391 The current ARM floating point model is \"%s\".\n"),
8392 fp_model_strings[arm_fp_model]);
8393 }
8394
8395 static void
8396 arm_set_abi (const char *args, int from_tty,
8397 struct cmd_list_element *c)
8398 {
8399 int arm_abi;
8400
8401 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8402 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8403 {
8404 arm_abi_global = (enum arm_abi_kind) arm_abi;
8405 break;
8406 }
8407
8408 if (arm_abi == ARM_ABI_LAST)
8409 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8410 arm_abi_string);
8411
8412 arm_update_current_architecture ();
8413 }
8414
8415 static void
8416 arm_show_abi (struct ui_file *file, int from_tty,
8417 struct cmd_list_element *c, const char *value)
8418 {
8419 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8420
8421 if (arm_abi_global == ARM_ABI_AUTO
8422 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8423 fprintf_filtered (file, _("\
8424 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8425 arm_abi_strings[tdep->arm_abi]);
8426 else
8427 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8428 arm_abi_string);
8429 }
8430
8431 static void
8432 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8433 struct cmd_list_element *c, const char *value)
8434 {
8435 fprintf_filtered (file,
8436 _("The current execution mode assumed "
8437 "(when symbols are unavailable) is \"%s\".\n"),
8438 arm_fallback_mode_string);
8439 }
8440
8441 static void
8442 arm_show_force_mode (struct ui_file *file, int from_tty,
8443 struct cmd_list_element *c, const char *value)
8444 {
8445 fprintf_filtered (file,
8446 _("The current execution mode assumed "
8447 "(even when symbols are available) is \"%s\".\n"),
8448 arm_force_mode_string);
8449 }
8450
8451 /* If the user changes the register disassembly style used for info
8452 register and other commands, we have to also switch the style used
8453 in opcodes for disassembly output. This function is run in the "set
8454 arm disassembly" command, and does that. */
8455
8456 static void
8457 set_disassembly_style_sfunc (const char *args, int from_tty,
8458 struct cmd_list_element *c)
8459 {
8460 /* Convert the short style name into the long style name (eg, reg-names-*)
8461 before calling the generic set_disassembler_options() function. */
8462 std::string long_name = std::string ("reg-names-") + disassembly_style;
8463 set_disassembler_options (&long_name[0]);
8464 }
8465
8466 static void
8467 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8468 struct cmd_list_element *c, const char *value)
8469 {
8470 struct gdbarch *gdbarch = get_current_arch ();
8471 char *options = get_disassembler_options (gdbarch);
8472 const char *style = "";
8473 int len = 0;
8474 const char *opt;
8475
8476 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8477 if (CONST_STRNEQ (opt, "reg-names-"))
8478 {
8479 style = &opt[strlen ("reg-names-")];
8480 len = strcspn (style, ",");
8481 }
8482
8483 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8484 }
8485 \f
8486 /* Return the ARM register name corresponding to register I. */
8487 static const char *
8488 arm_register_name (struct gdbarch *gdbarch, int i)
8489 {
8490 const int num_regs = gdbarch_num_regs (gdbarch);
8491
8492 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8493 && i >= num_regs && i < num_regs + 32)
8494 {
8495 static const char *const vfp_pseudo_names[] = {
8496 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8497 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8498 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8499 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8500 };
8501
8502 return vfp_pseudo_names[i - num_regs];
8503 }
8504
8505 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8506 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8507 {
8508 static const char *const neon_pseudo_names[] = {
8509 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8510 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8511 };
8512
8513 return neon_pseudo_names[i - num_regs - 32];
8514 }
8515
8516 if (i >= ARRAY_SIZE (arm_register_names))
8517 /* These registers are only supported on targets which supply
8518 an XML description. */
8519 return "";
8520
8521 return arm_register_names[i];
8522 }
8523
8524 /* Test whether the coff symbol specific value corresponds to a Thumb
8525 function. */
8526
8527 static int
8528 coff_sym_is_thumb (int val)
8529 {
8530 return (val == C_THUMBEXT
8531 || val == C_THUMBSTAT
8532 || val == C_THUMBEXTFUNC
8533 || val == C_THUMBSTATFUNC
8534 || val == C_THUMBLABEL);
8535 }
8536
8537 /* arm_coff_make_msymbol_special()
8538 arm_elf_make_msymbol_special()
8539
8540 These functions test whether the COFF or ELF symbol corresponds to
8541 an address in thumb code, and set a "special" bit in a minimal
8542 symbol to indicate that it does. */
8543
8544 static void
8545 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8546 {
8547 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8548
8549 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8550 == ST_BRANCH_TO_THUMB)
8551 MSYMBOL_SET_SPECIAL (msym);
8552 }
8553
8554 static void
8555 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8556 {
8557 if (coff_sym_is_thumb (val))
8558 MSYMBOL_SET_SPECIAL (msym);
8559 }
8560
8561 static void
8562 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8563 asymbol *sym)
8564 {
8565 const char *name = bfd_asymbol_name (sym);
8566 struct arm_per_bfd *data;
8567 struct arm_mapping_symbol new_map_sym;
8568
8569 gdb_assert (name[0] == '$');
8570 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8571 return;
8572
8573 data = arm_bfd_data_key.get (objfile->obfd);
8574 if (data == NULL)
8575 data = arm_bfd_data_key.emplace (objfile->obfd,
8576 objfile->obfd->section_count);
8577 arm_mapping_symbol_vec &map
8578 = data->section_maps[bfd_asymbol_section (sym)->index];
8579
8580 new_map_sym.value = sym->value;
8581 new_map_sym.type = name[1];
8582
8583 /* Insert at the end, the vector will be sorted on first use. */
8584 map.push_back (new_map_sym);
8585 }
8586
8587 static void
8588 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8589 {
8590 struct gdbarch *gdbarch = regcache->arch ();
8591 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8592
8593 /* If necessary, set the T bit. */
8594 if (arm_apcs_32)
8595 {
8596 ULONGEST val, t_bit;
8597 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8598 t_bit = arm_psr_thumb_bit (gdbarch);
8599 if (arm_pc_is_thumb (gdbarch, pc))
8600 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8601 val | t_bit);
8602 else
8603 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8604 val & ~t_bit);
8605 }
8606 }
8607
8608 /* Read the contents of a NEON quad register, by reading from two
8609 double registers. This is used to implement the quad pseudo
8610 registers, and for argument passing in case the quad registers are
8611 missing; vectors are passed in quad registers when using the VFP
8612 ABI, even if a NEON unit is not present. REGNUM is the index of
8613 the quad register, in [0, 15]. */
8614
8615 static enum register_status
8616 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8617 int regnum, gdb_byte *buf)
8618 {
8619 char name_buf[4];
8620 gdb_byte reg_buf[8];
8621 int offset, double_regnum;
8622 enum register_status status;
8623
8624 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8625 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8626 strlen (name_buf));
8627
8628 /* d0 is always the least significant half of q0. */
8629 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8630 offset = 8;
8631 else
8632 offset = 0;
8633
8634 status = regcache->raw_read (double_regnum, reg_buf);
8635 if (status != REG_VALID)
8636 return status;
8637 memcpy (buf + offset, reg_buf, 8);
8638
8639 offset = 8 - offset;
8640 status = regcache->raw_read (double_regnum + 1, reg_buf);
8641 if (status != REG_VALID)
8642 return status;
8643 memcpy (buf + offset, reg_buf, 8);
8644
8645 return REG_VALID;
8646 }
8647
8648 static enum register_status
8649 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8650 int regnum, gdb_byte *buf)
8651 {
8652 const int num_regs = gdbarch_num_regs (gdbarch);
8653 char name_buf[4];
8654 gdb_byte reg_buf[8];
8655 int offset, double_regnum;
8656
8657 gdb_assert (regnum >= num_regs);
8658 regnum -= num_regs;
8659
8660 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8661 /* Quad-precision register. */
8662 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8663 else
8664 {
8665 enum register_status status;
8666
8667 /* Single-precision register. */
8668 gdb_assert (regnum < 32);
8669
8670 /* s0 is always the least significant half of d0. */
8671 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8672 offset = (regnum & 1) ? 0 : 4;
8673 else
8674 offset = (regnum & 1) ? 4 : 0;
8675
8676 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8677 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8678 strlen (name_buf));
8679
8680 status = regcache->raw_read (double_regnum, reg_buf);
8681 if (status == REG_VALID)
8682 memcpy (buf, reg_buf + offset, 4);
8683 return status;
8684 }
8685 }
8686
8687 /* Store the contents of BUF to a NEON quad register, by writing to
8688 two double registers. This is used to implement the quad pseudo
8689 registers, and for argument passing in case the quad registers are
8690 missing; vectors are passed in quad registers when using the VFP
8691 ABI, even if a NEON unit is not present. REGNUM is the index
8692 of the quad register, in [0, 15]. */
8693
8694 static void
8695 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8696 int regnum, const gdb_byte *buf)
8697 {
8698 char name_buf[4];
8699 int offset, double_regnum;
8700
8701 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8702 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8703 strlen (name_buf));
8704
8705 /* d0 is always the least significant half of q0. */
8706 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8707 offset = 8;
8708 else
8709 offset = 0;
8710
8711 regcache->raw_write (double_regnum, buf + offset);
8712 offset = 8 - offset;
8713 regcache->raw_write (double_regnum + 1, buf + offset);
8714 }
8715
8716 static void
8717 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8718 int regnum, const gdb_byte *buf)
8719 {
8720 const int num_regs = gdbarch_num_regs (gdbarch);
8721 char name_buf[4];
8722 gdb_byte reg_buf[8];
8723 int offset, double_regnum;
8724
8725 gdb_assert (regnum >= num_regs);
8726 regnum -= num_regs;
8727
8728 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8729 /* Quad-precision register. */
8730 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8731 else
8732 {
8733 /* Single-precision register. */
8734 gdb_assert (regnum < 32);
8735
8736 /* s0 is always the least significant half of d0. */
8737 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8738 offset = (regnum & 1) ? 0 : 4;
8739 else
8740 offset = (regnum & 1) ? 4 : 0;
8741
8742 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8743 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8744 strlen (name_buf));
8745
8746 regcache->raw_read (double_regnum, reg_buf);
8747 memcpy (reg_buf + offset, buf, 4);
8748 regcache->raw_write (double_regnum, reg_buf);
8749 }
8750 }
8751
8752 static struct value *
8753 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8754 {
8755 const int *reg_p = (const int *) baton;
8756 return value_of_register (*reg_p, frame);
8757 }
8758 \f
8759 static enum gdb_osabi
8760 arm_elf_osabi_sniffer (bfd *abfd)
8761 {
8762 unsigned int elfosabi;
8763 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8764
8765 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8766
8767 if (elfosabi == ELFOSABI_ARM)
8768 /* GNU tools use this value. Check note sections in this case,
8769 as well. */
8770 bfd_map_over_sections (abfd,
8771 generic_elf_osabi_sniff_abi_tag_sections,
8772 &osabi);
8773
8774 /* Anything else will be handled by the generic ELF sniffer. */
8775 return osabi;
8776 }
8777
8778 static int
8779 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8780 struct reggroup *group)
8781 {
8782 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8783 this, FPS register belongs to save_regroup, restore_reggroup, and
8784 all_reggroup, of course. */
8785 if (regnum == ARM_FPS_REGNUM)
8786 return (group == float_reggroup
8787 || group == save_reggroup
8788 || group == restore_reggroup
8789 || group == all_reggroup);
8790 else
8791 return default_register_reggroup_p (gdbarch, regnum, group);
8792 }
8793
8794 /* For backward-compatibility we allow two 'g' packet lengths with
8795 the remote protocol depending on whether FPA registers are
8796 supplied. M-profile targets do not have FPA registers, but some
8797 stubs already exist in the wild which use a 'g' packet which
8798 supplies them albeit with dummy values. The packet format which
8799 includes FPA registers should be considered deprecated for
8800 M-profile targets. */
8801
8802 static void
8803 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8804 {
8805 if (gdbarch_tdep (gdbarch)->is_m)
8806 {
8807 const target_desc *tdesc;
8808
8809 /* If we know from the executable this is an M-profile target,
8810 cater for remote targets whose register set layout is the
8811 same as the FPA layout. */
8812 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8813 register_remote_g_packet_guess (gdbarch,
8814 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8815 tdesc);
8816
8817 /* The regular M-profile layout. */
8818 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8819 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8820 tdesc);
8821
8822 /* M-profile plus M4F VFP. */
8823 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8824 register_remote_g_packet_guess (gdbarch,
8825 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8826 tdesc);
8827 }
8828
8829 /* Otherwise we don't have a useful guess. */
8830 }
8831
8832 /* Implement the code_of_frame_writable gdbarch method. */
8833
8834 static int
8835 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8836 {
8837 if (gdbarch_tdep (gdbarch)->is_m
8838 && get_frame_type (frame) == SIGTRAMP_FRAME)
8839 {
8840 /* M-profile exception frames return to some magic PCs, where
8841 isn't writable at all. */
8842 return 0;
8843 }
8844 else
8845 return 1;
8846 }
8847
8848 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8849 to be postfixed by a version (eg armv7hl). */
8850
8851 static const char *
8852 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8853 {
8854 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8855 return "arm(v[^- ]*)?";
8856 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8857 }
8858
8859 /* Initialize the current architecture based on INFO. If possible,
8860 re-use an architecture from ARCHES, which is a list of
8861 architectures already created during this debugging session.
8862
8863 Called e.g. at program startup, when reading a core file, and when
8864 reading a binary file. */
8865
8866 static struct gdbarch *
8867 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8868 {
8869 struct gdbarch_tdep *tdep;
8870 struct gdbarch *gdbarch;
8871 struct gdbarch_list *best_arch;
8872 enum arm_abi_kind arm_abi = arm_abi_global;
8873 enum arm_float_model fp_model = arm_fp_model;
8874 struct tdesc_arch_data *tdesc_data = NULL;
8875 int i;
8876 bool is_m = false;
8877 int vfp_register_count = 0;
8878 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8879 bool have_wmmx_registers = false;
8880 bool have_neon = false;
8881 bool have_fpa_registers = true;
8882 const struct target_desc *tdesc = info.target_desc;
8883
8884 /* If we have an object to base this architecture on, try to determine
8885 its ABI. */
8886
8887 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8888 {
8889 int ei_osabi, e_flags;
8890
8891 switch (bfd_get_flavour (info.abfd))
8892 {
8893 case bfd_target_coff_flavour:
8894 /* Assume it's an old APCS-style ABI. */
8895 /* XXX WinCE? */
8896 arm_abi = ARM_ABI_APCS;
8897 break;
8898
8899 case bfd_target_elf_flavour:
8900 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8901 e_flags = elf_elfheader (info.abfd)->e_flags;
8902
8903 if (ei_osabi == ELFOSABI_ARM)
8904 {
8905 /* GNU tools used to use this value, but do not for EABI
8906 objects. There's nowhere to tag an EABI version
8907 anyway, so assume APCS. */
8908 arm_abi = ARM_ABI_APCS;
8909 }
8910 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8911 {
8912 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8913
8914 switch (eabi_ver)
8915 {
8916 case EF_ARM_EABI_UNKNOWN:
8917 /* Assume GNU tools. */
8918 arm_abi = ARM_ABI_APCS;
8919 break;
8920
8921 case EF_ARM_EABI_VER4:
8922 case EF_ARM_EABI_VER5:
8923 arm_abi = ARM_ABI_AAPCS;
8924 /* EABI binaries default to VFP float ordering.
8925 They may also contain build attributes that can
8926 be used to identify if the VFP argument-passing
8927 ABI is in use. */
8928 if (fp_model == ARM_FLOAT_AUTO)
8929 {
8930 #ifdef HAVE_ELF
8931 switch (bfd_elf_get_obj_attr_int (info.abfd,
8932 OBJ_ATTR_PROC,
8933 Tag_ABI_VFP_args))
8934 {
8935 case AEABI_VFP_args_base:
8936 /* "The user intended FP parameter/result
8937 passing to conform to AAPCS, base
8938 variant". */
8939 fp_model = ARM_FLOAT_SOFT_VFP;
8940 break;
8941 case AEABI_VFP_args_vfp:
8942 /* "The user intended FP parameter/result
8943 passing to conform to AAPCS, VFP
8944 variant". */
8945 fp_model = ARM_FLOAT_VFP;
8946 break;
8947 case AEABI_VFP_args_toolchain:
8948 /* "The user intended FP parameter/result
8949 passing to conform to tool chain-specific
8950 conventions" - we don't know any such
8951 conventions, so leave it as "auto". */
8952 break;
8953 case AEABI_VFP_args_compatible:
8954 /* "Code is compatible with both the base
8955 and VFP variants; the user did not permit
8956 non-variadic functions to pass FP
8957 parameters/results" - leave it as
8958 "auto". */
8959 break;
8960 default:
8961 /* Attribute value not mentioned in the
8962 November 2012 ABI, so leave it as
8963 "auto". */
8964 break;
8965 }
8966 #else
8967 fp_model = ARM_FLOAT_SOFT_VFP;
8968 #endif
8969 }
8970 break;
8971
8972 default:
8973 /* Leave it as "auto". */
8974 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8975 break;
8976 }
8977
8978 #ifdef HAVE_ELF
8979 /* Detect M-profile programs. This only works if the
8980 executable file includes build attributes; GCC does
8981 copy them to the executable, but e.g. RealView does
8982 not. */
8983 int attr_arch
8984 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8985 Tag_CPU_arch);
8986 int attr_profile
8987 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8988 Tag_CPU_arch_profile);
8989
8990 /* GCC specifies the profile for v6-M; RealView only
8991 specifies the profile for architectures starting with
8992 V7 (as opposed to architectures with a tag
8993 numerically greater than TAG_CPU_ARCH_V7). */
8994 if (!tdesc_has_registers (tdesc)
8995 && (attr_arch == TAG_CPU_ARCH_V6_M
8996 || attr_arch == TAG_CPU_ARCH_V6S_M
8997 || attr_profile == 'M'))
8998 is_m = true;
8999 #endif
9000 }
9001
9002 if (fp_model == ARM_FLOAT_AUTO)
9003 {
9004 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9005 {
9006 case 0:
9007 /* Leave it as "auto". Strictly speaking this case
9008 means FPA, but almost nobody uses that now, and
9009 many toolchains fail to set the appropriate bits
9010 for the floating-point model they use. */
9011 break;
9012 case EF_ARM_SOFT_FLOAT:
9013 fp_model = ARM_FLOAT_SOFT_FPA;
9014 break;
9015 case EF_ARM_VFP_FLOAT:
9016 fp_model = ARM_FLOAT_VFP;
9017 break;
9018 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9019 fp_model = ARM_FLOAT_SOFT_VFP;
9020 break;
9021 }
9022 }
9023
9024 if (e_flags & EF_ARM_BE8)
9025 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9026
9027 break;
9028
9029 default:
9030 /* Leave it as "auto". */
9031 break;
9032 }
9033 }
9034
9035 /* Check any target description for validity. */
9036 if (tdesc_has_registers (tdesc))
9037 {
9038 /* For most registers we require GDB's default names; but also allow
9039 the numeric names for sp / lr / pc, as a convenience. */
9040 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9041 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9042 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9043
9044 const struct tdesc_feature *feature;
9045 int valid_p;
9046
9047 feature = tdesc_find_feature (tdesc,
9048 "org.gnu.gdb.arm.core");
9049 if (feature == NULL)
9050 {
9051 feature = tdesc_find_feature (tdesc,
9052 "org.gnu.gdb.arm.m-profile");
9053 if (feature == NULL)
9054 return NULL;
9055 else
9056 is_m = true;
9057 }
9058
9059 tdesc_data = tdesc_data_alloc ();
9060
9061 valid_p = 1;
9062 for (i = 0; i < ARM_SP_REGNUM; i++)
9063 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9064 arm_register_names[i]);
9065 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9066 ARM_SP_REGNUM,
9067 arm_sp_names);
9068 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9069 ARM_LR_REGNUM,
9070 arm_lr_names);
9071 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9072 ARM_PC_REGNUM,
9073 arm_pc_names);
9074 if (is_m)
9075 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9076 ARM_PS_REGNUM, "xpsr");
9077 else
9078 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9079 ARM_PS_REGNUM, "cpsr");
9080
9081 if (!valid_p)
9082 {
9083 tdesc_data_cleanup (tdesc_data);
9084 return NULL;
9085 }
9086
9087 feature = tdesc_find_feature (tdesc,
9088 "org.gnu.gdb.arm.fpa");
9089 if (feature != NULL)
9090 {
9091 valid_p = 1;
9092 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9093 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9094 arm_register_names[i]);
9095 if (!valid_p)
9096 {
9097 tdesc_data_cleanup (tdesc_data);
9098 return NULL;
9099 }
9100 }
9101 else
9102 have_fpa_registers = false;
9103
9104 feature = tdesc_find_feature (tdesc,
9105 "org.gnu.gdb.xscale.iwmmxt");
9106 if (feature != NULL)
9107 {
9108 static const char *const iwmmxt_names[] = {
9109 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9110 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9111 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9112 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9113 };
9114
9115 valid_p = 1;
9116 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9117 valid_p
9118 &= tdesc_numbered_register (feature, tdesc_data, i,
9119 iwmmxt_names[i - ARM_WR0_REGNUM]);
9120
9121 /* Check for the control registers, but do not fail if they
9122 are missing. */
9123 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9124 tdesc_numbered_register (feature, tdesc_data, i,
9125 iwmmxt_names[i - ARM_WR0_REGNUM]);
9126
9127 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9128 valid_p
9129 &= tdesc_numbered_register (feature, tdesc_data, i,
9130 iwmmxt_names[i - ARM_WR0_REGNUM]);
9131
9132 if (!valid_p)
9133 {
9134 tdesc_data_cleanup (tdesc_data);
9135 return NULL;
9136 }
9137
9138 have_wmmx_registers = true;
9139 }
9140
9141 /* If we have a VFP unit, check whether the single precision registers
9142 are present. If not, then we will synthesize them as pseudo
9143 registers. */
9144 feature = tdesc_find_feature (tdesc,
9145 "org.gnu.gdb.arm.vfp");
9146 if (feature != NULL)
9147 {
9148 static const char *const vfp_double_names[] = {
9149 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9150 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9151 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9152 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9153 };
9154
9155 /* Require the double precision registers. There must be either
9156 16 or 32. */
9157 valid_p = 1;
9158 for (i = 0; i < 32; i++)
9159 {
9160 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9161 ARM_D0_REGNUM + i,
9162 vfp_double_names[i]);
9163 if (!valid_p)
9164 break;
9165 }
9166 if (!valid_p && i == 16)
9167 valid_p = 1;
9168
9169 /* Also require FPSCR. */
9170 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9171 ARM_FPSCR_REGNUM, "fpscr");
9172 if (!valid_p)
9173 {
9174 tdesc_data_cleanup (tdesc_data);
9175 return NULL;
9176 }
9177
9178 if (tdesc_unnumbered_register (feature, "s0") == 0)
9179 have_vfp_pseudos = true;
9180
9181 vfp_register_count = i;
9182
9183 /* If we have VFP, also check for NEON. The architecture allows
9184 NEON without VFP (integer vector operations only), but GDB
9185 does not support that. */
9186 feature = tdesc_find_feature (tdesc,
9187 "org.gnu.gdb.arm.neon");
9188 if (feature != NULL)
9189 {
9190 /* NEON requires 32 double-precision registers. */
9191 if (i != 32)
9192 {
9193 tdesc_data_cleanup (tdesc_data);
9194 return NULL;
9195 }
9196
9197 /* If there are quad registers defined by the stub, use
9198 their type; otherwise (normally) provide them with
9199 the default type. */
9200 if (tdesc_unnumbered_register (feature, "q0") == 0)
9201 have_neon_pseudos = true;
9202
9203 have_neon = true;
9204 }
9205 }
9206 }
9207
9208 /* If there is already a candidate, use it. */
9209 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9210 best_arch != NULL;
9211 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9212 {
9213 if (arm_abi != ARM_ABI_AUTO
9214 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9215 continue;
9216
9217 if (fp_model != ARM_FLOAT_AUTO
9218 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9219 continue;
9220
9221 /* There are various other properties in tdep that we do not
9222 need to check here: those derived from a target description,
9223 since gdbarches with a different target description are
9224 automatically disqualified. */
9225
9226 /* Do check is_m, though, since it might come from the binary. */
9227 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9228 continue;
9229
9230 /* Found a match. */
9231 break;
9232 }
9233
9234 if (best_arch != NULL)
9235 {
9236 if (tdesc_data != NULL)
9237 tdesc_data_cleanup (tdesc_data);
9238 return best_arch->gdbarch;
9239 }
9240
9241 tdep = XCNEW (struct gdbarch_tdep);
9242 gdbarch = gdbarch_alloc (&info, tdep);
9243
9244 /* Record additional information about the architecture we are defining.
9245 These are gdbarch discriminators, like the OSABI. */
9246 tdep->arm_abi = arm_abi;
9247 tdep->fp_model = fp_model;
9248 tdep->is_m = is_m;
9249 tdep->have_fpa_registers = have_fpa_registers;
9250 tdep->have_wmmx_registers = have_wmmx_registers;
9251 gdb_assert (vfp_register_count == 0
9252 || vfp_register_count == 16
9253 || vfp_register_count == 32);
9254 tdep->vfp_register_count = vfp_register_count;
9255 tdep->have_vfp_pseudos = have_vfp_pseudos;
9256 tdep->have_neon_pseudos = have_neon_pseudos;
9257 tdep->have_neon = have_neon;
9258
9259 arm_register_g_packet_guesses (gdbarch);
9260
9261 /* Breakpoints. */
9262 switch (info.byte_order_for_code)
9263 {
9264 case BFD_ENDIAN_BIG:
9265 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9266 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9267 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9268 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9269
9270 break;
9271
9272 case BFD_ENDIAN_LITTLE:
9273 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9274 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9275 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9276 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9277
9278 break;
9279
9280 default:
9281 internal_error (__FILE__, __LINE__,
9282 _("arm_gdbarch_init: bad byte order for float format"));
9283 }
9284
9285 /* On ARM targets char defaults to unsigned. */
9286 set_gdbarch_char_signed (gdbarch, 0);
9287
9288 /* wchar_t is unsigned under the AAPCS. */
9289 if (tdep->arm_abi == ARM_ABI_AAPCS)
9290 set_gdbarch_wchar_signed (gdbarch, 0);
9291 else
9292 set_gdbarch_wchar_signed (gdbarch, 1);
9293
9294 /* Compute type alignment. */
9295 set_gdbarch_type_align (gdbarch, arm_type_align);
9296
9297 /* Note: for displaced stepping, this includes the breakpoint, and one word
9298 of additional scratch space. This setting isn't used for anything beside
9299 displaced stepping at present. */
9300 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9301
9302 /* This should be low enough for everything. */
9303 tdep->lowest_pc = 0x20;
9304 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9305
9306 /* The default, for both APCS and AAPCS, is to return small
9307 structures in registers. */
9308 tdep->struct_return = reg_struct_return;
9309
9310 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9311 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9312
9313 if (is_m)
9314 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9315
9316 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9317
9318 frame_base_set_default (gdbarch, &arm_normal_base);
9319
9320 /* Address manipulation. */
9321 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9322
9323 /* Advance PC across function entry code. */
9324 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9325
9326 /* Detect whether PC is at a point where the stack has been destroyed. */
9327 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9328
9329 /* Skip trampolines. */
9330 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9331
9332 /* The stack grows downward. */
9333 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9334
9335 /* Breakpoint manipulation. */
9336 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9337 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9338 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9339 arm_breakpoint_kind_from_current_state);
9340
9341 /* Information about registers, etc. */
9342 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9343 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9344 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9345 set_gdbarch_register_type (gdbarch, arm_register_type);
9346 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9347
9348 /* This "info float" is FPA-specific. Use the generic version if we
9349 do not have FPA. */
9350 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9351 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9352
9353 /* Internal <-> external register number maps. */
9354 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9355 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9356
9357 set_gdbarch_register_name (gdbarch, arm_register_name);
9358
9359 /* Returning results. */
9360 set_gdbarch_return_value (gdbarch, arm_return_value);
9361
9362 /* Disassembly. */
9363 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9364
9365 /* Minsymbol frobbing. */
9366 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9367 set_gdbarch_coff_make_msymbol_special (gdbarch,
9368 arm_coff_make_msymbol_special);
9369 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9370
9371 /* Thumb-2 IT block support. */
9372 set_gdbarch_adjust_breakpoint_address (gdbarch,
9373 arm_adjust_breakpoint_address);
9374
9375 /* Virtual tables. */
9376 set_gdbarch_vbit_in_delta (gdbarch, 1);
9377
9378 /* Hook in the ABI-specific overrides, if they have been registered. */
9379 gdbarch_init_osabi (info, gdbarch);
9380
9381 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9382
9383 /* Add some default predicates. */
9384 if (is_m)
9385 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9386 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9387 dwarf2_append_unwinders (gdbarch);
9388 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9389 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9390 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9391
9392 /* Now we have tuned the configuration, set a few final things,
9393 based on what the OS ABI has told us. */
9394
9395 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9396 binaries are always marked. */
9397 if (tdep->arm_abi == ARM_ABI_AUTO)
9398 tdep->arm_abi = ARM_ABI_APCS;
9399
9400 /* Watchpoints are not steppable. */
9401 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9402
9403 /* We used to default to FPA for generic ARM, but almost nobody
9404 uses that now, and we now provide a way for the user to force
9405 the model. So default to the most useful variant. */
9406 if (tdep->fp_model == ARM_FLOAT_AUTO)
9407 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9408
9409 if (tdep->jb_pc >= 0)
9410 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9411
9412 /* Floating point sizes and format. */
9413 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9414 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9415 {
9416 set_gdbarch_double_format
9417 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9418 set_gdbarch_long_double_format
9419 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9420 }
9421 else
9422 {
9423 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9424 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9425 }
9426
9427 if (have_vfp_pseudos)
9428 {
9429 /* NOTE: These are the only pseudo registers used by
9430 the ARM target at the moment. If more are added, a
9431 little more care in numbering will be needed. */
9432
9433 int num_pseudos = 32;
9434 if (have_neon_pseudos)
9435 num_pseudos += 16;
9436 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9437 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9438 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9439 }
9440
9441 if (tdesc_data)
9442 {
9443 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9444
9445 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9446
9447 /* Override tdesc_register_type to adjust the types of VFP
9448 registers for NEON. */
9449 set_gdbarch_register_type (gdbarch, arm_register_type);
9450 }
9451
9452 /* Add standard register aliases. We add aliases even for those
9453 names which are used by the current architecture - it's simpler,
9454 and does no harm, since nothing ever lists user registers. */
9455 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9456 user_reg_add (gdbarch, arm_register_aliases[i].name,
9457 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9458
9459 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9460 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9461
9462 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9463
9464 return gdbarch;
9465 }
9466
9467 static void
9468 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9469 {
9470 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9471
9472 if (tdep == NULL)
9473 return;
9474
9475 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9476 (int) tdep->fp_model);
9477 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9478 (int) tdep->have_fpa_registers);
9479 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9480 (int) tdep->have_wmmx_registers);
9481 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9482 (int) tdep->vfp_register_count);
9483 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9484 (int) tdep->have_vfp_pseudos);
9485 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9486 (int) tdep->have_neon_pseudos);
9487 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9488 (int) tdep->have_neon);
9489 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9490 (unsigned long) tdep->lowest_pc);
9491 }
9492
9493 #if GDB_SELF_TEST
9494 namespace selftests
9495 {
9496 static void arm_record_test (void);
9497 }
9498 #endif
9499
9500 void _initialize_arm_tdep ();
9501 void
9502 _initialize_arm_tdep ()
9503 {
9504 long length;
9505 int i, j;
9506 char regdesc[1024], *rdptr = regdesc;
9507 size_t rest = sizeof (regdesc);
9508
9509 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9510
9511 /* Add ourselves to objfile event chain. */
9512 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9513
9514 /* Register an ELF OS ABI sniffer for ARM binaries. */
9515 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9516 bfd_target_elf_flavour,
9517 arm_elf_osabi_sniffer);
9518
9519 /* Add root prefix command for all "set arm"/"show arm" commands. */
9520 add_prefix_cmd ("arm", no_class, set_arm_command,
9521 _("Various ARM-specific commands."),
9522 &setarmcmdlist, "set arm ", 0, &setlist);
9523
9524 add_prefix_cmd ("arm", no_class, show_arm_command,
9525 _("Various ARM-specific commands."),
9526 &showarmcmdlist, "show arm ", 0, &showlist);
9527
9528
9529 arm_disassembler_options = xstrdup ("reg-names-std");
9530 const disasm_options_t *disasm_options
9531 = &disassembler_options_arm ()->options;
9532 int num_disassembly_styles = 0;
9533 for (i = 0; disasm_options->name[i] != NULL; i++)
9534 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9535 num_disassembly_styles++;
9536
9537 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9538 valid_disassembly_styles = XNEWVEC (const char *,
9539 num_disassembly_styles + 1);
9540 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9541 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9542 {
9543 size_t offset = strlen ("reg-names-");
9544 const char *style = disasm_options->name[i];
9545 valid_disassembly_styles[j++] = &style[offset];
9546 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9547 disasm_options->description[i]);
9548 rdptr += length;
9549 rest -= length;
9550 }
9551 /* Mark the end of valid options. */
9552 valid_disassembly_styles[num_disassembly_styles] = NULL;
9553
9554 /* Create the help text. */
9555 std::string helptext = string_printf ("%s%s%s",
9556 _("The valid values are:\n"),
9557 regdesc,
9558 _("The default is \"std\"."));
9559
9560 add_setshow_enum_cmd("disassembler", no_class,
9561 valid_disassembly_styles, &disassembly_style,
9562 _("Set the disassembly style."),
9563 _("Show the disassembly style."),
9564 helptext.c_str (),
9565 set_disassembly_style_sfunc,
9566 show_disassembly_style_sfunc,
9567 &setarmcmdlist, &showarmcmdlist);
9568
9569 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9570 _("Set usage of ARM 32-bit mode."),
9571 _("Show usage of ARM 32-bit mode."),
9572 _("When off, a 26-bit PC will be used."),
9573 NULL,
9574 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9575 mode is %s. */
9576 &setarmcmdlist, &showarmcmdlist);
9577
9578 /* Add a command to allow the user to force the FPU model. */
9579 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9580 _("Set the floating point type."),
9581 _("Show the floating point type."),
9582 _("auto - Determine the FP typefrom the OS-ABI.\n\
9583 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9584 fpa - FPA co-processor (GCC compiled).\n\
9585 softvfp - Software FP with pure-endian doubles.\n\
9586 vfp - VFP co-processor."),
9587 set_fp_model_sfunc, show_fp_model,
9588 &setarmcmdlist, &showarmcmdlist);
9589
9590 /* Add a command to allow the user to force the ABI. */
9591 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9592 _("Set the ABI."),
9593 _("Show the ABI."),
9594 NULL, arm_set_abi, arm_show_abi,
9595 &setarmcmdlist, &showarmcmdlist);
9596
9597 /* Add two commands to allow the user to force the assumed
9598 execution mode. */
9599 add_setshow_enum_cmd ("fallback-mode", class_support,
9600 arm_mode_strings, &arm_fallback_mode_string,
9601 _("Set the mode assumed when symbols are unavailable."),
9602 _("Show the mode assumed when symbols are unavailable."),
9603 NULL, NULL, arm_show_fallback_mode,
9604 &setarmcmdlist, &showarmcmdlist);
9605 add_setshow_enum_cmd ("force-mode", class_support,
9606 arm_mode_strings, &arm_force_mode_string,
9607 _("Set the mode assumed even when symbols are available."),
9608 _("Show the mode assumed even when symbols are available."),
9609 NULL, NULL, arm_show_force_mode,
9610 &setarmcmdlist, &showarmcmdlist);
9611
9612 /* Debugging flag. */
9613 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9614 _("Set ARM debugging."),
9615 _("Show ARM debugging."),
9616 _("When on, arm-specific debugging is enabled."),
9617 NULL,
9618 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9619 &setdebuglist, &showdebuglist);
9620
9621 #if GDB_SELF_TEST
9622 selftests::register_test ("arm-record", selftests::arm_record_test);
9623 #endif
9624
9625 }
9626
9627 /* ARM-reversible process record data structures. */
9628
9629 #define ARM_INSN_SIZE_BYTES 4
9630 #define THUMB_INSN_SIZE_BYTES 2
9631 #define THUMB2_INSN_SIZE_BYTES 4
9632
9633
9634 /* Position of the bit within a 32-bit ARM instruction
9635 that defines whether the instruction is a load or store. */
9636 #define INSN_S_L_BIT_NUM 20
9637
9638 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9639 do \
9640 { \
9641 unsigned int reg_len = LENGTH; \
9642 if (reg_len) \
9643 { \
9644 REGS = XNEWVEC (uint32_t, reg_len); \
9645 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9646 } \
9647 } \
9648 while (0)
9649
9650 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9651 do \
9652 { \
9653 unsigned int mem_len = LENGTH; \
9654 if (mem_len) \
9655 { \
9656 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9657 memcpy(&MEMS->len, &RECORD_BUF[0], \
9658 sizeof(struct arm_mem_r) * LENGTH); \
9659 } \
9660 } \
9661 while (0)
9662
9663 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9664 #define INSN_RECORDED(ARM_RECORD) \
9665 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9666
9667 /* ARM memory record structure. */
9668 struct arm_mem_r
9669 {
9670 uint32_t len; /* Record length. */
9671 uint32_t addr; /* Memory address. */
9672 };
9673
9674 /* ARM instruction record contains opcode of current insn
9675 and execution state (before entry to decode_insn()),
9676 contains list of to-be-modified registers and
9677 memory blocks (on return from decode_insn()). */
9678
9679 typedef struct insn_decode_record_t
9680 {
9681 struct gdbarch *gdbarch;
9682 struct regcache *regcache;
9683 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9684 uint32_t arm_insn; /* Should accommodate thumb. */
9685 uint32_t cond; /* Condition code. */
9686 uint32_t opcode; /* Insn opcode. */
9687 uint32_t decode; /* Insn decode bits. */
9688 uint32_t mem_rec_count; /* No of mem records. */
9689 uint32_t reg_rec_count; /* No of reg records. */
9690 uint32_t *arm_regs; /* Registers to be saved for this record. */
9691 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9692 } insn_decode_record;
9693
9694
9695 /* Checks ARM SBZ and SBO mandatory fields. */
9696
9697 static int
9698 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9699 {
9700 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9701
9702 if (!len)
9703 return 1;
9704
9705 if (!sbo)
9706 ones = ~ones;
9707
9708 while (ones)
9709 {
9710 if (!(ones & sbo))
9711 {
9712 return 0;
9713 }
9714 ones = ones >> 1;
9715 }
9716 return 1;
9717 }
9718
9719 enum arm_record_result
9720 {
9721 ARM_RECORD_SUCCESS = 0,
9722 ARM_RECORD_FAILURE = 1
9723 };
9724
9725 typedef enum
9726 {
9727 ARM_RECORD_STRH=1,
9728 ARM_RECORD_STRD
9729 } arm_record_strx_t;
9730
9731 typedef enum
9732 {
9733 ARM_RECORD=1,
9734 THUMB_RECORD,
9735 THUMB2_RECORD
9736 } record_type_t;
9737
9738
9739 static int
9740 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9741 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9742 {
9743
9744 struct regcache *reg_cache = arm_insn_r->regcache;
9745 ULONGEST u_regval[2]= {0};
9746
9747 uint32_t reg_src1 = 0, reg_src2 = 0;
9748 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9749
9750 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9751 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9752
9753 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9754 {
9755 /* 1) Handle misc store, immediate offset. */
9756 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9757 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9758 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9759 regcache_raw_read_unsigned (reg_cache, reg_src1,
9760 &u_regval[0]);
9761 if (ARM_PC_REGNUM == reg_src1)
9762 {
9763 /* If R15 was used as Rn, hence current PC+8. */
9764 u_regval[0] = u_regval[0] + 8;
9765 }
9766 offset_8 = (immed_high << 4) | immed_low;
9767 /* Calculate target store address. */
9768 if (14 == arm_insn_r->opcode)
9769 {
9770 tgt_mem_addr = u_regval[0] + offset_8;
9771 }
9772 else
9773 {
9774 tgt_mem_addr = u_regval[0] - offset_8;
9775 }
9776 if (ARM_RECORD_STRH == str_type)
9777 {
9778 record_buf_mem[0] = 2;
9779 record_buf_mem[1] = tgt_mem_addr;
9780 arm_insn_r->mem_rec_count = 1;
9781 }
9782 else if (ARM_RECORD_STRD == str_type)
9783 {
9784 record_buf_mem[0] = 4;
9785 record_buf_mem[1] = tgt_mem_addr;
9786 record_buf_mem[2] = 4;
9787 record_buf_mem[3] = tgt_mem_addr + 4;
9788 arm_insn_r->mem_rec_count = 2;
9789 }
9790 }
9791 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9792 {
9793 /* 2) Store, register offset. */
9794 /* Get Rm. */
9795 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9796 /* Get Rn. */
9797 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9798 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9799 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9800 if (15 == reg_src2)
9801 {
9802 /* If R15 was used as Rn, hence current PC+8. */
9803 u_regval[0] = u_regval[0] + 8;
9804 }
9805 /* Calculate target store address, Rn +/- Rm, register offset. */
9806 if (12 == arm_insn_r->opcode)
9807 {
9808 tgt_mem_addr = u_regval[0] + u_regval[1];
9809 }
9810 else
9811 {
9812 tgt_mem_addr = u_regval[1] - u_regval[0];
9813 }
9814 if (ARM_RECORD_STRH == str_type)
9815 {
9816 record_buf_mem[0] = 2;
9817 record_buf_mem[1] = tgt_mem_addr;
9818 arm_insn_r->mem_rec_count = 1;
9819 }
9820 else if (ARM_RECORD_STRD == str_type)
9821 {
9822 record_buf_mem[0] = 4;
9823 record_buf_mem[1] = tgt_mem_addr;
9824 record_buf_mem[2] = 4;
9825 record_buf_mem[3] = tgt_mem_addr + 4;
9826 arm_insn_r->mem_rec_count = 2;
9827 }
9828 }
9829 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9830 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9831 {
9832 /* 3) Store, immediate pre-indexed. */
9833 /* 5) Store, immediate post-indexed. */
9834 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9835 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9836 offset_8 = (immed_high << 4) | immed_low;
9837 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9838 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9839 /* Calculate target store address, Rn +/- Rm, register offset. */
9840 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9841 {
9842 tgt_mem_addr = u_regval[0] + offset_8;
9843 }
9844 else
9845 {
9846 tgt_mem_addr = u_regval[0] - offset_8;
9847 }
9848 if (ARM_RECORD_STRH == str_type)
9849 {
9850 record_buf_mem[0] = 2;
9851 record_buf_mem[1] = tgt_mem_addr;
9852 arm_insn_r->mem_rec_count = 1;
9853 }
9854 else if (ARM_RECORD_STRD == str_type)
9855 {
9856 record_buf_mem[0] = 4;
9857 record_buf_mem[1] = tgt_mem_addr;
9858 record_buf_mem[2] = 4;
9859 record_buf_mem[3] = tgt_mem_addr + 4;
9860 arm_insn_r->mem_rec_count = 2;
9861 }
9862 /* Record Rn also as it changes. */
9863 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9864 arm_insn_r->reg_rec_count = 1;
9865 }
9866 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9867 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9868 {
9869 /* 4) Store, register pre-indexed. */
9870 /* 6) Store, register post -indexed. */
9871 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9872 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9873 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9874 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9875 /* Calculate target store address, Rn +/- Rm, register offset. */
9876 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9877 {
9878 tgt_mem_addr = u_regval[0] + u_regval[1];
9879 }
9880 else
9881 {
9882 tgt_mem_addr = u_regval[1] - u_regval[0];
9883 }
9884 if (ARM_RECORD_STRH == str_type)
9885 {
9886 record_buf_mem[0] = 2;
9887 record_buf_mem[1] = tgt_mem_addr;
9888 arm_insn_r->mem_rec_count = 1;
9889 }
9890 else if (ARM_RECORD_STRD == str_type)
9891 {
9892 record_buf_mem[0] = 4;
9893 record_buf_mem[1] = tgt_mem_addr;
9894 record_buf_mem[2] = 4;
9895 record_buf_mem[3] = tgt_mem_addr + 4;
9896 arm_insn_r->mem_rec_count = 2;
9897 }
9898 /* Record Rn also as it changes. */
9899 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9900 arm_insn_r->reg_rec_count = 1;
9901 }
9902 return 0;
9903 }
9904
9905 /* Handling ARM extension space insns. */
9906
9907 static int
9908 arm_record_extension_space (insn_decode_record *arm_insn_r)
9909 {
9910 int ret = 0; /* Return value: -1:record failure ; 0:success */
9911 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9912 uint32_t record_buf[8], record_buf_mem[8];
9913 uint32_t reg_src1 = 0;
9914 struct regcache *reg_cache = arm_insn_r->regcache;
9915 ULONGEST u_regval = 0;
9916
9917 gdb_assert (!INSN_RECORDED(arm_insn_r));
9918 /* Handle unconditional insn extension space. */
9919
9920 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9921 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9922 if (arm_insn_r->cond)
9923 {
9924 /* PLD has no affect on architectural state, it just affects
9925 the caches. */
9926 if (5 == ((opcode1 & 0xE0) >> 5))
9927 {
9928 /* BLX(1) */
9929 record_buf[0] = ARM_PS_REGNUM;
9930 record_buf[1] = ARM_LR_REGNUM;
9931 arm_insn_r->reg_rec_count = 2;
9932 }
9933 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9934 }
9935
9936
9937 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9938 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9939 {
9940 ret = -1;
9941 /* Undefined instruction on ARM V5; need to handle if later
9942 versions define it. */
9943 }
9944
9945 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9946 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9947 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9948
9949 /* Handle arithmetic insn extension space. */
9950 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9951 && !INSN_RECORDED(arm_insn_r))
9952 {
9953 /* Handle MLA(S) and MUL(S). */
9954 if (in_inclusive_range (insn_op1, 0U, 3U))
9955 {
9956 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9957 record_buf[1] = ARM_PS_REGNUM;
9958 arm_insn_r->reg_rec_count = 2;
9959 }
9960 else if (in_inclusive_range (insn_op1, 4U, 15U))
9961 {
9962 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9963 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9964 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9965 record_buf[2] = ARM_PS_REGNUM;
9966 arm_insn_r->reg_rec_count = 3;
9967 }
9968 }
9969
9970 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9971 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9972 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9973
9974 /* Handle control insn extension space. */
9975
9976 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9977 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9978 {
9979 if (!bit (arm_insn_r->arm_insn,25))
9980 {
9981 if (!bits (arm_insn_r->arm_insn, 4, 7))
9982 {
9983 if ((0 == insn_op1) || (2 == insn_op1))
9984 {
9985 /* MRS. */
9986 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9987 arm_insn_r->reg_rec_count = 1;
9988 }
9989 else if (1 == insn_op1)
9990 {
9991 /* CSPR is going to be changed. */
9992 record_buf[0] = ARM_PS_REGNUM;
9993 arm_insn_r->reg_rec_count = 1;
9994 }
9995 else if (3 == insn_op1)
9996 {
9997 /* SPSR is going to be changed. */
9998 /* We need to get SPSR value, which is yet to be done. */
9999 return -1;
10000 }
10001 }
10002 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10003 {
10004 if (1 == insn_op1)
10005 {
10006 /* BX. */
10007 record_buf[0] = ARM_PS_REGNUM;
10008 arm_insn_r->reg_rec_count = 1;
10009 }
10010 else if (3 == insn_op1)
10011 {
10012 /* CLZ. */
10013 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10014 arm_insn_r->reg_rec_count = 1;
10015 }
10016 }
10017 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10018 {
10019 /* BLX. */
10020 record_buf[0] = ARM_PS_REGNUM;
10021 record_buf[1] = ARM_LR_REGNUM;
10022 arm_insn_r->reg_rec_count = 2;
10023 }
10024 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10025 {
10026 /* QADD, QSUB, QDADD, QDSUB */
10027 record_buf[0] = ARM_PS_REGNUM;
10028 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10029 arm_insn_r->reg_rec_count = 2;
10030 }
10031 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10032 {
10033 /* BKPT. */
10034 record_buf[0] = ARM_PS_REGNUM;
10035 record_buf[1] = ARM_LR_REGNUM;
10036 arm_insn_r->reg_rec_count = 2;
10037
10038 /* Save SPSR also;how? */
10039 return -1;
10040 }
10041 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10042 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10043 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10044 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10045 )
10046 {
10047 if (0 == insn_op1 || 1 == insn_op1)
10048 {
10049 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10050 /* We dont do optimization for SMULW<y> where we
10051 need only Rd. */
10052 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10053 record_buf[1] = ARM_PS_REGNUM;
10054 arm_insn_r->reg_rec_count = 2;
10055 }
10056 else if (2 == insn_op1)
10057 {
10058 /* SMLAL<x><y>. */
10059 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10060 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10061 arm_insn_r->reg_rec_count = 2;
10062 }
10063 else if (3 == insn_op1)
10064 {
10065 /* SMUL<x><y>. */
10066 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10067 arm_insn_r->reg_rec_count = 1;
10068 }
10069 }
10070 }
10071 else
10072 {
10073 /* MSR : immediate form. */
10074 if (1 == insn_op1)
10075 {
10076 /* CSPR is going to be changed. */
10077 record_buf[0] = ARM_PS_REGNUM;
10078 arm_insn_r->reg_rec_count = 1;
10079 }
10080 else if (3 == insn_op1)
10081 {
10082 /* SPSR is going to be changed. */
10083 /* we need to get SPSR value, which is yet to be done */
10084 return -1;
10085 }
10086 }
10087 }
10088
10089 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10090 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10091 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10092
10093 /* Handle load/store insn extension space. */
10094
10095 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10096 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10097 && !INSN_RECORDED(arm_insn_r))
10098 {
10099 /* SWP/SWPB. */
10100 if (0 == insn_op1)
10101 {
10102 /* These insn, changes register and memory as well. */
10103 /* SWP or SWPB insn. */
10104 /* Get memory address given by Rn. */
10105 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10106 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10107 /* SWP insn ?, swaps word. */
10108 if (8 == arm_insn_r->opcode)
10109 {
10110 record_buf_mem[0] = 4;
10111 }
10112 else
10113 {
10114 /* SWPB insn, swaps only byte. */
10115 record_buf_mem[0] = 1;
10116 }
10117 record_buf_mem[1] = u_regval;
10118 arm_insn_r->mem_rec_count = 1;
10119 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10120 arm_insn_r->reg_rec_count = 1;
10121 }
10122 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10123 {
10124 /* STRH. */
10125 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10126 ARM_RECORD_STRH);
10127 }
10128 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10129 {
10130 /* LDRD. */
10131 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10132 record_buf[1] = record_buf[0] + 1;
10133 arm_insn_r->reg_rec_count = 2;
10134 }
10135 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10136 {
10137 /* STRD. */
10138 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10139 ARM_RECORD_STRD);
10140 }
10141 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10142 {
10143 /* LDRH, LDRSB, LDRSH. */
10144 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10145 arm_insn_r->reg_rec_count = 1;
10146 }
10147
10148 }
10149
10150 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10151 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10152 && !INSN_RECORDED(arm_insn_r))
10153 {
10154 ret = -1;
10155 /* Handle coprocessor insn extension space. */
10156 }
10157
10158 /* To be done for ARMv5 and later; as of now we return -1. */
10159 if (-1 == ret)
10160 return ret;
10161
10162 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10163 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10164
10165 return ret;
10166 }
10167
10168 /* Handling opcode 000 insns. */
10169
10170 static int
10171 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10172 {
10173 struct regcache *reg_cache = arm_insn_r->regcache;
10174 uint32_t record_buf[8], record_buf_mem[8];
10175 ULONGEST u_regval[2] = {0};
10176
10177 uint32_t reg_src1 = 0;
10178 uint32_t opcode1 = 0;
10179
10180 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10181 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10182 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10183
10184 if (!((opcode1 & 0x19) == 0x10))
10185 {
10186 /* Data-processing (register) and Data-processing (register-shifted
10187 register */
10188 /* Out of 11 shifter operands mode, all the insn modifies destination
10189 register, which is specified by 13-16 decode. */
10190 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10191 record_buf[1] = ARM_PS_REGNUM;
10192 arm_insn_r->reg_rec_count = 2;
10193 }
10194 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10195 {
10196 /* Miscellaneous instructions */
10197
10198 if (3 == arm_insn_r->decode && 0x12 == opcode1
10199 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10200 {
10201 /* Handle BLX, branch and link/exchange. */
10202 if (9 == arm_insn_r->opcode)
10203 {
10204 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10205 and R14 stores the return address. */
10206 record_buf[0] = ARM_PS_REGNUM;
10207 record_buf[1] = ARM_LR_REGNUM;
10208 arm_insn_r->reg_rec_count = 2;
10209 }
10210 }
10211 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10212 {
10213 /* Handle enhanced software breakpoint insn, BKPT. */
10214 /* CPSR is changed to be executed in ARM state, disabling normal
10215 interrupts, entering abort mode. */
10216 /* According to high vector configuration PC is set. */
10217 /* user hit breakpoint and type reverse, in
10218 that case, we need to go back with previous CPSR and
10219 Program Counter. */
10220 record_buf[0] = ARM_PS_REGNUM;
10221 record_buf[1] = ARM_LR_REGNUM;
10222 arm_insn_r->reg_rec_count = 2;
10223
10224 /* Save SPSR also; how? */
10225 return -1;
10226 }
10227 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10228 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10229 {
10230 /* Handle BX, branch and link/exchange. */
10231 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10232 record_buf[0] = ARM_PS_REGNUM;
10233 arm_insn_r->reg_rec_count = 1;
10234 }
10235 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10236 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10237 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10238 {
10239 /* Count leading zeros: CLZ. */
10240 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10241 arm_insn_r->reg_rec_count = 1;
10242 }
10243 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10244 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10245 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10246 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10247 {
10248 /* Handle MRS insn. */
10249 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10250 arm_insn_r->reg_rec_count = 1;
10251 }
10252 }
10253 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10254 {
10255 /* Multiply and multiply-accumulate */
10256
10257 /* Handle multiply instructions. */
10258 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10259 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10260 {
10261 /* Handle MLA and MUL. */
10262 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10263 record_buf[1] = ARM_PS_REGNUM;
10264 arm_insn_r->reg_rec_count = 2;
10265 }
10266 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10267 {
10268 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10269 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10270 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10271 record_buf[2] = ARM_PS_REGNUM;
10272 arm_insn_r->reg_rec_count = 3;
10273 }
10274 }
10275 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10276 {
10277 /* Synchronization primitives */
10278
10279 /* Handling SWP, SWPB. */
10280 /* These insn, changes register and memory as well. */
10281 /* SWP or SWPB insn. */
10282
10283 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10284 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10285 /* SWP insn ?, swaps word. */
10286 if (8 == arm_insn_r->opcode)
10287 {
10288 record_buf_mem[0] = 4;
10289 }
10290 else
10291 {
10292 /* SWPB insn, swaps only byte. */
10293 record_buf_mem[0] = 1;
10294 }
10295 record_buf_mem[1] = u_regval[0];
10296 arm_insn_r->mem_rec_count = 1;
10297 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10298 arm_insn_r->reg_rec_count = 1;
10299 }
10300 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10301 || 15 == arm_insn_r->decode)
10302 {
10303 if ((opcode1 & 0x12) == 2)
10304 {
10305 /* Extra load/store (unprivileged) */
10306 return -1;
10307 }
10308 else
10309 {
10310 /* Extra load/store */
10311 switch (bits (arm_insn_r->arm_insn, 5, 6))
10312 {
10313 case 1:
10314 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10315 {
10316 /* STRH (register), STRH (immediate) */
10317 arm_record_strx (arm_insn_r, &record_buf[0],
10318 &record_buf_mem[0], ARM_RECORD_STRH);
10319 }
10320 else if ((opcode1 & 0x05) == 0x1)
10321 {
10322 /* LDRH (register) */
10323 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10324 arm_insn_r->reg_rec_count = 1;
10325
10326 if (bit (arm_insn_r->arm_insn, 21))
10327 {
10328 /* Write back to Rn. */
10329 record_buf[arm_insn_r->reg_rec_count++]
10330 = bits (arm_insn_r->arm_insn, 16, 19);
10331 }
10332 }
10333 else if ((opcode1 & 0x05) == 0x5)
10334 {
10335 /* LDRH (immediate), LDRH (literal) */
10336 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10337
10338 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10339 arm_insn_r->reg_rec_count = 1;
10340
10341 if (rn != 15)
10342 {
10343 /*LDRH (immediate) */
10344 if (bit (arm_insn_r->arm_insn, 21))
10345 {
10346 /* Write back to Rn. */
10347 record_buf[arm_insn_r->reg_rec_count++] = rn;
10348 }
10349 }
10350 }
10351 else
10352 return -1;
10353 break;
10354 case 2:
10355 if ((opcode1 & 0x05) == 0x0)
10356 {
10357 /* LDRD (register) */
10358 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10359 record_buf[1] = record_buf[0] + 1;
10360 arm_insn_r->reg_rec_count = 2;
10361
10362 if (bit (arm_insn_r->arm_insn, 21))
10363 {
10364 /* Write back to Rn. */
10365 record_buf[arm_insn_r->reg_rec_count++]
10366 = bits (arm_insn_r->arm_insn, 16, 19);
10367 }
10368 }
10369 else if ((opcode1 & 0x05) == 0x1)
10370 {
10371 /* LDRSB (register) */
10372 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10373 arm_insn_r->reg_rec_count = 1;
10374
10375 if (bit (arm_insn_r->arm_insn, 21))
10376 {
10377 /* Write back to Rn. */
10378 record_buf[arm_insn_r->reg_rec_count++]
10379 = bits (arm_insn_r->arm_insn, 16, 19);
10380 }
10381 }
10382 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10383 {
10384 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10385 LDRSB (literal) */
10386 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10387
10388 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10389 arm_insn_r->reg_rec_count = 1;
10390
10391 if (rn != 15)
10392 {
10393 /*LDRD (immediate), LDRSB (immediate) */
10394 if (bit (arm_insn_r->arm_insn, 21))
10395 {
10396 /* Write back to Rn. */
10397 record_buf[arm_insn_r->reg_rec_count++] = rn;
10398 }
10399 }
10400 }
10401 else
10402 return -1;
10403 break;
10404 case 3:
10405 if ((opcode1 & 0x05) == 0x0)
10406 {
10407 /* STRD (register) */
10408 arm_record_strx (arm_insn_r, &record_buf[0],
10409 &record_buf_mem[0], ARM_RECORD_STRD);
10410 }
10411 else if ((opcode1 & 0x05) == 0x1)
10412 {
10413 /* LDRSH (register) */
10414 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10415 arm_insn_r->reg_rec_count = 1;
10416
10417 if (bit (arm_insn_r->arm_insn, 21))
10418 {
10419 /* Write back to Rn. */
10420 record_buf[arm_insn_r->reg_rec_count++]
10421 = bits (arm_insn_r->arm_insn, 16, 19);
10422 }
10423 }
10424 else if ((opcode1 & 0x05) == 0x4)
10425 {
10426 /* STRD (immediate) */
10427 arm_record_strx (arm_insn_r, &record_buf[0],
10428 &record_buf_mem[0], ARM_RECORD_STRD);
10429 }
10430 else if ((opcode1 & 0x05) == 0x5)
10431 {
10432 /* LDRSH (immediate), LDRSH (literal) */
10433 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10434 arm_insn_r->reg_rec_count = 1;
10435
10436 if (bit (arm_insn_r->arm_insn, 21))
10437 {
10438 /* Write back to Rn. */
10439 record_buf[arm_insn_r->reg_rec_count++]
10440 = bits (arm_insn_r->arm_insn, 16, 19);
10441 }
10442 }
10443 else
10444 return -1;
10445 break;
10446 default:
10447 return -1;
10448 }
10449 }
10450 }
10451 else
10452 {
10453 return -1;
10454 }
10455
10456 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10457 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10458 return 0;
10459 }
10460
10461 /* Handling opcode 001 insns. */
10462
10463 static int
10464 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10465 {
10466 uint32_t record_buf[8], record_buf_mem[8];
10467
10468 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10469 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10470
10471 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10472 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10473 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10474 )
10475 {
10476 /* Handle MSR insn. */
10477 if (9 == arm_insn_r->opcode)
10478 {
10479 /* CSPR is going to be changed. */
10480 record_buf[0] = ARM_PS_REGNUM;
10481 arm_insn_r->reg_rec_count = 1;
10482 }
10483 else
10484 {
10485 /* SPSR is going to be changed. */
10486 }
10487 }
10488 else if (arm_insn_r->opcode <= 15)
10489 {
10490 /* Normal data processing insns. */
10491 /* Out of 11 shifter operands mode, all the insn modifies destination
10492 register, which is specified by 13-16 decode. */
10493 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10494 record_buf[1] = ARM_PS_REGNUM;
10495 arm_insn_r->reg_rec_count = 2;
10496 }
10497 else
10498 {
10499 return -1;
10500 }
10501
10502 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10503 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10504 return 0;
10505 }
10506
10507 static int
10508 arm_record_media (insn_decode_record *arm_insn_r)
10509 {
10510 uint32_t record_buf[8];
10511
10512 switch (bits (arm_insn_r->arm_insn, 22, 24))
10513 {
10514 case 0:
10515 /* Parallel addition and subtraction, signed */
10516 case 1:
10517 /* Parallel addition and subtraction, unsigned */
10518 case 2:
10519 case 3:
10520 /* Packing, unpacking, saturation and reversal */
10521 {
10522 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10523
10524 record_buf[arm_insn_r->reg_rec_count++] = rd;
10525 }
10526 break;
10527
10528 case 4:
10529 case 5:
10530 /* Signed multiplies */
10531 {
10532 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10533 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10534
10535 record_buf[arm_insn_r->reg_rec_count++] = rd;
10536 if (op1 == 0x0)
10537 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10538 else if (op1 == 0x4)
10539 record_buf[arm_insn_r->reg_rec_count++]
10540 = bits (arm_insn_r->arm_insn, 12, 15);
10541 }
10542 break;
10543
10544 case 6:
10545 {
10546 if (bit (arm_insn_r->arm_insn, 21)
10547 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10548 {
10549 /* SBFX */
10550 record_buf[arm_insn_r->reg_rec_count++]
10551 = bits (arm_insn_r->arm_insn, 12, 15);
10552 }
10553 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10554 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10555 {
10556 /* USAD8 and USADA8 */
10557 record_buf[arm_insn_r->reg_rec_count++]
10558 = bits (arm_insn_r->arm_insn, 16, 19);
10559 }
10560 }
10561 break;
10562
10563 case 7:
10564 {
10565 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10566 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10567 {
10568 /* Permanently UNDEFINED */
10569 return -1;
10570 }
10571 else
10572 {
10573 /* BFC, BFI and UBFX */
10574 record_buf[arm_insn_r->reg_rec_count++]
10575 = bits (arm_insn_r->arm_insn, 12, 15);
10576 }
10577 }
10578 break;
10579
10580 default:
10581 return -1;
10582 }
10583
10584 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10585
10586 return 0;
10587 }
10588
10589 /* Handle ARM mode instructions with opcode 010. */
10590
10591 static int
10592 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10593 {
10594 struct regcache *reg_cache = arm_insn_r->regcache;
10595
10596 uint32_t reg_base , reg_dest;
10597 uint32_t offset_12, tgt_mem_addr;
10598 uint32_t record_buf[8], record_buf_mem[8];
10599 unsigned char wback;
10600 ULONGEST u_regval;
10601
10602 /* Calculate wback. */
10603 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10604 || (bit (arm_insn_r->arm_insn, 21) == 1);
10605
10606 arm_insn_r->reg_rec_count = 0;
10607 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10608
10609 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10610 {
10611 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10612 and LDRT. */
10613
10614 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10615 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10616
10617 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10618 preceeds a LDR instruction having R15 as reg_base, it
10619 emulates a branch and link instruction, and hence we need to save
10620 CPSR and PC as well. */
10621 if (ARM_PC_REGNUM == reg_dest)
10622 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10623
10624 /* If wback is true, also save the base register, which is going to be
10625 written to. */
10626 if (wback)
10627 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10628 }
10629 else
10630 {
10631 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10632
10633 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10634 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10635
10636 /* Handle bit U. */
10637 if (bit (arm_insn_r->arm_insn, 23))
10638 {
10639 /* U == 1: Add the offset. */
10640 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10641 }
10642 else
10643 {
10644 /* U == 0: subtract the offset. */
10645 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10646 }
10647
10648 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10649 bytes. */
10650 if (bit (arm_insn_r->arm_insn, 22))
10651 {
10652 /* STRB and STRBT: 1 byte. */
10653 record_buf_mem[0] = 1;
10654 }
10655 else
10656 {
10657 /* STR and STRT: 4 bytes. */
10658 record_buf_mem[0] = 4;
10659 }
10660
10661 /* Handle bit P. */
10662 if (bit (arm_insn_r->arm_insn, 24))
10663 record_buf_mem[1] = tgt_mem_addr;
10664 else
10665 record_buf_mem[1] = (uint32_t) u_regval;
10666
10667 arm_insn_r->mem_rec_count = 1;
10668
10669 /* If wback is true, also save the base register, which is going to be
10670 written to. */
10671 if (wback)
10672 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10673 }
10674
10675 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10676 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10677 return 0;
10678 }
10679
10680 /* Handling opcode 011 insns. */
10681
10682 static int
10683 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10684 {
10685 struct regcache *reg_cache = arm_insn_r->regcache;
10686
10687 uint32_t shift_imm = 0;
10688 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10689 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10690 uint32_t record_buf[8], record_buf_mem[8];
10691
10692 LONGEST s_word;
10693 ULONGEST u_regval[2];
10694
10695 if (bit (arm_insn_r->arm_insn, 4))
10696 return arm_record_media (arm_insn_r);
10697
10698 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10699 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10700
10701 /* Handle enhanced store insns and LDRD DSP insn,
10702 order begins according to addressing modes for store insns
10703 STRH insn. */
10704
10705 /* LDR or STR? */
10706 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10707 {
10708 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10709 /* LDR insn has a capability to do branching, if
10710 MOV LR, PC is preceded by LDR insn having Rn as R15
10711 in that case, it emulates branch and link insn, and hence we
10712 need to save CSPR and PC as well. */
10713 if (15 != reg_dest)
10714 {
10715 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10716 arm_insn_r->reg_rec_count = 1;
10717 }
10718 else
10719 {
10720 record_buf[0] = reg_dest;
10721 record_buf[1] = ARM_PS_REGNUM;
10722 arm_insn_r->reg_rec_count = 2;
10723 }
10724 }
10725 else
10726 {
10727 if (! bits (arm_insn_r->arm_insn, 4, 11))
10728 {
10729 /* Store insn, register offset and register pre-indexed,
10730 register post-indexed. */
10731 /* Get Rm. */
10732 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10733 /* Get Rn. */
10734 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10735 regcache_raw_read_unsigned (reg_cache, reg_src1
10736 , &u_regval[0]);
10737 regcache_raw_read_unsigned (reg_cache, reg_src2
10738 , &u_regval[1]);
10739 if (15 == reg_src2)
10740 {
10741 /* If R15 was used as Rn, hence current PC+8. */
10742 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10743 u_regval[0] = u_regval[0] + 8;
10744 }
10745 /* Calculate target store address, Rn +/- Rm, register offset. */
10746 /* U == 1. */
10747 if (bit (arm_insn_r->arm_insn, 23))
10748 {
10749 tgt_mem_addr = u_regval[0] + u_regval[1];
10750 }
10751 else
10752 {
10753 tgt_mem_addr = u_regval[1] - u_regval[0];
10754 }
10755
10756 switch (arm_insn_r->opcode)
10757 {
10758 /* STR. */
10759 case 8:
10760 case 12:
10761 /* STR. */
10762 case 9:
10763 case 13:
10764 /* STRT. */
10765 case 1:
10766 case 5:
10767 /* STR. */
10768 case 0:
10769 case 4:
10770 record_buf_mem[0] = 4;
10771 break;
10772
10773 /* STRB. */
10774 case 10:
10775 case 14:
10776 /* STRB. */
10777 case 11:
10778 case 15:
10779 /* STRBT. */
10780 case 3:
10781 case 7:
10782 /* STRB. */
10783 case 2:
10784 case 6:
10785 record_buf_mem[0] = 1;
10786 break;
10787
10788 default:
10789 gdb_assert_not_reached ("no decoding pattern found");
10790 break;
10791 }
10792 record_buf_mem[1] = tgt_mem_addr;
10793 arm_insn_r->mem_rec_count = 1;
10794
10795 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10796 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10797 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10798 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10799 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10800 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10801 )
10802 {
10803 /* Rn is going to be changed in pre-indexed mode and
10804 post-indexed mode as well. */
10805 record_buf[0] = reg_src2;
10806 arm_insn_r->reg_rec_count = 1;
10807 }
10808 }
10809 else
10810 {
10811 /* Store insn, scaled register offset; scaled pre-indexed. */
10812 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10813 /* Get Rm. */
10814 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10815 /* Get Rn. */
10816 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10817 /* Get shift_imm. */
10818 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10819 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10820 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10821 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10822 /* Offset_12 used as shift. */
10823 switch (offset_12)
10824 {
10825 case 0:
10826 /* Offset_12 used as index. */
10827 offset_12 = u_regval[0] << shift_imm;
10828 break;
10829
10830 case 1:
10831 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10832 break;
10833
10834 case 2:
10835 if (!shift_imm)
10836 {
10837 if (bit (u_regval[0], 31))
10838 {
10839 offset_12 = 0xFFFFFFFF;
10840 }
10841 else
10842 {
10843 offset_12 = 0;
10844 }
10845 }
10846 else
10847 {
10848 /* This is arithmetic shift. */
10849 offset_12 = s_word >> shift_imm;
10850 }
10851 break;
10852
10853 case 3:
10854 if (!shift_imm)
10855 {
10856 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10857 &u_regval[1]);
10858 /* Get C flag value and shift it by 31. */
10859 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10860 | (u_regval[0]) >> 1);
10861 }
10862 else
10863 {
10864 offset_12 = (u_regval[0] >> shift_imm) \
10865 | (u_regval[0] <<
10866 (sizeof(uint32_t) - shift_imm));
10867 }
10868 break;
10869
10870 default:
10871 gdb_assert_not_reached ("no decoding pattern found");
10872 break;
10873 }
10874
10875 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10876 /* bit U set. */
10877 if (bit (arm_insn_r->arm_insn, 23))
10878 {
10879 tgt_mem_addr = u_regval[1] + offset_12;
10880 }
10881 else
10882 {
10883 tgt_mem_addr = u_regval[1] - offset_12;
10884 }
10885
10886 switch (arm_insn_r->opcode)
10887 {
10888 /* STR. */
10889 case 8:
10890 case 12:
10891 /* STR. */
10892 case 9:
10893 case 13:
10894 /* STRT. */
10895 case 1:
10896 case 5:
10897 /* STR. */
10898 case 0:
10899 case 4:
10900 record_buf_mem[0] = 4;
10901 break;
10902
10903 /* STRB. */
10904 case 10:
10905 case 14:
10906 /* STRB. */
10907 case 11:
10908 case 15:
10909 /* STRBT. */
10910 case 3:
10911 case 7:
10912 /* STRB. */
10913 case 2:
10914 case 6:
10915 record_buf_mem[0] = 1;
10916 break;
10917
10918 default:
10919 gdb_assert_not_reached ("no decoding pattern found");
10920 break;
10921 }
10922 record_buf_mem[1] = tgt_mem_addr;
10923 arm_insn_r->mem_rec_count = 1;
10924
10925 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10926 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10927 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10928 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10929 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10930 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10931 )
10932 {
10933 /* Rn is going to be changed in register scaled pre-indexed
10934 mode,and scaled post indexed mode. */
10935 record_buf[0] = reg_src2;
10936 arm_insn_r->reg_rec_count = 1;
10937 }
10938 }
10939 }
10940
10941 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10942 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10943 return 0;
10944 }
10945
10946 /* Handle ARM mode instructions with opcode 100. */
10947
10948 static int
10949 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10950 {
10951 struct regcache *reg_cache = arm_insn_r->regcache;
10952 uint32_t register_count = 0, register_bits;
10953 uint32_t reg_base, addr_mode;
10954 uint32_t record_buf[24], record_buf_mem[48];
10955 uint32_t wback;
10956 ULONGEST u_regval;
10957
10958 /* Fetch the list of registers. */
10959 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10960 arm_insn_r->reg_rec_count = 0;
10961
10962 /* Fetch the base register that contains the address we are loading data
10963 to. */
10964 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10965
10966 /* Calculate wback. */
10967 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10968
10969 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10970 {
10971 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10972
10973 /* Find out which registers are going to be loaded from memory. */
10974 while (register_bits)
10975 {
10976 if (register_bits & 0x00000001)
10977 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10978 register_bits = register_bits >> 1;
10979 register_count++;
10980 }
10981
10982
10983 /* If wback is true, also save the base register, which is going to be
10984 written to. */
10985 if (wback)
10986 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10987
10988 /* Save the CPSR register. */
10989 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10990 }
10991 else
10992 {
10993 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10994
10995 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10996
10997 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10998
10999 /* Find out how many registers are going to be stored to memory. */
11000 while (register_bits)
11001 {
11002 if (register_bits & 0x00000001)
11003 register_count++;
11004 register_bits = register_bits >> 1;
11005 }
11006
11007 switch (addr_mode)
11008 {
11009 /* STMDA (STMED): Decrement after. */
11010 case 0:
11011 record_buf_mem[1] = (uint32_t) u_regval
11012 - register_count * ARM_INT_REGISTER_SIZE + 4;
11013 break;
11014 /* STM (STMIA, STMEA): Increment after. */
11015 case 1:
11016 record_buf_mem[1] = (uint32_t) u_regval;
11017 break;
11018 /* STMDB (STMFD): Decrement before. */
11019 case 2:
11020 record_buf_mem[1] = (uint32_t) u_regval
11021 - register_count * ARM_INT_REGISTER_SIZE;
11022 break;
11023 /* STMIB (STMFA): Increment before. */
11024 case 3:
11025 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11026 break;
11027 default:
11028 gdb_assert_not_reached ("no decoding pattern found");
11029 break;
11030 }
11031
11032 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11033 arm_insn_r->mem_rec_count = 1;
11034
11035 /* If wback is true, also save the base register, which is going to be
11036 written to. */
11037 if (wback)
11038 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11039 }
11040
11041 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11042 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11043 return 0;
11044 }
11045
11046 /* Handling opcode 101 insns. */
11047
11048 static int
11049 arm_record_b_bl (insn_decode_record *arm_insn_r)
11050 {
11051 uint32_t record_buf[8];
11052
11053 /* Handle B, BL, BLX(1) insns. */
11054 /* B simply branches so we do nothing here. */
11055 /* Note: BLX(1) doesnt fall here but instead it falls into
11056 extension space. */
11057 if (bit (arm_insn_r->arm_insn, 24))
11058 {
11059 record_buf[0] = ARM_LR_REGNUM;
11060 arm_insn_r->reg_rec_count = 1;
11061 }
11062
11063 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11064
11065 return 0;
11066 }
11067
11068 static int
11069 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11070 {
11071 printf_unfiltered (_("Process record does not support instruction "
11072 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11073 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11074
11075 return -1;
11076 }
11077
11078 /* Record handler for vector data transfer instructions. */
11079
11080 static int
11081 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11082 {
11083 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11084 uint32_t record_buf[4];
11085
11086 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11087 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11088 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11089 bit_l = bit (arm_insn_r->arm_insn, 20);
11090 bit_c = bit (arm_insn_r->arm_insn, 8);
11091
11092 /* Handle VMOV instruction. */
11093 if (bit_l && bit_c)
11094 {
11095 record_buf[0] = reg_t;
11096 arm_insn_r->reg_rec_count = 1;
11097 }
11098 else if (bit_l && !bit_c)
11099 {
11100 /* Handle VMOV instruction. */
11101 if (bits_a == 0x00)
11102 {
11103 record_buf[0] = reg_t;
11104 arm_insn_r->reg_rec_count = 1;
11105 }
11106 /* Handle VMRS instruction. */
11107 else if (bits_a == 0x07)
11108 {
11109 if (reg_t == 15)
11110 reg_t = ARM_PS_REGNUM;
11111
11112 record_buf[0] = reg_t;
11113 arm_insn_r->reg_rec_count = 1;
11114 }
11115 }
11116 else if (!bit_l && !bit_c)
11117 {
11118 /* Handle VMOV instruction. */
11119 if (bits_a == 0x00)
11120 {
11121 record_buf[0] = ARM_D0_REGNUM + reg_v;
11122
11123 arm_insn_r->reg_rec_count = 1;
11124 }
11125 /* Handle VMSR instruction. */
11126 else if (bits_a == 0x07)
11127 {
11128 record_buf[0] = ARM_FPSCR_REGNUM;
11129 arm_insn_r->reg_rec_count = 1;
11130 }
11131 }
11132 else if (!bit_l && bit_c)
11133 {
11134 /* Handle VMOV instruction. */
11135 if (!(bits_a & 0x04))
11136 {
11137 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11138 + ARM_D0_REGNUM;
11139 arm_insn_r->reg_rec_count = 1;
11140 }
11141 /* Handle VDUP instruction. */
11142 else
11143 {
11144 if (bit (arm_insn_r->arm_insn, 21))
11145 {
11146 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11147 record_buf[0] = reg_v + ARM_D0_REGNUM;
11148 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11149 arm_insn_r->reg_rec_count = 2;
11150 }
11151 else
11152 {
11153 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11154 record_buf[0] = reg_v + ARM_D0_REGNUM;
11155 arm_insn_r->reg_rec_count = 1;
11156 }
11157 }
11158 }
11159
11160 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11161 return 0;
11162 }
11163
11164 /* Record handler for extension register load/store instructions. */
11165
11166 static int
11167 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11168 {
11169 uint32_t opcode, single_reg;
11170 uint8_t op_vldm_vstm;
11171 uint32_t record_buf[8], record_buf_mem[128];
11172 ULONGEST u_regval = 0;
11173
11174 struct regcache *reg_cache = arm_insn_r->regcache;
11175
11176 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11177 single_reg = !bit (arm_insn_r->arm_insn, 8);
11178 op_vldm_vstm = opcode & 0x1b;
11179
11180 /* Handle VMOV instructions. */
11181 if ((opcode & 0x1e) == 0x04)
11182 {
11183 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11184 {
11185 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11186 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11187 arm_insn_r->reg_rec_count = 2;
11188 }
11189 else
11190 {
11191 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11192 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11193
11194 if (single_reg)
11195 {
11196 /* The first S register number m is REG_M:M (M is bit 5),
11197 the corresponding D register number is REG_M:M / 2, which
11198 is REG_M. */
11199 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11200 /* The second S register number is REG_M:M + 1, the
11201 corresponding D register number is (REG_M:M + 1) / 2.
11202 IOW, if bit M is 1, the first and second S registers
11203 are mapped to different D registers, otherwise, they are
11204 in the same D register. */
11205 if (bit_m)
11206 {
11207 record_buf[arm_insn_r->reg_rec_count++]
11208 = ARM_D0_REGNUM + reg_m + 1;
11209 }
11210 }
11211 else
11212 {
11213 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11214 arm_insn_r->reg_rec_count = 1;
11215 }
11216 }
11217 }
11218 /* Handle VSTM and VPUSH instructions. */
11219 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11220 || op_vldm_vstm == 0x12)
11221 {
11222 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11223 uint32_t memory_index = 0;
11224
11225 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11226 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11227 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11228 imm_off32 = imm_off8 << 2;
11229 memory_count = imm_off8;
11230
11231 if (bit (arm_insn_r->arm_insn, 23))
11232 start_address = u_regval;
11233 else
11234 start_address = u_regval - imm_off32;
11235
11236 if (bit (arm_insn_r->arm_insn, 21))
11237 {
11238 record_buf[0] = reg_rn;
11239 arm_insn_r->reg_rec_count = 1;
11240 }
11241
11242 while (memory_count > 0)
11243 {
11244 if (single_reg)
11245 {
11246 record_buf_mem[memory_index] = 4;
11247 record_buf_mem[memory_index + 1] = start_address;
11248 start_address = start_address + 4;
11249 memory_index = memory_index + 2;
11250 }
11251 else
11252 {
11253 record_buf_mem[memory_index] = 4;
11254 record_buf_mem[memory_index + 1] = start_address;
11255 record_buf_mem[memory_index + 2] = 4;
11256 record_buf_mem[memory_index + 3] = start_address + 4;
11257 start_address = start_address + 8;
11258 memory_index = memory_index + 4;
11259 }
11260 memory_count--;
11261 }
11262 arm_insn_r->mem_rec_count = (memory_index >> 1);
11263 }
11264 /* Handle VLDM instructions. */
11265 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11266 || op_vldm_vstm == 0x13)
11267 {
11268 uint32_t reg_count, reg_vd;
11269 uint32_t reg_index = 0;
11270 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11271
11272 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11273 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11274
11275 /* REG_VD is the first D register number. If the instruction
11276 loads memory to S registers (SINGLE_REG is TRUE), the register
11277 number is (REG_VD << 1 | bit D), so the corresponding D
11278 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11279 if (!single_reg)
11280 reg_vd = reg_vd | (bit_d << 4);
11281
11282 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11283 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11284
11285 /* If the instruction loads memory to D register, REG_COUNT should
11286 be divided by 2, according to the ARM Architecture Reference
11287 Manual. If the instruction loads memory to S register, divide by
11288 2 as well because two S registers are mapped to D register. */
11289 reg_count = reg_count / 2;
11290 if (single_reg && bit_d)
11291 {
11292 /* Increase the register count if S register list starts from
11293 an odd number (bit d is one). */
11294 reg_count++;
11295 }
11296
11297 while (reg_count > 0)
11298 {
11299 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11300 reg_count--;
11301 }
11302 arm_insn_r->reg_rec_count = reg_index;
11303 }
11304 /* VSTR Vector store register. */
11305 else if ((opcode & 0x13) == 0x10)
11306 {
11307 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11308 uint32_t memory_index = 0;
11309
11310 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11311 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11312 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11313 imm_off32 = imm_off8 << 2;
11314
11315 if (bit (arm_insn_r->arm_insn, 23))
11316 start_address = u_regval + imm_off32;
11317 else
11318 start_address = u_regval - imm_off32;
11319
11320 if (single_reg)
11321 {
11322 record_buf_mem[memory_index] = 4;
11323 record_buf_mem[memory_index + 1] = start_address;
11324 arm_insn_r->mem_rec_count = 1;
11325 }
11326 else
11327 {
11328 record_buf_mem[memory_index] = 4;
11329 record_buf_mem[memory_index + 1] = start_address;
11330 record_buf_mem[memory_index + 2] = 4;
11331 record_buf_mem[memory_index + 3] = start_address + 4;
11332 arm_insn_r->mem_rec_count = 2;
11333 }
11334 }
11335 /* VLDR Vector load register. */
11336 else if ((opcode & 0x13) == 0x11)
11337 {
11338 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11339
11340 if (!single_reg)
11341 {
11342 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11343 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11344 }
11345 else
11346 {
11347 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11348 /* Record register D rather than pseudo register S. */
11349 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11350 }
11351 arm_insn_r->reg_rec_count = 1;
11352 }
11353
11354 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11355 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11356 return 0;
11357 }
11358
11359 /* Record handler for arm/thumb mode VFP data processing instructions. */
11360
11361 static int
11362 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11363 {
11364 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11365 uint32_t record_buf[4];
11366 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11367 enum insn_types curr_insn_type = INSN_INV;
11368
11369 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11370 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11371 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11372 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11373 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11374 bit_d = bit (arm_insn_r->arm_insn, 22);
11375 /* Mask off the "D" bit. */
11376 opc1 = opc1 & ~0x04;
11377
11378 /* Handle VMLA, VMLS. */
11379 if (opc1 == 0x00)
11380 {
11381 if (bit (arm_insn_r->arm_insn, 10))
11382 {
11383 if (bit (arm_insn_r->arm_insn, 6))
11384 curr_insn_type = INSN_T0;
11385 else
11386 curr_insn_type = INSN_T1;
11387 }
11388 else
11389 {
11390 if (dp_op_sz)
11391 curr_insn_type = INSN_T1;
11392 else
11393 curr_insn_type = INSN_T2;
11394 }
11395 }
11396 /* Handle VNMLA, VNMLS, VNMUL. */
11397 else if (opc1 == 0x01)
11398 {
11399 if (dp_op_sz)
11400 curr_insn_type = INSN_T1;
11401 else
11402 curr_insn_type = INSN_T2;
11403 }
11404 /* Handle VMUL. */
11405 else if (opc1 == 0x02 && !(opc3 & 0x01))
11406 {
11407 if (bit (arm_insn_r->arm_insn, 10))
11408 {
11409 if (bit (arm_insn_r->arm_insn, 6))
11410 curr_insn_type = INSN_T0;
11411 else
11412 curr_insn_type = INSN_T1;
11413 }
11414 else
11415 {
11416 if (dp_op_sz)
11417 curr_insn_type = INSN_T1;
11418 else
11419 curr_insn_type = INSN_T2;
11420 }
11421 }
11422 /* Handle VADD, VSUB. */
11423 else if (opc1 == 0x03)
11424 {
11425 if (!bit (arm_insn_r->arm_insn, 9))
11426 {
11427 if (bit (arm_insn_r->arm_insn, 6))
11428 curr_insn_type = INSN_T0;
11429 else
11430 curr_insn_type = INSN_T1;
11431 }
11432 else
11433 {
11434 if (dp_op_sz)
11435 curr_insn_type = INSN_T1;
11436 else
11437 curr_insn_type = INSN_T2;
11438 }
11439 }
11440 /* Handle VDIV. */
11441 else if (opc1 == 0x08)
11442 {
11443 if (dp_op_sz)
11444 curr_insn_type = INSN_T1;
11445 else
11446 curr_insn_type = INSN_T2;
11447 }
11448 /* Handle all other vfp data processing instructions. */
11449 else if (opc1 == 0x0b)
11450 {
11451 /* Handle VMOV. */
11452 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11453 {
11454 if (bit (arm_insn_r->arm_insn, 4))
11455 {
11456 if (bit (arm_insn_r->arm_insn, 6))
11457 curr_insn_type = INSN_T0;
11458 else
11459 curr_insn_type = INSN_T1;
11460 }
11461 else
11462 {
11463 if (dp_op_sz)
11464 curr_insn_type = INSN_T1;
11465 else
11466 curr_insn_type = INSN_T2;
11467 }
11468 }
11469 /* Handle VNEG and VABS. */
11470 else if ((opc2 == 0x01 && opc3 == 0x01)
11471 || (opc2 == 0x00 && opc3 == 0x03))
11472 {
11473 if (!bit (arm_insn_r->arm_insn, 11))
11474 {
11475 if (bit (arm_insn_r->arm_insn, 6))
11476 curr_insn_type = INSN_T0;
11477 else
11478 curr_insn_type = INSN_T1;
11479 }
11480 else
11481 {
11482 if (dp_op_sz)
11483 curr_insn_type = INSN_T1;
11484 else
11485 curr_insn_type = INSN_T2;
11486 }
11487 }
11488 /* Handle VSQRT. */
11489 else if (opc2 == 0x01 && opc3 == 0x03)
11490 {
11491 if (dp_op_sz)
11492 curr_insn_type = INSN_T1;
11493 else
11494 curr_insn_type = INSN_T2;
11495 }
11496 /* Handle VCVT. */
11497 else if (opc2 == 0x07 && opc3 == 0x03)
11498 {
11499 if (!dp_op_sz)
11500 curr_insn_type = INSN_T1;
11501 else
11502 curr_insn_type = INSN_T2;
11503 }
11504 else if (opc3 & 0x01)
11505 {
11506 /* Handle VCVT. */
11507 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11508 {
11509 if (!bit (arm_insn_r->arm_insn, 18))
11510 curr_insn_type = INSN_T2;
11511 else
11512 {
11513 if (dp_op_sz)
11514 curr_insn_type = INSN_T1;
11515 else
11516 curr_insn_type = INSN_T2;
11517 }
11518 }
11519 /* Handle VCVT. */
11520 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11521 {
11522 if (dp_op_sz)
11523 curr_insn_type = INSN_T1;
11524 else
11525 curr_insn_type = INSN_T2;
11526 }
11527 /* Handle VCVTB, VCVTT. */
11528 else if ((opc2 & 0x0e) == 0x02)
11529 curr_insn_type = INSN_T2;
11530 /* Handle VCMP, VCMPE. */
11531 else if ((opc2 & 0x0e) == 0x04)
11532 curr_insn_type = INSN_T3;
11533 }
11534 }
11535
11536 switch (curr_insn_type)
11537 {
11538 case INSN_T0:
11539 reg_vd = reg_vd | (bit_d << 4);
11540 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11541 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11542 arm_insn_r->reg_rec_count = 2;
11543 break;
11544
11545 case INSN_T1:
11546 reg_vd = reg_vd | (bit_d << 4);
11547 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11548 arm_insn_r->reg_rec_count = 1;
11549 break;
11550
11551 case INSN_T2:
11552 reg_vd = (reg_vd << 1) | bit_d;
11553 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11554 arm_insn_r->reg_rec_count = 1;
11555 break;
11556
11557 case INSN_T3:
11558 record_buf[0] = ARM_FPSCR_REGNUM;
11559 arm_insn_r->reg_rec_count = 1;
11560 break;
11561
11562 default:
11563 gdb_assert_not_reached ("no decoding pattern found");
11564 break;
11565 }
11566
11567 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11568 return 0;
11569 }
11570
11571 /* Handling opcode 110 insns. */
11572
11573 static int
11574 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11575 {
11576 uint32_t op1, op1_ebit, coproc;
11577
11578 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11579 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11580 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11581
11582 if ((coproc & 0x0e) == 0x0a)
11583 {
11584 /* Handle extension register ld/st instructions. */
11585 if (!(op1 & 0x20))
11586 return arm_record_exreg_ld_st_insn (arm_insn_r);
11587
11588 /* 64-bit transfers between arm core and extension registers. */
11589 if ((op1 & 0x3e) == 0x04)
11590 return arm_record_exreg_ld_st_insn (arm_insn_r);
11591 }
11592 else
11593 {
11594 /* Handle coprocessor ld/st instructions. */
11595 if (!(op1 & 0x3a))
11596 {
11597 /* Store. */
11598 if (!op1_ebit)
11599 return arm_record_unsupported_insn (arm_insn_r);
11600 else
11601 /* Load. */
11602 return arm_record_unsupported_insn (arm_insn_r);
11603 }
11604
11605 /* Move to coprocessor from two arm core registers. */
11606 if (op1 == 0x4)
11607 return arm_record_unsupported_insn (arm_insn_r);
11608
11609 /* Move to two arm core registers from coprocessor. */
11610 if (op1 == 0x5)
11611 {
11612 uint32_t reg_t[2];
11613
11614 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11615 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11616 arm_insn_r->reg_rec_count = 2;
11617
11618 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11619 return 0;
11620 }
11621 }
11622 return arm_record_unsupported_insn (arm_insn_r);
11623 }
11624
11625 /* Handling opcode 111 insns. */
11626
11627 static int
11628 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11629 {
11630 uint32_t op, op1_ebit, coproc, bits_24_25;
11631 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11632 struct regcache *reg_cache = arm_insn_r->regcache;
11633
11634 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11635 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11636 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11637 op = bit (arm_insn_r->arm_insn, 4);
11638 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11639
11640 /* Handle arm SWI/SVC system call instructions. */
11641 if (bits_24_25 == 0x3)
11642 {
11643 if (tdep->arm_syscall_record != NULL)
11644 {
11645 ULONGEST svc_operand, svc_number;
11646
11647 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11648
11649 if (svc_operand) /* OABI. */
11650 svc_number = svc_operand - 0x900000;
11651 else /* EABI. */
11652 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11653
11654 return tdep->arm_syscall_record (reg_cache, svc_number);
11655 }
11656 else
11657 {
11658 printf_unfiltered (_("no syscall record support\n"));
11659 return -1;
11660 }
11661 }
11662 else if (bits_24_25 == 0x02)
11663 {
11664 if (op)
11665 {
11666 if ((coproc & 0x0e) == 0x0a)
11667 {
11668 /* 8, 16, and 32-bit transfer */
11669 return arm_record_vdata_transfer_insn (arm_insn_r);
11670 }
11671 else
11672 {
11673 if (op1_ebit)
11674 {
11675 /* MRC, MRC2 */
11676 uint32_t record_buf[1];
11677
11678 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11679 if (record_buf[0] == 15)
11680 record_buf[0] = ARM_PS_REGNUM;
11681
11682 arm_insn_r->reg_rec_count = 1;
11683 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11684 record_buf);
11685 return 0;
11686 }
11687 else
11688 {
11689 /* MCR, MCR2 */
11690 return -1;
11691 }
11692 }
11693 }
11694 else
11695 {
11696 if ((coproc & 0x0e) == 0x0a)
11697 {
11698 /* VFP data-processing instructions. */
11699 return arm_record_vfp_data_proc_insn (arm_insn_r);
11700 }
11701 else
11702 {
11703 /* CDP, CDP2 */
11704 return -1;
11705 }
11706 }
11707 }
11708 else
11709 {
11710 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11711
11712 if (op1 == 5)
11713 {
11714 if ((coproc & 0x0e) != 0x0a)
11715 {
11716 /* MRRC, MRRC2 */
11717 return -1;
11718 }
11719 }
11720 else if (op1 == 4 || op1 == 5)
11721 {
11722 if ((coproc & 0x0e) == 0x0a)
11723 {
11724 /* 64-bit transfers between ARM core and extension */
11725 return -1;
11726 }
11727 else if (op1 == 4)
11728 {
11729 /* MCRR, MCRR2 */
11730 return -1;
11731 }
11732 }
11733 else if (op1 == 0 || op1 == 1)
11734 {
11735 /* UNDEFINED */
11736 return -1;
11737 }
11738 else
11739 {
11740 if ((coproc & 0x0e) == 0x0a)
11741 {
11742 /* Extension register load/store */
11743 }
11744 else
11745 {
11746 /* STC, STC2, LDC, LDC2 */
11747 }
11748 return -1;
11749 }
11750 }
11751
11752 return -1;
11753 }
11754
11755 /* Handling opcode 000 insns. */
11756
11757 static int
11758 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11759 {
11760 uint32_t record_buf[8];
11761 uint32_t reg_src1 = 0;
11762
11763 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11764
11765 record_buf[0] = ARM_PS_REGNUM;
11766 record_buf[1] = reg_src1;
11767 thumb_insn_r->reg_rec_count = 2;
11768
11769 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11770
11771 return 0;
11772 }
11773
11774
11775 /* Handling opcode 001 insns. */
11776
11777 static int
11778 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11779 {
11780 uint32_t record_buf[8];
11781 uint32_t reg_src1 = 0;
11782
11783 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11784
11785 record_buf[0] = ARM_PS_REGNUM;
11786 record_buf[1] = reg_src1;
11787 thumb_insn_r->reg_rec_count = 2;
11788
11789 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11790
11791 return 0;
11792 }
11793
11794 /* Handling opcode 010 insns. */
11795
11796 static int
11797 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11798 {
11799 struct regcache *reg_cache = thumb_insn_r->regcache;
11800 uint32_t record_buf[8], record_buf_mem[8];
11801
11802 uint32_t reg_src1 = 0, reg_src2 = 0;
11803 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11804
11805 ULONGEST u_regval[2] = {0};
11806
11807 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11808
11809 if (bit (thumb_insn_r->arm_insn, 12))
11810 {
11811 /* Handle load/store register offset. */
11812 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11813
11814 if (in_inclusive_range (opB, 4U, 7U))
11815 {
11816 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11817 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11818 record_buf[0] = reg_src1;
11819 thumb_insn_r->reg_rec_count = 1;
11820 }
11821 else if (in_inclusive_range (opB, 0U, 2U))
11822 {
11823 /* STR(2), STRB(2), STRH(2) . */
11824 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11825 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11826 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11827 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11828 if (0 == opB)
11829 record_buf_mem[0] = 4; /* STR (2). */
11830 else if (2 == opB)
11831 record_buf_mem[0] = 1; /* STRB (2). */
11832 else if (1 == opB)
11833 record_buf_mem[0] = 2; /* STRH (2). */
11834 record_buf_mem[1] = u_regval[0] + u_regval[1];
11835 thumb_insn_r->mem_rec_count = 1;
11836 }
11837 }
11838 else if (bit (thumb_insn_r->arm_insn, 11))
11839 {
11840 /* Handle load from literal pool. */
11841 /* LDR(3). */
11842 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11843 record_buf[0] = reg_src1;
11844 thumb_insn_r->reg_rec_count = 1;
11845 }
11846 else if (opcode1)
11847 {
11848 /* Special data instructions and branch and exchange */
11849 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11850 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11851 if ((3 == opcode2) && (!opcode3))
11852 {
11853 /* Branch with exchange. */
11854 record_buf[0] = ARM_PS_REGNUM;
11855 thumb_insn_r->reg_rec_count = 1;
11856 }
11857 else
11858 {
11859 /* Format 8; special data processing insns. */
11860 record_buf[0] = ARM_PS_REGNUM;
11861 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11862 | bits (thumb_insn_r->arm_insn, 0, 2));
11863 thumb_insn_r->reg_rec_count = 2;
11864 }
11865 }
11866 else
11867 {
11868 /* Format 5; data processing insns. */
11869 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11870 if (bit (thumb_insn_r->arm_insn, 7))
11871 {
11872 reg_src1 = reg_src1 + 8;
11873 }
11874 record_buf[0] = ARM_PS_REGNUM;
11875 record_buf[1] = reg_src1;
11876 thumb_insn_r->reg_rec_count = 2;
11877 }
11878
11879 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11880 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11881 record_buf_mem);
11882
11883 return 0;
11884 }
11885
11886 /* Handling opcode 001 insns. */
11887
11888 static int
11889 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11890 {
11891 struct regcache *reg_cache = thumb_insn_r->regcache;
11892 uint32_t record_buf[8], record_buf_mem[8];
11893
11894 uint32_t reg_src1 = 0;
11895 uint32_t opcode = 0, immed_5 = 0;
11896
11897 ULONGEST u_regval = 0;
11898
11899 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11900
11901 if (opcode)
11902 {
11903 /* LDR(1). */
11904 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11905 record_buf[0] = reg_src1;
11906 thumb_insn_r->reg_rec_count = 1;
11907 }
11908 else
11909 {
11910 /* STR(1). */
11911 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11912 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11913 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11914 record_buf_mem[0] = 4;
11915 record_buf_mem[1] = u_regval + (immed_5 * 4);
11916 thumb_insn_r->mem_rec_count = 1;
11917 }
11918
11919 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11920 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11921 record_buf_mem);
11922
11923 return 0;
11924 }
11925
11926 /* Handling opcode 100 insns. */
11927
11928 static int
11929 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11930 {
11931 struct regcache *reg_cache = thumb_insn_r->regcache;
11932 uint32_t record_buf[8], record_buf_mem[8];
11933
11934 uint32_t reg_src1 = 0;
11935 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11936
11937 ULONGEST u_regval = 0;
11938
11939 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11940
11941 if (3 == opcode)
11942 {
11943 /* LDR(4). */
11944 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11945 record_buf[0] = reg_src1;
11946 thumb_insn_r->reg_rec_count = 1;
11947 }
11948 else if (1 == opcode)
11949 {
11950 /* LDRH(1). */
11951 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11952 record_buf[0] = reg_src1;
11953 thumb_insn_r->reg_rec_count = 1;
11954 }
11955 else if (2 == opcode)
11956 {
11957 /* STR(3). */
11958 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11959 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11960 record_buf_mem[0] = 4;
11961 record_buf_mem[1] = u_regval + (immed_8 * 4);
11962 thumb_insn_r->mem_rec_count = 1;
11963 }
11964 else if (0 == opcode)
11965 {
11966 /* STRH(1). */
11967 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11968 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11969 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11970 record_buf_mem[0] = 2;
11971 record_buf_mem[1] = u_regval + (immed_5 * 2);
11972 thumb_insn_r->mem_rec_count = 1;
11973 }
11974
11975 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11976 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11977 record_buf_mem);
11978
11979 return 0;
11980 }
11981
11982 /* Handling opcode 101 insns. */
11983
11984 static int
11985 thumb_record_misc (insn_decode_record *thumb_insn_r)
11986 {
11987 struct regcache *reg_cache = thumb_insn_r->regcache;
11988
11989 uint32_t opcode = 0;
11990 uint32_t register_bits = 0, register_count = 0;
11991 uint32_t index = 0, start_address = 0;
11992 uint32_t record_buf[24], record_buf_mem[48];
11993 uint32_t reg_src1;
11994
11995 ULONGEST u_regval = 0;
11996
11997 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11998
11999 if (opcode == 0 || opcode == 1)
12000 {
12001 /* ADR and ADD (SP plus immediate) */
12002
12003 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12004 record_buf[0] = reg_src1;
12005 thumb_insn_r->reg_rec_count = 1;
12006 }
12007 else
12008 {
12009 /* Miscellaneous 16-bit instructions */
12010 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12011
12012 switch (opcode2)
12013 {
12014 case 6:
12015 /* SETEND and CPS */
12016 break;
12017 case 0:
12018 /* ADD/SUB (SP plus immediate) */
12019 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12020 record_buf[0] = ARM_SP_REGNUM;
12021 thumb_insn_r->reg_rec_count = 1;
12022 break;
12023 case 1: /* fall through */
12024 case 3: /* fall through */
12025 case 9: /* fall through */
12026 case 11:
12027 /* CBNZ, CBZ */
12028 break;
12029 case 2:
12030 /* SXTH, SXTB, UXTH, UXTB */
12031 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12032 thumb_insn_r->reg_rec_count = 1;
12033 break;
12034 case 4: /* fall through */
12035 case 5:
12036 /* PUSH. */
12037 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12038 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12039 while (register_bits)
12040 {
12041 if (register_bits & 0x00000001)
12042 register_count++;
12043 register_bits = register_bits >> 1;
12044 }
12045 start_address = u_regval - \
12046 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12047 thumb_insn_r->mem_rec_count = register_count;
12048 while (register_count)
12049 {
12050 record_buf_mem[(register_count * 2) - 1] = start_address;
12051 record_buf_mem[(register_count * 2) - 2] = 4;
12052 start_address = start_address + 4;
12053 register_count--;
12054 }
12055 record_buf[0] = ARM_SP_REGNUM;
12056 thumb_insn_r->reg_rec_count = 1;
12057 break;
12058 case 10:
12059 /* REV, REV16, REVSH */
12060 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12061 thumb_insn_r->reg_rec_count = 1;
12062 break;
12063 case 12: /* fall through */
12064 case 13:
12065 /* POP. */
12066 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12067 while (register_bits)
12068 {
12069 if (register_bits & 0x00000001)
12070 record_buf[index++] = register_count;
12071 register_bits = register_bits >> 1;
12072 register_count++;
12073 }
12074 record_buf[index++] = ARM_PS_REGNUM;
12075 record_buf[index++] = ARM_SP_REGNUM;
12076 thumb_insn_r->reg_rec_count = index;
12077 break;
12078 case 0xe:
12079 /* BKPT insn. */
12080 /* Handle enhanced software breakpoint insn, BKPT. */
12081 /* CPSR is changed to be executed in ARM state, disabling normal
12082 interrupts, entering abort mode. */
12083 /* According to high vector configuration PC is set. */
12084 /* User hits breakpoint and type reverse, in that case, we need to go back with
12085 previous CPSR and Program Counter. */
12086 record_buf[0] = ARM_PS_REGNUM;
12087 record_buf[1] = ARM_LR_REGNUM;
12088 thumb_insn_r->reg_rec_count = 2;
12089 /* We need to save SPSR value, which is not yet done. */
12090 printf_unfiltered (_("Process record does not support instruction "
12091 "0x%0x at address %s.\n"),
12092 thumb_insn_r->arm_insn,
12093 paddress (thumb_insn_r->gdbarch,
12094 thumb_insn_r->this_addr));
12095 return -1;
12096
12097 case 0xf:
12098 /* If-Then, and hints */
12099 break;
12100 default:
12101 return -1;
12102 };
12103 }
12104
12105 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12106 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12107 record_buf_mem);
12108
12109 return 0;
12110 }
12111
12112 /* Handling opcode 110 insns. */
12113
12114 static int
12115 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12116 {
12117 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12118 struct regcache *reg_cache = thumb_insn_r->regcache;
12119
12120 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12121 uint32_t reg_src1 = 0;
12122 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12123 uint32_t index = 0, start_address = 0;
12124 uint32_t record_buf[24], record_buf_mem[48];
12125
12126 ULONGEST u_regval = 0;
12127
12128 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12129 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12130
12131 if (1 == opcode2)
12132 {
12133
12134 /* LDMIA. */
12135 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12136 /* Get Rn. */
12137 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12138 while (register_bits)
12139 {
12140 if (register_bits & 0x00000001)
12141 record_buf[index++] = register_count;
12142 register_bits = register_bits >> 1;
12143 register_count++;
12144 }
12145 record_buf[index++] = reg_src1;
12146 thumb_insn_r->reg_rec_count = index;
12147 }
12148 else if (0 == opcode2)
12149 {
12150 /* It handles both STMIA. */
12151 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12152 /* Get Rn. */
12153 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12154 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12155 while (register_bits)
12156 {
12157 if (register_bits & 0x00000001)
12158 register_count++;
12159 register_bits = register_bits >> 1;
12160 }
12161 start_address = u_regval;
12162 thumb_insn_r->mem_rec_count = register_count;
12163 while (register_count)
12164 {
12165 record_buf_mem[(register_count * 2) - 1] = start_address;
12166 record_buf_mem[(register_count * 2) - 2] = 4;
12167 start_address = start_address + 4;
12168 register_count--;
12169 }
12170 }
12171 else if (0x1F == opcode1)
12172 {
12173 /* Handle arm syscall insn. */
12174 if (tdep->arm_syscall_record != NULL)
12175 {
12176 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12177 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12178 }
12179 else
12180 {
12181 printf_unfiltered (_("no syscall record support\n"));
12182 return -1;
12183 }
12184 }
12185
12186 /* B (1), conditional branch is automatically taken care in process_record,
12187 as PC is saved there. */
12188
12189 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12190 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12191 record_buf_mem);
12192
12193 return ret;
12194 }
12195
12196 /* Handling opcode 111 insns. */
12197
12198 static int
12199 thumb_record_branch (insn_decode_record *thumb_insn_r)
12200 {
12201 uint32_t record_buf[8];
12202 uint32_t bits_h = 0;
12203
12204 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12205
12206 if (2 == bits_h || 3 == bits_h)
12207 {
12208 /* BL */
12209 record_buf[0] = ARM_LR_REGNUM;
12210 thumb_insn_r->reg_rec_count = 1;
12211 }
12212 else if (1 == bits_h)
12213 {
12214 /* BLX(1). */
12215 record_buf[0] = ARM_PS_REGNUM;
12216 record_buf[1] = ARM_LR_REGNUM;
12217 thumb_insn_r->reg_rec_count = 2;
12218 }
12219
12220 /* B(2) is automatically taken care in process_record, as PC is
12221 saved there. */
12222
12223 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12224
12225 return 0;
12226 }
12227
12228 /* Handler for thumb2 load/store multiple instructions. */
12229
12230 static int
12231 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12232 {
12233 struct regcache *reg_cache = thumb2_insn_r->regcache;
12234
12235 uint32_t reg_rn, op;
12236 uint32_t register_bits = 0, register_count = 0;
12237 uint32_t index = 0, start_address = 0;
12238 uint32_t record_buf[24], record_buf_mem[48];
12239
12240 ULONGEST u_regval = 0;
12241
12242 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12243 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12244
12245 if (0 == op || 3 == op)
12246 {
12247 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12248 {
12249 /* Handle RFE instruction. */
12250 record_buf[0] = ARM_PS_REGNUM;
12251 thumb2_insn_r->reg_rec_count = 1;
12252 }
12253 else
12254 {
12255 /* Handle SRS instruction after reading banked SP. */
12256 return arm_record_unsupported_insn (thumb2_insn_r);
12257 }
12258 }
12259 else if (1 == op || 2 == op)
12260 {
12261 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12262 {
12263 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12264 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12265 while (register_bits)
12266 {
12267 if (register_bits & 0x00000001)
12268 record_buf[index++] = register_count;
12269
12270 register_count++;
12271 register_bits = register_bits >> 1;
12272 }
12273 record_buf[index++] = reg_rn;
12274 record_buf[index++] = ARM_PS_REGNUM;
12275 thumb2_insn_r->reg_rec_count = index;
12276 }
12277 else
12278 {
12279 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12280 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12281 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12282 while (register_bits)
12283 {
12284 if (register_bits & 0x00000001)
12285 register_count++;
12286
12287 register_bits = register_bits >> 1;
12288 }
12289
12290 if (1 == op)
12291 {
12292 /* Start address calculation for LDMDB/LDMEA. */
12293 start_address = u_regval;
12294 }
12295 else if (2 == op)
12296 {
12297 /* Start address calculation for LDMDB/LDMEA. */
12298 start_address = u_regval - register_count * 4;
12299 }
12300
12301 thumb2_insn_r->mem_rec_count = register_count;
12302 while (register_count)
12303 {
12304 record_buf_mem[register_count * 2 - 1] = start_address;
12305 record_buf_mem[register_count * 2 - 2] = 4;
12306 start_address = start_address + 4;
12307 register_count--;
12308 }
12309 record_buf[0] = reg_rn;
12310 record_buf[1] = ARM_PS_REGNUM;
12311 thumb2_insn_r->reg_rec_count = 2;
12312 }
12313 }
12314
12315 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12316 record_buf_mem);
12317 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12318 record_buf);
12319 return ARM_RECORD_SUCCESS;
12320 }
12321
12322 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12323 instructions. */
12324
12325 static int
12326 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12327 {
12328 struct regcache *reg_cache = thumb2_insn_r->regcache;
12329
12330 uint32_t reg_rd, reg_rn, offset_imm;
12331 uint32_t reg_dest1, reg_dest2;
12332 uint32_t address, offset_addr;
12333 uint32_t record_buf[8], record_buf_mem[8];
12334 uint32_t op1, op2, op3;
12335
12336 ULONGEST u_regval[2];
12337
12338 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12339 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12340 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12341
12342 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12343 {
12344 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12345 {
12346 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12347 record_buf[0] = reg_dest1;
12348 record_buf[1] = ARM_PS_REGNUM;
12349 thumb2_insn_r->reg_rec_count = 2;
12350 }
12351
12352 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12353 {
12354 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12355 record_buf[2] = reg_dest2;
12356 thumb2_insn_r->reg_rec_count = 3;
12357 }
12358 }
12359 else
12360 {
12361 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12362 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12363
12364 if (0 == op1 && 0 == op2)
12365 {
12366 /* Handle STREX. */
12367 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12368 address = u_regval[0] + (offset_imm * 4);
12369 record_buf_mem[0] = 4;
12370 record_buf_mem[1] = address;
12371 thumb2_insn_r->mem_rec_count = 1;
12372 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12373 record_buf[0] = reg_rd;
12374 thumb2_insn_r->reg_rec_count = 1;
12375 }
12376 else if (1 == op1 && 0 == op2)
12377 {
12378 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12379 record_buf[0] = reg_rd;
12380 thumb2_insn_r->reg_rec_count = 1;
12381 address = u_regval[0];
12382 record_buf_mem[1] = address;
12383
12384 if (4 == op3)
12385 {
12386 /* Handle STREXB. */
12387 record_buf_mem[0] = 1;
12388 thumb2_insn_r->mem_rec_count = 1;
12389 }
12390 else if (5 == op3)
12391 {
12392 /* Handle STREXH. */
12393 record_buf_mem[0] = 2 ;
12394 thumb2_insn_r->mem_rec_count = 1;
12395 }
12396 else if (7 == op3)
12397 {
12398 /* Handle STREXD. */
12399 address = u_regval[0];
12400 record_buf_mem[0] = 4;
12401 record_buf_mem[2] = 4;
12402 record_buf_mem[3] = address + 4;
12403 thumb2_insn_r->mem_rec_count = 2;
12404 }
12405 }
12406 else
12407 {
12408 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12409
12410 if (bit (thumb2_insn_r->arm_insn, 24))
12411 {
12412 if (bit (thumb2_insn_r->arm_insn, 23))
12413 offset_addr = u_regval[0] + (offset_imm * 4);
12414 else
12415 offset_addr = u_regval[0] - (offset_imm * 4);
12416
12417 address = offset_addr;
12418 }
12419 else
12420 address = u_regval[0];
12421
12422 record_buf_mem[0] = 4;
12423 record_buf_mem[1] = address;
12424 record_buf_mem[2] = 4;
12425 record_buf_mem[3] = address + 4;
12426 thumb2_insn_r->mem_rec_count = 2;
12427 record_buf[0] = reg_rn;
12428 thumb2_insn_r->reg_rec_count = 1;
12429 }
12430 }
12431
12432 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12433 record_buf);
12434 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12435 record_buf_mem);
12436 return ARM_RECORD_SUCCESS;
12437 }
12438
12439 /* Handler for thumb2 data processing (shift register and modified immediate)
12440 instructions. */
12441
12442 static int
12443 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12444 {
12445 uint32_t reg_rd, op;
12446 uint32_t record_buf[8];
12447
12448 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12449 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12450
12451 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12452 {
12453 record_buf[0] = ARM_PS_REGNUM;
12454 thumb2_insn_r->reg_rec_count = 1;
12455 }
12456 else
12457 {
12458 record_buf[0] = reg_rd;
12459 record_buf[1] = ARM_PS_REGNUM;
12460 thumb2_insn_r->reg_rec_count = 2;
12461 }
12462
12463 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12464 record_buf);
12465 return ARM_RECORD_SUCCESS;
12466 }
12467
12468 /* Generic handler for thumb2 instructions which effect destination and PS
12469 registers. */
12470
12471 static int
12472 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12473 {
12474 uint32_t reg_rd;
12475 uint32_t record_buf[8];
12476
12477 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12478
12479 record_buf[0] = reg_rd;
12480 record_buf[1] = ARM_PS_REGNUM;
12481 thumb2_insn_r->reg_rec_count = 2;
12482
12483 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12484 record_buf);
12485 return ARM_RECORD_SUCCESS;
12486 }
12487
12488 /* Handler for thumb2 branch and miscellaneous control instructions. */
12489
12490 static int
12491 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12492 {
12493 uint32_t op, op1, op2;
12494 uint32_t record_buf[8];
12495
12496 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12497 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12498 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12499
12500 /* Handle MSR insn. */
12501 if (!(op1 & 0x2) && 0x38 == op)
12502 {
12503 if (!(op2 & 0x3))
12504 {
12505 /* CPSR is going to be changed. */
12506 record_buf[0] = ARM_PS_REGNUM;
12507 thumb2_insn_r->reg_rec_count = 1;
12508 }
12509 else
12510 {
12511 arm_record_unsupported_insn(thumb2_insn_r);
12512 return -1;
12513 }
12514 }
12515 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12516 {
12517 /* BLX. */
12518 record_buf[0] = ARM_PS_REGNUM;
12519 record_buf[1] = ARM_LR_REGNUM;
12520 thumb2_insn_r->reg_rec_count = 2;
12521 }
12522
12523 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12524 record_buf);
12525 return ARM_RECORD_SUCCESS;
12526 }
12527
12528 /* Handler for thumb2 store single data item instructions. */
12529
12530 static int
12531 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12532 {
12533 struct regcache *reg_cache = thumb2_insn_r->regcache;
12534
12535 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12536 uint32_t address, offset_addr;
12537 uint32_t record_buf[8], record_buf_mem[8];
12538 uint32_t op1, op2;
12539
12540 ULONGEST u_regval[2];
12541
12542 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12543 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12544 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12545 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12546
12547 if (bit (thumb2_insn_r->arm_insn, 23))
12548 {
12549 /* T2 encoding. */
12550 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12551 offset_addr = u_regval[0] + offset_imm;
12552 address = offset_addr;
12553 }
12554 else
12555 {
12556 /* T3 encoding. */
12557 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12558 {
12559 /* Handle STRB (register). */
12560 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12561 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12562 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12563 offset_addr = u_regval[1] << shift_imm;
12564 address = u_regval[0] + offset_addr;
12565 }
12566 else
12567 {
12568 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12569 if (bit (thumb2_insn_r->arm_insn, 10))
12570 {
12571 if (bit (thumb2_insn_r->arm_insn, 9))
12572 offset_addr = u_regval[0] + offset_imm;
12573 else
12574 offset_addr = u_regval[0] - offset_imm;
12575
12576 address = offset_addr;
12577 }
12578 else
12579 address = u_regval[0];
12580 }
12581 }
12582
12583 switch (op1)
12584 {
12585 /* Store byte instructions. */
12586 case 4:
12587 case 0:
12588 record_buf_mem[0] = 1;
12589 break;
12590 /* Store half word instructions. */
12591 case 1:
12592 case 5:
12593 record_buf_mem[0] = 2;
12594 break;
12595 /* Store word instructions. */
12596 case 2:
12597 case 6:
12598 record_buf_mem[0] = 4;
12599 break;
12600
12601 default:
12602 gdb_assert_not_reached ("no decoding pattern found");
12603 break;
12604 }
12605
12606 record_buf_mem[1] = address;
12607 thumb2_insn_r->mem_rec_count = 1;
12608 record_buf[0] = reg_rn;
12609 thumb2_insn_r->reg_rec_count = 1;
12610
12611 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12612 record_buf);
12613 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12614 record_buf_mem);
12615 return ARM_RECORD_SUCCESS;
12616 }
12617
12618 /* Handler for thumb2 load memory hints instructions. */
12619
12620 static int
12621 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12622 {
12623 uint32_t record_buf[8];
12624 uint32_t reg_rt, reg_rn;
12625
12626 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12627 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12628
12629 if (ARM_PC_REGNUM != reg_rt)
12630 {
12631 record_buf[0] = reg_rt;
12632 record_buf[1] = reg_rn;
12633 record_buf[2] = ARM_PS_REGNUM;
12634 thumb2_insn_r->reg_rec_count = 3;
12635
12636 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12637 record_buf);
12638 return ARM_RECORD_SUCCESS;
12639 }
12640
12641 return ARM_RECORD_FAILURE;
12642 }
12643
12644 /* Handler for thumb2 load word instructions. */
12645
12646 static int
12647 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12648 {
12649 uint32_t record_buf[8];
12650
12651 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12652 record_buf[1] = ARM_PS_REGNUM;
12653 thumb2_insn_r->reg_rec_count = 2;
12654
12655 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12656 record_buf);
12657 return ARM_RECORD_SUCCESS;
12658 }
12659
12660 /* Handler for thumb2 long multiply, long multiply accumulate, and
12661 divide instructions. */
12662
12663 static int
12664 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12665 {
12666 uint32_t opcode1 = 0, opcode2 = 0;
12667 uint32_t record_buf[8];
12668
12669 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12670 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12671
12672 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12673 {
12674 /* Handle SMULL, UMULL, SMULAL. */
12675 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12676 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12677 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12678 record_buf[2] = ARM_PS_REGNUM;
12679 thumb2_insn_r->reg_rec_count = 3;
12680 }
12681 else if (1 == opcode1 || 3 == opcode2)
12682 {
12683 /* Handle SDIV and UDIV. */
12684 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12685 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12686 record_buf[2] = ARM_PS_REGNUM;
12687 thumb2_insn_r->reg_rec_count = 3;
12688 }
12689 else
12690 return ARM_RECORD_FAILURE;
12691
12692 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12693 record_buf);
12694 return ARM_RECORD_SUCCESS;
12695 }
12696
12697 /* Record handler for thumb32 coprocessor instructions. */
12698
12699 static int
12700 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12701 {
12702 if (bit (thumb2_insn_r->arm_insn, 25))
12703 return arm_record_coproc_data_proc (thumb2_insn_r);
12704 else
12705 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12706 }
12707
12708 /* Record handler for advance SIMD structure load/store instructions. */
12709
12710 static int
12711 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12712 {
12713 struct regcache *reg_cache = thumb2_insn_r->regcache;
12714 uint32_t l_bit, a_bit, b_bits;
12715 uint32_t record_buf[128], record_buf_mem[128];
12716 uint32_t reg_rn, reg_vd, address, f_elem;
12717 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12718 uint8_t f_ebytes;
12719
12720 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12721 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12722 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12723 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12724 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12725 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12726 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12727 f_elem = 8 / f_ebytes;
12728
12729 if (!l_bit)
12730 {
12731 ULONGEST u_regval = 0;
12732 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12733 address = u_regval;
12734
12735 if (!a_bit)
12736 {
12737 /* Handle VST1. */
12738 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12739 {
12740 if (b_bits == 0x07)
12741 bf_regs = 1;
12742 else if (b_bits == 0x0a)
12743 bf_regs = 2;
12744 else if (b_bits == 0x06)
12745 bf_regs = 3;
12746 else if (b_bits == 0x02)
12747 bf_regs = 4;
12748 else
12749 bf_regs = 0;
12750
12751 for (index_r = 0; index_r < bf_regs; index_r++)
12752 {
12753 for (index_e = 0; index_e < f_elem; index_e++)
12754 {
12755 record_buf_mem[index_m++] = f_ebytes;
12756 record_buf_mem[index_m++] = address;
12757 address = address + f_ebytes;
12758 thumb2_insn_r->mem_rec_count += 1;
12759 }
12760 }
12761 }
12762 /* Handle VST2. */
12763 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12764 {
12765 if (b_bits == 0x09 || b_bits == 0x08)
12766 bf_regs = 1;
12767 else if (b_bits == 0x03)
12768 bf_regs = 2;
12769 else
12770 bf_regs = 0;
12771
12772 for (index_r = 0; index_r < bf_regs; index_r++)
12773 for (index_e = 0; index_e < f_elem; index_e++)
12774 {
12775 for (loop_t = 0; loop_t < 2; loop_t++)
12776 {
12777 record_buf_mem[index_m++] = f_ebytes;
12778 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12779 thumb2_insn_r->mem_rec_count += 1;
12780 }
12781 address = address + (2 * f_ebytes);
12782 }
12783 }
12784 /* Handle VST3. */
12785 else if ((b_bits & 0x0e) == 0x04)
12786 {
12787 for (index_e = 0; index_e < f_elem; index_e++)
12788 {
12789 for (loop_t = 0; loop_t < 3; loop_t++)
12790 {
12791 record_buf_mem[index_m++] = f_ebytes;
12792 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12793 thumb2_insn_r->mem_rec_count += 1;
12794 }
12795 address = address + (3 * f_ebytes);
12796 }
12797 }
12798 /* Handle VST4. */
12799 else if (!(b_bits & 0x0e))
12800 {
12801 for (index_e = 0; index_e < f_elem; index_e++)
12802 {
12803 for (loop_t = 0; loop_t < 4; loop_t++)
12804 {
12805 record_buf_mem[index_m++] = f_ebytes;
12806 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12807 thumb2_insn_r->mem_rec_count += 1;
12808 }
12809 address = address + (4 * f_ebytes);
12810 }
12811 }
12812 }
12813 else
12814 {
12815 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12816
12817 if (bft_size == 0x00)
12818 f_ebytes = 1;
12819 else if (bft_size == 0x01)
12820 f_ebytes = 2;
12821 else if (bft_size == 0x02)
12822 f_ebytes = 4;
12823 else
12824 f_ebytes = 0;
12825
12826 /* Handle VST1. */
12827 if (!(b_bits & 0x0b) || b_bits == 0x08)
12828 thumb2_insn_r->mem_rec_count = 1;
12829 /* Handle VST2. */
12830 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12831 thumb2_insn_r->mem_rec_count = 2;
12832 /* Handle VST3. */
12833 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12834 thumb2_insn_r->mem_rec_count = 3;
12835 /* Handle VST4. */
12836 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12837 thumb2_insn_r->mem_rec_count = 4;
12838
12839 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12840 {
12841 record_buf_mem[index_m] = f_ebytes;
12842 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12843 }
12844 }
12845 }
12846 else
12847 {
12848 if (!a_bit)
12849 {
12850 /* Handle VLD1. */
12851 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12852 thumb2_insn_r->reg_rec_count = 1;
12853 /* Handle VLD2. */
12854 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12855 thumb2_insn_r->reg_rec_count = 2;
12856 /* Handle VLD3. */
12857 else if ((b_bits & 0x0e) == 0x04)
12858 thumb2_insn_r->reg_rec_count = 3;
12859 /* Handle VLD4. */
12860 else if (!(b_bits & 0x0e))
12861 thumb2_insn_r->reg_rec_count = 4;
12862 }
12863 else
12864 {
12865 /* Handle VLD1. */
12866 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12867 thumb2_insn_r->reg_rec_count = 1;
12868 /* Handle VLD2. */
12869 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12870 thumb2_insn_r->reg_rec_count = 2;
12871 /* Handle VLD3. */
12872 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12873 thumb2_insn_r->reg_rec_count = 3;
12874 /* Handle VLD4. */
12875 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12876 thumb2_insn_r->reg_rec_count = 4;
12877
12878 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12879 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12880 }
12881 }
12882
12883 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12884 {
12885 record_buf[index_r] = reg_rn;
12886 thumb2_insn_r->reg_rec_count += 1;
12887 }
12888
12889 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12890 record_buf);
12891 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12892 record_buf_mem);
12893 return 0;
12894 }
12895
12896 /* Decodes thumb2 instruction type and invokes its record handler. */
12897
12898 static unsigned int
12899 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12900 {
12901 uint32_t op, op1, op2;
12902
12903 op = bit (thumb2_insn_r->arm_insn, 15);
12904 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12905 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12906
12907 if (op1 == 0x01)
12908 {
12909 if (!(op2 & 0x64 ))
12910 {
12911 /* Load/store multiple instruction. */
12912 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12913 }
12914 else if ((op2 & 0x64) == 0x4)
12915 {
12916 /* Load/store (dual/exclusive) and table branch instruction. */
12917 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12918 }
12919 else if ((op2 & 0x60) == 0x20)
12920 {
12921 /* Data-processing (shifted register). */
12922 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12923 }
12924 else if (op2 & 0x40)
12925 {
12926 /* Co-processor instructions. */
12927 return thumb2_record_coproc_insn (thumb2_insn_r);
12928 }
12929 }
12930 else if (op1 == 0x02)
12931 {
12932 if (op)
12933 {
12934 /* Branches and miscellaneous control instructions. */
12935 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12936 }
12937 else if (op2 & 0x20)
12938 {
12939 /* Data-processing (plain binary immediate) instruction. */
12940 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12941 }
12942 else
12943 {
12944 /* Data-processing (modified immediate). */
12945 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12946 }
12947 }
12948 else if (op1 == 0x03)
12949 {
12950 if (!(op2 & 0x71 ))
12951 {
12952 /* Store single data item. */
12953 return thumb2_record_str_single_data (thumb2_insn_r);
12954 }
12955 else if (!((op2 & 0x71) ^ 0x10))
12956 {
12957 /* Advanced SIMD or structure load/store instructions. */
12958 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12959 }
12960 else if (!((op2 & 0x67) ^ 0x01))
12961 {
12962 /* Load byte, memory hints instruction. */
12963 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12964 }
12965 else if (!((op2 & 0x67) ^ 0x03))
12966 {
12967 /* Load halfword, memory hints instruction. */
12968 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12969 }
12970 else if (!((op2 & 0x67) ^ 0x05))
12971 {
12972 /* Load word instruction. */
12973 return thumb2_record_ld_word (thumb2_insn_r);
12974 }
12975 else if (!((op2 & 0x70) ^ 0x20))
12976 {
12977 /* Data-processing (register) instruction. */
12978 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12979 }
12980 else if (!((op2 & 0x78) ^ 0x30))
12981 {
12982 /* Multiply, multiply accumulate, abs diff instruction. */
12983 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12984 }
12985 else if (!((op2 & 0x78) ^ 0x38))
12986 {
12987 /* Long multiply, long multiply accumulate, and divide. */
12988 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12989 }
12990 else if (op2 & 0x40)
12991 {
12992 /* Co-processor instructions. */
12993 return thumb2_record_coproc_insn (thumb2_insn_r);
12994 }
12995 }
12996
12997 return -1;
12998 }
12999
13000 namespace {
13001 /* Abstract memory reader. */
13002
13003 class abstract_memory_reader
13004 {
13005 public:
13006 /* Read LEN bytes of target memory at address MEMADDR, placing the
13007 results in GDB's memory at BUF. Return true on success. */
13008
13009 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13010 };
13011
13012 /* Instruction reader from real target. */
13013
13014 class instruction_reader : public abstract_memory_reader
13015 {
13016 public:
13017 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13018 {
13019 if (target_read_memory (memaddr, buf, len))
13020 return false;
13021 else
13022 return true;
13023 }
13024 };
13025
13026 } // namespace
13027
13028 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13029 and positive val on failure. */
13030
13031 static int
13032 extract_arm_insn (abstract_memory_reader& reader,
13033 insn_decode_record *insn_record, uint32_t insn_size)
13034 {
13035 gdb_byte buf[insn_size];
13036
13037 memset (&buf[0], 0, insn_size);
13038
13039 if (!reader.read (insn_record->this_addr, buf, insn_size))
13040 return 1;
13041 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13042 insn_size,
13043 gdbarch_byte_order_for_code (insn_record->gdbarch));
13044 return 0;
13045 }
13046
13047 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13048
13049 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13050 dispatch it. */
13051
13052 static int
13053 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13054 record_type_t record_type, uint32_t insn_size)
13055 {
13056
13057 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13058 instruction. */
13059 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13060 {
13061 arm_record_data_proc_misc_ld_str, /* 000. */
13062 arm_record_data_proc_imm, /* 001. */
13063 arm_record_ld_st_imm_offset, /* 010. */
13064 arm_record_ld_st_reg_offset, /* 011. */
13065 arm_record_ld_st_multiple, /* 100. */
13066 arm_record_b_bl, /* 101. */
13067 arm_record_asimd_vfp_coproc, /* 110. */
13068 arm_record_coproc_data_proc /* 111. */
13069 };
13070
13071 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13072 instruction. */
13073 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13074 { \
13075 thumb_record_shift_add_sub, /* 000. */
13076 thumb_record_add_sub_cmp_mov, /* 001. */
13077 thumb_record_ld_st_reg_offset, /* 010. */
13078 thumb_record_ld_st_imm_offset, /* 011. */
13079 thumb_record_ld_st_stack, /* 100. */
13080 thumb_record_misc, /* 101. */
13081 thumb_record_ldm_stm_swi, /* 110. */
13082 thumb_record_branch /* 111. */
13083 };
13084
13085 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13086 uint32_t insn_id = 0;
13087
13088 if (extract_arm_insn (reader, arm_record, insn_size))
13089 {
13090 if (record_debug)
13091 {
13092 printf_unfiltered (_("Process record: error reading memory at "
13093 "addr %s len = %d.\n"),
13094 paddress (arm_record->gdbarch,
13095 arm_record->this_addr), insn_size);
13096 }
13097 return -1;
13098 }
13099 else if (ARM_RECORD == record_type)
13100 {
13101 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13102 insn_id = bits (arm_record->arm_insn, 25, 27);
13103
13104 if (arm_record->cond == 0xf)
13105 ret = arm_record_extension_space (arm_record);
13106 else
13107 {
13108 /* If this insn has fallen into extension space
13109 then we need not decode it anymore. */
13110 ret = arm_handle_insn[insn_id] (arm_record);
13111 }
13112 if (ret != ARM_RECORD_SUCCESS)
13113 {
13114 arm_record_unsupported_insn (arm_record);
13115 ret = -1;
13116 }
13117 }
13118 else if (THUMB_RECORD == record_type)
13119 {
13120 /* As thumb does not have condition codes, we set negative. */
13121 arm_record->cond = -1;
13122 insn_id = bits (arm_record->arm_insn, 13, 15);
13123 ret = thumb_handle_insn[insn_id] (arm_record);
13124 if (ret != ARM_RECORD_SUCCESS)
13125 {
13126 arm_record_unsupported_insn (arm_record);
13127 ret = -1;
13128 }
13129 }
13130 else if (THUMB2_RECORD == record_type)
13131 {
13132 /* As thumb does not have condition codes, we set negative. */
13133 arm_record->cond = -1;
13134
13135 /* Swap first half of 32bit thumb instruction with second half. */
13136 arm_record->arm_insn
13137 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13138
13139 ret = thumb2_record_decode_insn_handler (arm_record);
13140
13141 if (ret != ARM_RECORD_SUCCESS)
13142 {
13143 arm_record_unsupported_insn (arm_record);
13144 ret = -1;
13145 }
13146 }
13147 else
13148 {
13149 /* Throw assertion. */
13150 gdb_assert_not_reached ("not a valid instruction, could not decode");
13151 }
13152
13153 return ret;
13154 }
13155
13156 #if GDB_SELF_TEST
13157 namespace selftests {
13158
13159 /* Provide both 16-bit and 32-bit thumb instructions. */
13160
13161 class instruction_reader_thumb : public abstract_memory_reader
13162 {
13163 public:
13164 template<size_t SIZE>
13165 instruction_reader_thumb (enum bfd_endian endian,
13166 const uint16_t (&insns)[SIZE])
13167 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13168 {}
13169
13170 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13171 {
13172 SELF_CHECK (len == 4 || len == 2);
13173 SELF_CHECK (memaddr % 2 == 0);
13174 SELF_CHECK ((memaddr / 2) < m_insns_size);
13175
13176 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13177 if (len == 4)
13178 {
13179 store_unsigned_integer (&buf[2], 2, m_endian,
13180 m_insns[memaddr / 2 + 1]);
13181 }
13182 return true;
13183 }
13184
13185 private:
13186 enum bfd_endian m_endian;
13187 const uint16_t *m_insns;
13188 size_t m_insns_size;
13189 };
13190
13191 static void
13192 arm_record_test (void)
13193 {
13194 struct gdbarch_info info;
13195 gdbarch_info_init (&info);
13196 info.bfd_arch_info = bfd_scan_arch ("arm");
13197
13198 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13199
13200 SELF_CHECK (gdbarch != NULL);
13201
13202 /* 16-bit Thumb instructions. */
13203 {
13204 insn_decode_record arm_record;
13205
13206 memset (&arm_record, 0, sizeof (insn_decode_record));
13207 arm_record.gdbarch = gdbarch;
13208
13209 static const uint16_t insns[] = {
13210 /* db b2 uxtb r3, r3 */
13211 0xb2db,
13212 /* cd 58 ldr r5, [r1, r3] */
13213 0x58cd,
13214 };
13215
13216 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13217 instruction_reader_thumb reader (endian, insns);
13218 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13219 THUMB_INSN_SIZE_BYTES);
13220
13221 SELF_CHECK (ret == 0);
13222 SELF_CHECK (arm_record.mem_rec_count == 0);
13223 SELF_CHECK (arm_record.reg_rec_count == 1);
13224 SELF_CHECK (arm_record.arm_regs[0] == 3);
13225
13226 arm_record.this_addr += 2;
13227 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13228 THUMB_INSN_SIZE_BYTES);
13229
13230 SELF_CHECK (ret == 0);
13231 SELF_CHECK (arm_record.mem_rec_count == 0);
13232 SELF_CHECK (arm_record.reg_rec_count == 1);
13233 SELF_CHECK (arm_record.arm_regs[0] == 5);
13234 }
13235
13236 /* 32-bit Thumb-2 instructions. */
13237 {
13238 insn_decode_record arm_record;
13239
13240 memset (&arm_record, 0, sizeof (insn_decode_record));
13241 arm_record.gdbarch = gdbarch;
13242
13243 static const uint16_t insns[] = {
13244 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13245 0xee1d, 0x7f70,
13246 };
13247
13248 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13249 instruction_reader_thumb reader (endian, insns);
13250 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13251 THUMB2_INSN_SIZE_BYTES);
13252
13253 SELF_CHECK (ret == 0);
13254 SELF_CHECK (arm_record.mem_rec_count == 0);
13255 SELF_CHECK (arm_record.reg_rec_count == 1);
13256 SELF_CHECK (arm_record.arm_regs[0] == 7);
13257 }
13258 }
13259 } // namespace selftests
13260 #endif /* GDB_SELF_TEST */
13261
13262 /* Cleans up local record registers and memory allocations. */
13263
13264 static void
13265 deallocate_reg_mem (insn_decode_record *record)
13266 {
13267 xfree (record->arm_regs);
13268 xfree (record->arm_mems);
13269 }
13270
13271
13272 /* Parse the current instruction and record the values of the registers and
13273 memory that will be changed in current instruction to record_arch_list".
13274 Return -1 if something is wrong. */
13275
13276 int
13277 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13278 CORE_ADDR insn_addr)
13279 {
13280
13281 uint32_t no_of_rec = 0;
13282 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13283 ULONGEST t_bit = 0, insn_id = 0;
13284
13285 ULONGEST u_regval = 0;
13286
13287 insn_decode_record arm_record;
13288
13289 memset (&arm_record, 0, sizeof (insn_decode_record));
13290 arm_record.regcache = regcache;
13291 arm_record.this_addr = insn_addr;
13292 arm_record.gdbarch = gdbarch;
13293
13294
13295 if (record_debug > 1)
13296 {
13297 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13298 "addr = %s\n",
13299 paddress (gdbarch, arm_record.this_addr));
13300 }
13301
13302 instruction_reader reader;
13303 if (extract_arm_insn (reader, &arm_record, 2))
13304 {
13305 if (record_debug)
13306 {
13307 printf_unfiltered (_("Process record: error reading memory at "
13308 "addr %s len = %d.\n"),
13309 paddress (arm_record.gdbarch,
13310 arm_record.this_addr), 2);
13311 }
13312 return -1;
13313 }
13314
13315 /* Check the insn, whether it is thumb or arm one. */
13316
13317 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13318 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13319
13320
13321 if (!(u_regval & t_bit))
13322 {
13323 /* We are decoding arm insn. */
13324 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13325 }
13326 else
13327 {
13328 insn_id = bits (arm_record.arm_insn, 11, 15);
13329 /* is it thumb2 insn? */
13330 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13331 {
13332 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13333 THUMB2_INSN_SIZE_BYTES);
13334 }
13335 else
13336 {
13337 /* We are decoding thumb insn. */
13338 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13339 THUMB_INSN_SIZE_BYTES);
13340 }
13341 }
13342
13343 if (0 == ret)
13344 {
13345 /* Record registers. */
13346 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13347 if (arm_record.arm_regs)
13348 {
13349 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13350 {
13351 if (record_full_arch_list_add_reg
13352 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13353 ret = -1;
13354 }
13355 }
13356 /* Record memories. */
13357 if (arm_record.arm_mems)
13358 {
13359 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13360 {
13361 if (record_full_arch_list_add_mem
13362 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13363 arm_record.arm_mems[no_of_rec].len))
13364 ret = -1;
13365 }
13366 }
13367
13368 if (record_full_arch_list_add_end ())
13369 ret = -1;
13370 }
13371
13372
13373 deallocate_reg_mem (&arm_record);
13374
13375 return ret;
13376 }
13377
13378 /* See arm-tdep.h. */
13379
13380 const target_desc *
13381 arm_read_description (arm_fp_type fp_type)
13382 {
13383 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13384
13385 if (tdesc == nullptr)
13386 {
13387 tdesc = arm_create_target_description (fp_type);
13388 tdesc_arm_list[fp_type] = tdesc;
13389 }
13390
13391 return tdesc;
13392 }
13393
13394 /* See arm-tdep.h. */
13395
13396 const target_desc *
13397 arm_read_mprofile_description (arm_m_profile_type m_type)
13398 {
13399 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13400
13401 if (tdesc == nullptr)
13402 {
13403 tdesc = arm_create_mprofile_target_description (m_type);
13404 tdesc_arm_mprofile_list[m_type] = tdesc;
13405 }
13406
13407 return tdesc;
13408 }
This page took 0.387103 seconds and 4 git commands to generate.