Arm: Allow version strings in the triplet regexp
[deliverable/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "disasm.h"
31 #include "regcache.h"
32 #include "reggroups.h"
33 #include "target-float.h"
34 #include "value.h"
35 #include "arch-utils.h"
36 #include "osabi.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
40 #include "objfiles.h"
41 #include "dwarf2-frame.h"
42 #include "gdbtypes.h"
43 #include "prologue-value.h"
44 #include "remote.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48
49 #include "arch/arm.h"
50 #include "arch/arm-get-next-pcs.h"
51 #include "arm-tdep.h"
52 #include "gdb/sim-arm.h"
53
54 #include "elf-bfd.h"
55 #include "coff/internal.h"
56 #include "elf/arm.h"
57
58 #include "common/vec.h"
59
60 #include "record.h"
61 #include "record-full.h"
62 #include <algorithm>
63
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
71
72 #if GDB_SELF_TEST
73 #include "common/selftest.h"
74 #endif
75
76 static int arm_debug;
77
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
84
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
87
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
90
91 /* Per-objfile data used for mapping symbols. */
92 static const struct objfile_data *arm_objfile_data_key;
93
/* One ELF mapping symbol ($a, $d or $t), stored as the section-relative
   address it marks plus its kind.  Kept in per-section vectors that are
   sorted by address so they can be binary-searched (see
   arm_find_mapping_symbol).  */

struct arm_mapping_symbol
{
  /* Address of the mapping symbol, relative to the start of its
     section.  */
  bfd_vma value;

  /* The character following the '$' in the symbol name -- 'a' (ARM
     code), 'd' (data) or 't' (Thumb code).  */
  char type;

  /* Order by address, for sorting and std::lower_bound lookups.  */
  bool operator< (const arm_mapping_symbol &other) const
  { return this->value < other.value; }
};
102
103 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
104
/* Per-objfile data recording the mapping symbols ($a, $d, $t) of each
   section, used to classify addresses as ARM code, Thumb code or data.
   Attached to an objfile through arm_objfile_data_key.  */

struct arm_per_objfile
{
  /* NUM_SECTIONS is the number of BFD sections in the objfile; one
     (initially empty, unsorted) vector is allocated per section.  */
  explicit arm_per_objfile (size_t num_sections)
  : section_maps (new arm_mapping_symbol_vec[num_sections]),
    section_maps_sorted (new bool[num_sections] ())
  {}

  DISABLE_COPY_AND_ASSIGN (arm_per_objfile);

  /* Information about mapping symbols ($a, $d, $t) in the objfile.

     The format is an array of vectors of arm_mapping_symbols, there is one
     vector for each section of the objfile (the array is indexed by the
     BFD section index).

     For each section, the vector of arm_mapping_symbol is sorted by
     symbol value (address).  */
  std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;

  /* For each corresponding element of section_maps above, is this vector
     sorted.  Sorting is done lazily, on first lookup
     (see arm_find_mapping_symbol).  */
  std::unique_ptr<bool[]> section_maps_sorted;
};
128
129 /* The list of available "set arm ..." and "show arm ..." commands. */
130 static struct cmd_list_element *setarmcmdlist = NULL;
131 static struct cmd_list_element *showarmcmdlist = NULL;
132
133 /* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
135 static const char *const fp_model_strings[] =
136 {
137 "auto",
138 "softfpa",
139 "fpa",
140 "softvfp",
141 "vfp",
142 NULL
143 };
144
145 /* A variable that can be configured by the user. */
146 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
147 static const char *current_fp_model = "auto";
148
149 /* The ABI to use. Keep this in sync with arm_abi_kind. */
150 static const char *const arm_abi_strings[] =
151 {
152 "auto",
153 "APCS",
154 "AAPCS",
155 NULL
156 };
157
158 /* A variable that can be configured by the user. */
159 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
160 static const char *arm_abi_string = "auto";
161
162 /* The execution mode to assume. */
163 static const char *const arm_mode_strings[] =
164 {
165 "auto",
166 "arm",
167 "thumb",
168 NULL
169 };
170
171 static const char *arm_fallback_mode_string = "auto";
172 static const char *arm_force_mode_string = "auto";
173
174 /* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added in this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
178 static const struct
179 {
180 const char *name;
181 int regnum;
182 } arm_register_aliases[] = {
183 /* Basic register numbers. */
184 { "r0", 0 },
185 { "r1", 1 },
186 { "r2", 2 },
187 { "r3", 3 },
188 { "r4", 4 },
189 { "r5", 5 },
190 { "r6", 6 },
191 { "r7", 7 },
192 { "r8", 8 },
193 { "r9", 9 },
194 { "r10", 10 },
195 { "r11", 11 },
196 { "r12", 12 },
197 { "r13", 13 },
198 { "r14", 14 },
199 { "r15", 15 },
200 /* Synonyms (argument and variable registers). */
201 { "a1", 0 },
202 { "a2", 1 },
203 { "a3", 2 },
204 { "a4", 3 },
205 { "v1", 4 },
206 { "v2", 5 },
207 { "v3", 6 },
208 { "v4", 7 },
209 { "v5", 8 },
210 { "v6", 9 },
211 { "v7", 10 },
212 { "v8", 11 },
213 /* Other platform-specific names for r9. */
214 { "sb", 9 },
215 { "tr", 9 },
216 /* Special names. */
217 { "ip", 12 },
218 { "lr", 14 },
219 /* Names used by GCC (not listed in the ARM EABI). */
220 { "sl", 10 },
221 /* A special name from the older ATPCS. */
222 { "wr", 7 },
223 };
224
225 static const char *const arm_register_names[] =
226 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
227 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
228 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
229 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
230 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
231 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
232 "fps", "cpsr" }; /* 24 25 */
233
234 /* Holds the current set of options to be passed to the disassembler. */
235 static char *arm_disassembler_options;
236
237 /* Valid register name styles. */
238 static const char **valid_disassembly_styles;
239
240 /* Disassembly style to use. Default to "std" register names. */
241 static const char *disassembly_style;
242
243 /* This is used to keep the bfd arch_info in sync with the disassembly
244 style. */
245 static void set_disassembly_style_sfunc (const char *, int,
246 struct cmd_list_element *);
247 static void show_disassembly_style_sfunc (struct ui_file *, int,
248 struct cmd_list_element *,
249 const char *);
250
251 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
252 readable_regcache *regcache,
253 int regnum, gdb_byte *buf);
254 static void arm_neon_quad_write (struct gdbarch *gdbarch,
255 struct regcache *regcache,
256 int regnum, const gdb_byte *buf);
257
/* Forward declaration: compute the PC following a syscall instruction
   when software single-stepping over it.  */
static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations -- the callback table consumed by the common
   software single-step code (see arch/arm-get-next-pcs.h).  The last
   slot is unused here, hence NULL.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};
270
/* Per-frame state recovered by the prologue analyzers (see
   arm_analyze_prologue and thumb_analyze_prologue).  */

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */
  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
290
291 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
292 CORE_ADDR prologue_start,
293 CORE_ADDR prologue_end,
294 struct arm_prologue_cache *cache);
295
296 /* Architecture version for displaced stepping. This effects the behaviour of
297 certain instructions, and really should not be hard-wired. */
298
299 #define DISPLACED_STEPPING_ARCH_VERSION 5
300
301 /* Set to true if the 32-bit mode is in use. */
302
303 int arm_apcs_32 = 1;
304
305 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
306
307 int
308 arm_psr_thumb_bit (struct gdbarch *gdbarch)
309 {
310 if (gdbarch_tdep (gdbarch)->is_m)
311 return XPSR_T;
312 else
313 return CPSR_T;
314 }
315
316 /* Determine if the processor is currently executing in Thumb mode. */
317
318 int
319 arm_is_thumb (struct regcache *regcache)
320 {
321 ULONGEST cpsr;
322 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
323
324 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
325
326 return (cpsr & t_bit) != 0;
327 }
328
329 /* Determine if FRAME is executing in Thumb mode. */
330
331 int
332 arm_frame_is_thumb (struct frame_info *frame)
333 {
334 CORE_ADDR cpsr;
335 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
336
337 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
338 directly (from a signal frame or dummy frame) or by interpreting
339 the saved LR (from a prologue or DWARF frame). So consult it and
340 trust the unwinders. */
341 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
342
343 return (cpsr & t_bit) != 0;
344 }
345
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type ('a', 'd' or 't').  Otherwise, return 0.  If START
   is non-NULL, set *START to the (absolute) location of the mapping
   symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      arm_per_objfile *data
	= (struct arm_per_objfile *) objfile_data (sec->objfile,
						   arm_objfile_data_key);
      if (data != NULL)
	{
	  unsigned int section_idx = sec->the_bfd_section->index;
	  arm_mapping_symbol_vec &map
	    = data->section_maps[section_idx];

	  /* Sort the vector on first use, so that the cost is only paid
	     for sections that are actually looked up.  */
	  if (!data->section_maps_sorted[section_idx])
	    {
	      std::sort (map.begin (), map.end ());
	      data->section_maps_sorted[section_idx] = true;
	    }

	  /* Mapping symbol values are section-relative, so convert
	     MEMADDR before searching.  */
	  struct arm_mapping_symbol map_key
	    = { memaddr - obj_section_addr (sec), 0 };
	  arm_mapping_symbol_vec::const_iterator it
	    = std::lower_bound (map.begin (), map.end (), map_key);

	  /* std::lower_bound finds the earliest ordered insertion
	     point.  If the symbol at this position starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (it < map.end ())
	    {
	      if (it->value == map_key.value)
		{
		  if (start)
		    *start = it->value + obj_section_addr (sec);
		  return it->type;
		}
	    }

	  /* Fall back to the closest preceding symbol, if any; an
	     address before the first mapping symbol is not covered.  */
	  if (it > map.begin ())
	    {
	      arm_mapping_symbol_vec::const_iterator prev_it
		= it - 1;

	      if (start)
		*start = prev_it->value + obj_section_addr (sec);
	      return prev_it->type;
	    }
	}
    }

  return 0;
}
408
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.

   Returns nonzero for Thumb, zero for ARM.  The checks below are
   ordered from most to least authoritative; their order is part of the
   contract.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  arm_displaced_step_closure *dsc
    = ((arm_displaced_step_closure * )
       get_displaced_step_closure_by_addr (memaddr));

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
475
476 /* Determine if the address specified equals any of these magic return
477 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
478 architectures.
479
480 From ARMv6-M Reference Manual B1.5.8
481 Table B1-5 Exception return behavior
482
483 EXC_RETURN Return To Return Stack
484 0xFFFFFFF1 Handler mode Main
485 0xFFFFFFF9 Thread mode Main
486 0xFFFFFFFD Thread mode Process
487
488 From ARMv7-M Reference Manual B1.5.8
489 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
490
491 EXC_RETURN Return To Return Stack
492 0xFFFFFFF1 Handler mode Main
493 0xFFFFFFF9 Thread mode Main
494 0xFFFFFFFD Thread mode Process
495
496 Table B1-9 EXC_RETURN definition of exception return behavior, with
497 FP
498
499 EXC_RETURN Return To Return Stack Frame Type
500 0xFFFFFFE1 Handler mode Main Extended
501 0xFFFFFFE9 Thread mode Main Extended
502 0xFFFFFFED Thread mode Process Extended
503 0xFFFFFFF1 Handler mode Main Basic
504 0xFFFFFFF9 Thread mode Main Basic
505 0xFFFFFFFD Thread mode Process Basic
506
507 For more details see "B1.5.8 Exception return behavior"
508 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
509
510 static int
511 arm_m_addr_is_magic (CORE_ADDR addr)
512 {
513 switch (addr)
514 {
515 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
516 the exception return behavior. */
517 case 0xffffffe1:
518 case 0xffffffe9:
519 case 0xffffffed:
520 case 0xfffffff1:
521 case 0xfffffff9:
522 case 0xfffffffd:
523 /* Address is magic. */
524 return 1;
525
526 default:
527 /* Address is not magic. */
528 return 0;
529 }
530 }
531
532 /* Remove useless bits from addresses in a running program. */
533 static CORE_ADDR
534 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
535 {
536 /* On M-profile devices, do not strip the low bit from EXC_RETURN
537 (the magic exception return address). */
538 if (gdbarch_tdep (gdbarch)->is_m
539 && arm_m_addr_is_magic (val))
540 return val;
541
542 if (arm_apcs_32)
543 return UNMAKE_THUMB_ADDR (val);
544 else
545 return (val & 0x03fffffc);
546 }
547
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  /* Only a symbol that starts exactly at PC and has a linkage name can
     identify a helper by name.  */
  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the leading "__" so the startswith
	 checks below also match such stubs (e.g. the stub for
	 __truncdfsf2 is ____truncdfsf2_from_thumb, which after the
	 skip starts with "__truncdfsf2").  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
601
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  The 16-bit immediate is assembled as imm4:i:imm3:imm8
   from the instruction's scattered fields.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  The 16-bit immediate is assembled as
   imm4:imm12.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
616
/* Decode a 12-bit modified immediate; implements the
   ThumbExpandImmediate pseudo-op.  The top five bits of IMM select the
   expansion: values below 8 replicate the low byte into fixed
   positions, anything else rotates an 8-bit pattern.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rot = imm >> 7;
  unsigned int byte = imm & 0xff;

  if (rot >= 8)
    /* 1bcdefgh rotated right by ROT bits.  */
    return (0x80 | (imm & 0x7f)) << (32 - rot);

  switch (rot / 2)
    {
    case 0:		/* 00000000 00000000 00000000 abcdefgh */
      return byte;
    case 1:		/* 00000000 abcdefgh 00000000 abcdefgh */
      return byte | (byte << 16);
    case 2:		/* abcdefgh 00000000 abcdefgh 00000000 */
      return (byte << 8) | (byte << 24);
    default:		/* abcdefgh abcdefgh abcdefgh abcdefgh */
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
640
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  return (insn & 0xfe00) == 0xbc00;	/* pop <registers> */
}
651
652 /* Analyze a Thumb prologue, looking for a recognizable stack frame
653 and frame pointer. Scan until we encounter a store that could
654 clobber the stack frame unexpectedly, or an unknown instruction.
655 Return the last address which is definitely safe to skip for an
656 initial breakpoint. */
657
658 static CORE_ADDR
659 thumb_analyze_prologue (struct gdbarch *gdbarch,
660 CORE_ADDR start, CORE_ADDR limit,
661 struct arm_prologue_cache *cache)
662 {
663 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
664 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
665 int i;
666 pv_t regs[16];
667 CORE_ADDR offset;
668 CORE_ADDR unrecognized_pc = 0;
669
670 for (i = 0; i < 16; i++)
671 regs[i] = pv_register (i, 0);
672 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
673
674 while (start < limit)
675 {
676 unsigned short insn;
677
678 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
679
680 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
681 {
682 int regno;
683 int mask;
684
685 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
686 break;
687
688 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
689 whether to save LR (R14). */
690 mask = (insn & 0xff) | ((insn & 0x100) << 6);
691
692 /* Calculate offsets of saved R0-R7 and LR. */
693 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
694 if (mask & (1 << regno))
695 {
696 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
697 -4);
698 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
699 }
700 }
701 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
702 {
703 offset = (insn & 0x7f) << 2; /* get scaled offset */
704 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
705 -offset);
706 }
707 else if (thumb_instruction_restores_sp (insn))
708 {
709 /* Don't scan past the epilogue. */
710 break;
711 }
712 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
713 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
714 (insn & 0xff) << 2);
715 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
716 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
717 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
718 bits (insn, 6, 8));
719 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
720 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
721 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
722 bits (insn, 0, 7));
723 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
724 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
725 && pv_is_constant (regs[bits (insn, 3, 5)]))
726 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
727 regs[bits (insn, 6, 8)]);
728 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
729 && pv_is_constant (regs[bits (insn, 3, 6)]))
730 {
731 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
732 int rm = bits (insn, 3, 6);
733 regs[rd] = pv_add (regs[rd], regs[rm]);
734 }
735 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
736 {
737 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
738 int src_reg = (insn & 0x78) >> 3;
739 regs[dst_reg] = regs[src_reg];
740 }
741 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
742 {
743 /* Handle stores to the stack. Normally pushes are used,
744 but with GCC -mtpcs-frame, there may be other stores
745 in the prologue to create the frame. */
746 int regno = (insn >> 8) & 0x7;
747 pv_t addr;
748
749 offset = (insn & 0xff) << 2;
750 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
751
752 if (stack.store_would_trash (addr))
753 break;
754
755 stack.store (addr, 4, regs[regno]);
756 }
757 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
758 {
759 int rd = bits (insn, 0, 2);
760 int rn = bits (insn, 3, 5);
761 pv_t addr;
762
763 offset = bits (insn, 6, 10) << 2;
764 addr = pv_add_constant (regs[rn], offset);
765
766 if (stack.store_would_trash (addr))
767 break;
768
769 stack.store (addr, 4, regs[rd]);
770 }
771 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
772 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
773 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
774 /* Ignore stores of argument registers to the stack. */
775 ;
776 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
777 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
778 /* Ignore block loads from the stack, potentially copying
779 parameters from memory. */
780 ;
781 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
782 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
783 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
784 /* Similarly ignore single loads from the stack. */
785 ;
786 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
787 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
788 /* Skip register copies, i.e. saves to another register
789 instead of the stack. */
790 ;
791 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
792 /* Recognize constant loads; even with small stacks these are necessary
793 on Thumb. */
794 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
795 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
796 {
797 /* Constant pool loads, for the same reason. */
798 unsigned int constant;
799 CORE_ADDR loc;
800
801 loc = start + 4 + bits (insn, 0, 7) * 4;
802 constant = read_memory_unsigned_integer (loc, 4, byte_order);
803 regs[bits (insn, 8, 10)] = pv_constant (constant);
804 }
805 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
806 {
807 unsigned short inst2;
808
809 inst2 = read_code_unsigned_integer (start + 2, 2,
810 byte_order_for_code);
811
812 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
813 {
814 /* BL, BLX. Allow some special function calls when
815 skipping the prologue; GCC generates these before
816 storing arguments to the stack. */
817 CORE_ADDR nextpc;
818 int j1, j2, imm1, imm2;
819
820 imm1 = sbits (insn, 0, 10);
821 imm2 = bits (inst2, 0, 10);
822 j1 = bit (inst2, 13);
823 j2 = bit (inst2, 11);
824
825 offset = ((imm1 << 12) + (imm2 << 1));
826 offset ^= ((!j2) << 22) | ((!j1) << 23);
827
828 nextpc = start + 4 + offset;
829 /* For BLX make sure to clear the low bits. */
830 if (bit (inst2, 12) == 0)
831 nextpc = nextpc & 0xfffffffc;
832
833 if (!skip_prologue_function (gdbarch, nextpc,
834 bit (inst2, 12) != 0))
835 break;
836 }
837
838 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
839 { registers } */
840 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
841 {
842 pv_t addr = regs[bits (insn, 0, 3)];
843 int regno;
844
845 if (stack.store_would_trash (addr))
846 break;
847
848 /* Calculate offsets of saved registers. */
849 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
850 if (inst2 & (1 << regno))
851 {
852 addr = pv_add_constant (addr, -4);
853 stack.store (addr, 4, regs[regno]);
854 }
855
856 if (insn & 0x0020)
857 regs[bits (insn, 0, 3)] = addr;
858 }
859
860 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
861 [Rn, #+/-imm]{!} */
862 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
863 {
864 int regno1 = bits (inst2, 12, 15);
865 int regno2 = bits (inst2, 8, 11);
866 pv_t addr = regs[bits (insn, 0, 3)];
867
868 offset = inst2 & 0xff;
869 if (insn & 0x0080)
870 addr = pv_add_constant (addr, offset);
871 else
872 addr = pv_add_constant (addr, -offset);
873
874 if (stack.store_would_trash (addr))
875 break;
876
877 stack.store (addr, 4, regs[regno1]);
878 stack.store (pv_add_constant (addr, 4),
879 4, regs[regno2]);
880
881 if (insn & 0x0020)
882 regs[bits (insn, 0, 3)] = addr;
883 }
884
885 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
886 && (inst2 & 0x0c00) == 0x0c00
887 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
888 {
889 int regno = bits (inst2, 12, 15);
890 pv_t addr = regs[bits (insn, 0, 3)];
891
892 offset = inst2 & 0xff;
893 if (inst2 & 0x0200)
894 addr = pv_add_constant (addr, offset);
895 else
896 addr = pv_add_constant (addr, -offset);
897
898 if (stack.store_would_trash (addr))
899 break;
900
901 stack.store (addr, 4, regs[regno]);
902
903 if (inst2 & 0x0100)
904 regs[bits (insn, 0, 3)] = addr;
905 }
906
907 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
908 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
909 {
910 int regno = bits (inst2, 12, 15);
911 pv_t addr;
912
913 offset = inst2 & 0xfff;
914 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
915
916 if (stack.store_would_trash (addr))
917 break;
918
919 stack.store (addr, 4, regs[regno]);
920 }
921
922 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
923 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
924 /* Ignore stores of argument registers to the stack. */
925 ;
926
927 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
928 && (inst2 & 0x0d00) == 0x0c00
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
930 /* Ignore stores of argument registers to the stack. */
931 ;
932
933 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
934 { registers } */
935 && (inst2 & 0x8000) == 0x0000
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 /* Ignore block loads from the stack, potentially copying
938 parameters from memory. */
939 ;
940
941 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
942 [Rn, #+/-imm] */
943 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 /* Similarly ignore dual loads from the stack. */
945 ;
946
947 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
948 && (inst2 & 0x0d00) == 0x0c00
949 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
950 /* Similarly ignore single loads from the stack. */
951 ;
952
953 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
954 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
955 /* Similarly ignore single loads from the stack. */
956 ;
957
958 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
959 && (inst2 & 0x8000) == 0x0000)
960 {
961 unsigned int imm = ((bits (insn, 10, 10) << 11)
962 | (bits (inst2, 12, 14) << 8)
963 | bits (inst2, 0, 7));
964
965 regs[bits (inst2, 8, 11)]
966 = pv_add_constant (regs[bits (insn, 0, 3)],
967 thumb_expand_immediate (imm));
968 }
969
970 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
971 && (inst2 & 0x8000) == 0x0000)
972 {
973 unsigned int imm = ((bits (insn, 10, 10) << 11)
974 | (bits (inst2, 12, 14) << 8)
975 | bits (inst2, 0, 7));
976
977 regs[bits (inst2, 8, 11)]
978 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
979 }
980
981 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
982 && (inst2 & 0x8000) == 0x0000)
983 {
984 unsigned int imm = ((bits (insn, 10, 10) << 11)
985 | (bits (inst2, 12, 14) << 8)
986 | bits (inst2, 0, 7));
987
988 regs[bits (inst2, 8, 11)]
989 = pv_add_constant (regs[bits (insn, 0, 3)],
990 - (CORE_ADDR) thumb_expand_immediate (imm));
991 }
992
993 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
994 && (inst2 & 0x8000) == 0x0000)
995 {
996 unsigned int imm = ((bits (insn, 10, 10) << 11)
997 | (bits (inst2, 12, 14) << 8)
998 | bits (inst2, 0, 7));
999
1000 regs[bits (inst2, 8, 11)]
1001 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1002 }
1003
1004 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1005 {
1006 unsigned int imm = ((bits (insn, 10, 10) << 11)
1007 | (bits (inst2, 12, 14) << 8)
1008 | bits (inst2, 0, 7));
1009
1010 regs[bits (inst2, 8, 11)]
1011 = pv_constant (thumb_expand_immediate (imm));
1012 }
1013
1014 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1015 {
1016 unsigned int imm
1017 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1018
1019 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1020 }
1021
1022 else if (insn == 0xea5f /* mov.w Rd,Rm */
1023 && (inst2 & 0xf0f0) == 0)
1024 {
1025 int dst_reg = (inst2 & 0x0f00) >> 8;
1026 int src_reg = inst2 & 0xf;
1027 regs[dst_reg] = regs[src_reg];
1028 }
1029
1030 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1031 {
1032 /* Constant pool loads. */
1033 unsigned int constant;
1034 CORE_ADDR loc;
1035
1036 offset = bits (inst2, 0, 11);
1037 if (insn & 0x0080)
1038 loc = start + 4 + offset;
1039 else
1040 loc = start + 4 - offset;
1041
1042 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1043 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1044 }
1045
1046 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1047 {
1048 /* Constant pool loads. */
1049 unsigned int constant;
1050 CORE_ADDR loc;
1051
1052 offset = bits (inst2, 0, 7) << 2;
1053 if (insn & 0x0080)
1054 loc = start + 4 + offset;
1055 else
1056 loc = start + 4 - offset;
1057
1058 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1059 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1060
1061 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1062 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1063 }
1064
1065 else if (thumb2_instruction_changes_pc (insn, inst2))
1066 {
1067 /* Don't scan past anything that might change control flow. */
1068 break;
1069 }
1070 else
1071 {
1072 /* The optimizer might shove anything into the prologue,
1073 so we just skip what we don't recognize. */
1074 unrecognized_pc = start;
1075 }
1076
1077 start += 2;
1078 }
1079 else if (thumb_instruction_changes_pc (insn))
1080 {
1081 /* Don't scan past anything that might change control flow. */
1082 break;
1083 }
1084 else
1085 {
1086 /* The optimizer might shove anything into the prologue,
1087 so we just skip what we don't recognize. */
1088 unrecognized_pc = start;
1089 }
1090
1091 start += 2;
1092 }
1093
1094 if (arm_debug)
1095 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1096 paddress (gdbarch, start));
1097
1098 if (unrecognized_pc == 0)
1099 unrecognized_pc = start;
1100
1101 if (cache == NULL)
1102 return unrecognized_pc;
1103
1104 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1105 {
1106 /* Frame pointer is fp. Frame size is constant. */
1107 cache->framereg = ARM_FP_REGNUM;
1108 cache->framesize = -regs[ARM_FP_REGNUM].k;
1109 }
1110 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1111 {
1112 /* Frame pointer is r7. Frame size is constant. */
1113 cache->framereg = THUMB_FP_REGNUM;
1114 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1115 }
1116 else
1117 {
1118 /* Try the stack pointer... this is a bit desperate. */
1119 cache->framereg = ARM_SP_REGNUM;
1120 cache->framesize = -regs[ARM_SP_REGNUM].k;
1121 }
1122
1123 for (i = 0; i < 16; i++)
1124 if (stack.find_reg (gdbarch, i, &offset))
1125 cache->saved_regs[i].addr = offset;
1126
1127 return unrecognized_pc;
1128 }
1129
1130
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  If the sequence is recognized, return the address
   of the __stack_chk_guard symbol that the instructions load, set the
   destination register number in *DESTREG, and set the size in bytes of
   the loading instruction sequence in *OFFSET.  Return 0 if the
   instructions are not recognized.  */
1136
static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS == 0 is the "sequence not recognized" result.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  /* PC-relative literal pool load: the pointer value lives at
	     the word-aligned PC + 4 + (imm8 << 2).  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* Read the next 32-bit (two halfword) instruction, expected
	     to be the matching movt.  */
	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      /* The movw/movt pair builds the full 32-bit address.  */
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  /* ARM-mode literal pool load; the PC reads as the current
	     instruction address + 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      /* The movw/movt pair builds the full 32-bit address.  */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1214
1215 /* Try to skip a sequence of instructions used for stack protector. If PC
1216 points to the first instruction of this sequence, return the address of
1217 first instruction after this sequence, otherwise, return original PC.
1218
1219 On arm, this sequence of instructions is composed of mainly three steps,
1220 Step 1: load symbol __stack_chk_guard,
1221 Step 2: load from address of __stack_chk_guard,
1222 Step 3: store it to somewhere else.
1223
1224 Usually, instructions on step 2 and step 3 are the same on various ARM
1225 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1226 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1227 instructions in step 1 vary from different ARM architectures. On ARMv7,
1228 they are,
1229
1230 movw Rn, #:lower16:__stack_chk_guard
1231 movt Rn, #:upper16:__stack_chk_guard
1232
1233 On ARMv5t, it is,
1234
1235 ldr Rn, .Label
1236 ....
   .Label:
1238 .word __stack_chk_guard
1239
1240 Since ldr/str is a very popular instruction, we can't use them as
1241 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1242 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
1244
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  On success ADDR is the
     address loaded, BASEREG the register it was loaded into, and OFFSET
     the byte size of the loading sequence.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  The load must read
	 through the register that received &__stack_chk_guard.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  The store must write
	 the same register the guard value was loaded into.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1316
1317 /* Advance the PC across any function entry prologue instructions to
1318 reach some "real" code.
1319
1320 The APCS (ARM Procedure Call Standard) defines the following
1321 prologue:
1322
1323 mov ip, sp
1324 [stmfd sp!, {a1,a2,a3,a4}]
1325 stmfd sp!, {...,fp,ip,lr,pc}
1326 [stfe f7, [sp, #-12]!]
1327 [stfe f6, [sp, #-12]!]
1328 [stfe f5, [sp, #-12]!]
1329 [stfe f4, [sp, #-12]!]
1330 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1331
static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* Also skip a stack-protector guard-load sequence, if one follows
	 the prologue.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  /* If the analyzer stopped early, the line-table answer was not
	     all prologue — don't skip anything.  */
	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;          /* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}
1409
1410 /* *INDENT-OFF* */
1411 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1412 This function decodes a Thumb function prologue to determine:
1413 1) the size of the stack frame
1414 2) which registers are saved on it
1415 3) the offsets of saved regs
1416 4) the offset from the stack pointer to the frame pointer
1417
1418 A typical Thumb function prologue would create this stack frame
1419 (offsets relative to FP)
1420 old SP -> 24 stack parameters
1421 20 LR
1422 16 R7
1423 R7 -> 0 local variables (16 bytes)
1424 SP -> -12 additional stack space (12 bytes)
1425 The frame size would thus be 36 bytes, and the frame offset would be
1426 12 bytes. The frame register is R7.
1427
1428 The comments for thumb_skip_prolog() describe the algorithm we use
1429 to detect the end of the prolog. */
1430 /* *INDENT-ON* */
1431
1432 static void
1433 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1434 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1435 {
1436 CORE_ADDR prologue_start;
1437 CORE_ADDR prologue_end;
1438
1439 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1440 &prologue_end))
1441 {
1442 /* See comment in arm_scan_prologue for an explanation of
1443 this heuristics. */
1444 if (prologue_end > prologue_start + 64)
1445 {
1446 prologue_end = prologue_start + 64;
1447 }
1448 }
1449 else
1450 /* We're in the boondocks: we have no idea where the start of the
1451 function is. */
1452 return;
1453
1454 prologue_end = std::min (prologue_end, prev_pc);
1455
1456 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1457 }
1458
1459 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1460 otherwise. */
1461
1462 static int
1463 arm_instruction_restores_sp (unsigned int insn)
1464 {
1465 if (bits (insn, 28, 31) != INST_NV)
1466 {
1467 if ((insn & 0x0df0f000) == 0x0080d000
1468 /* ADD SP (register or immediate). */
1469 || (insn & 0x0df0f000) == 0x0040d000
1470 /* SUB SP (register or immediate). */
1471 || (insn & 0x0ffffff0) == 0x01a0d000
1472 /* MOV SP. */
1473 || (insn & 0x0fff0000) == 0x08bd0000
1474 /* POP (LDMIA). */
1475 || (insn & 0x0fff0000) == 0x049d0000)
1476 /* POP of a single register. */
1477 return 1;
1478 }
1479
1480 return 0;
1481 }
1482
1483 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1484 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1485 fill it in. Return the first address not recognized as a prologue
1486 instruction.
1487
1488 We recognize all the instructions typically found in ARM prologues,
1489 plus harmless instructions which can be skipped (either for analysis
1490 purposes, or a more restrictive set that can be skipped when finding
1491 the end of the prologue). */
1492
static CORE_ADDR
arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int regno;
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     traceback.  */

  /* Start every register off as "its value at function entry".  */
  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));

  for (current_pc = prologue_start;
       current_pc < prologue_end;
       current_pc += 4)
    {
      unsigned int insn
	= read_code_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)		/* mov ip, sp */
	{
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* ARM immediate: 8-bit value rotated right by 2*rot bits.  */
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
	  continue;
	}
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	{
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  stack.store (regs[ARM_SP_REGNUM], 4,
		       regs[bits (insn, 12, 15)]);
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   or
	   stmfd sp!, {a1, a2, a3, a4}  */
	{
	  int mask = insn & 0xffff;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  /* Calculate offsets of saved registers.  STM stores the
	     highest-numbered register at the highest address, so walk
	     the mask downward while decrementing SP.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM]
		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* No need to add this to saved_regs -- it's just arg regs.  */
	  continue;
	}
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
	}
      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
	}
      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
							   [sp, -#c]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  /* FPA register push: 12 bytes per extended-precision reg.  */
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
	}
      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
							   [sp!] */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  /* The register count is encoded in the N0/N1 bits.  */
	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
	      else
		n_saved_fp_regs = 1;
	    }
	  else
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
	      else
		n_saved_fp_regs = 4;
	    }

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	    {
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	      /* NOTE(review): FP_START_REG is incremented both here and
		 in the for-statement, so only every other register is
		 recorded and SP is adjusted by half the expected total.
		 This looks like a long-standing bug — confirm against
		 the FPA sfmfd store-multiple semantics before fixing.  */
	      stack.store (regs[ARM_SP_REGNUM], 12,
			   regs[fp_start_reg++]);
	    }
	}
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
	{
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))
	    continue;
	  else
	    break;
	}
      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
	break;
      else if (arm_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	continue;
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
	continue;
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */
	continue;
      else
	{
	  /* The optimizer might shove anything into the prologue, if
	     we build up cache (cache != NULL) from scanning prologue,
	     we just skip what we don't recognize and scan further to
	     make cache as complete as possible.  However, if we skip
	     prologue, we'll stop immediately on unrecognized
	     instruction.  */
	  unrecognized_pc = current_pc;
	  if (cache != NULL)
	    continue;
	  else
	    break;
	}
    }

  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

  if (cache)
    {
      int framereg, framesize;

      /* The frame size is just the distance from the frame register
	 to the original stack pointer.  */
      if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
	{
	  /* Frame pointer is fp.  */
	  framereg = ARM_FP_REGNUM;
	  framesize = -regs[ARM_FP_REGNUM].k;
	}
      else
	{
	  /* Try the stack pointer... this is a bit desperate.  */
	  framereg = ARM_SP_REGNUM;
	  framesize = -regs[ARM_SP_REGNUM].k;
	}

      cache->framereg = framereg;
      cache->framesize = framesize;

      /* Record SP-relative offsets of any registers we saw saved;
	 arm_make_prologue_cache turns these into absolute addresses.  */
      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (stack.find_reg (gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  return unrecognized_pc;
}
1746
/* Scan the prologue of the function containing THIS_FRAME's PC and
   fill CACHE with the frame register, frame size and saved-register
   offsets found.  On failure CACHE is left describing "no frame"
   (SP-based, size 0).  */

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }
  else
    {
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      /* AAPCS does not use a frame register, so we can abort here.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
	return;

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
					      &return_value))
	return;
      else
	{
	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */
	}
    }

  /* Never scan past the point we are unwinding from.  */
  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}
1841
1842 static struct arm_prologue_cache *
1843 arm_make_prologue_cache (struct frame_info *this_frame)
1844 {
1845 int reg;
1846 struct arm_prologue_cache *cache;
1847 CORE_ADDR unwound_fp;
1848
1849 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1850 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1851
1852 arm_scan_prologue (this_frame, cache);
1853
1854 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1855 if (unwound_fp == 0)
1856 return cache;
1857
1858 cache->prev_sp = unwound_fp + cache->framesize;
1859
1860 /* Calculate actual addresses of saved registers using offsets
1861 determined by arm_scan_prologue. */
1862 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1863 if (trad_frame_addr_p (cache->saved_regs, reg))
1864 cache->saved_regs[reg].addr += cache->prev_sp;
1865
1866 return cache;
1867 }
1868
1869 /* Implementation of the stop_reason hook for arm_prologue frames. */
1870
1871 static enum unwind_stop_reason
1872 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1873 void **this_cache)
1874 {
1875 struct arm_prologue_cache *cache;
1876 CORE_ADDR pc;
1877
1878 if (*this_cache == NULL)
1879 *this_cache = arm_make_prologue_cache (this_frame);
1880 cache = (struct arm_prologue_cache *) *this_cache;
1881
1882 /* This is meant to halt the backtrace at "_start". */
1883 pc = get_frame_pc (this_frame);
1884 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1885 return UNWIND_OUTERMOST;
1886
1887 /* If we've hit a wall, stop. */
1888 if (cache->prev_sp == 0)
1889 return UNWIND_OUTERMOST;
1890
1891 return UNWIND_NO_REASON;
1892 }
1893
1894 /* Our frame ID for a normal frame is the current function's starting PC
1895 and the caller's SP when we were called. */
1896
1897 static void
1898 arm_prologue_this_id (struct frame_info *this_frame,
1899 void **this_cache,
1900 struct frame_id *this_id)
1901 {
1902 struct arm_prologue_cache *cache;
1903 struct frame_id id;
1904 CORE_ADDR pc, func;
1905
1906 if (*this_cache == NULL)
1907 *this_cache = arm_make_prologue_cache (this_frame);
1908 cache = (struct arm_prologue_cache *) *this_cache;
1909
1910 /* Use function start address as part of the frame ID. If we cannot
1911 identify the start address (due to missing symbol information),
1912 fall back to just using the current PC. */
1913 pc = get_frame_pc (this_frame);
1914 func = get_frame_func (this_frame);
1915 if (!func)
1916 func = pc;
1917
1918 id = frame_id_build (cache->prev_sp, func);
1919 *this_id = id;
1920 }
1921
/* Return the value of register PREV_REGNUM in the frame previous to
   THIS_FRAME, reconstructing PC, SP and CPSR specially and falling back
   to the prologue-scan results for everything else.  */

static struct value *
arm_prologue_prev_register (struct frame_info *this_frame,
			    void **this_cache,
			    int prev_regnum)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* If we are asked to unwind the PC, then we need to return the LR
     instead.  The prologue may save PC, but it will point into this
     frame's prologue, not the next frame's resume location.  Also
     strip the saved T bit.  A valid LR may have the low bit set, but
     a valid PC never does.  */
  if (prev_regnum == ARM_PC_REGNUM)
    {
      CORE_ADDR lr;

      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, prev_regnum,
					arm_addr_bits_remove (gdbarch, lr));
    }

  /* SP is generally not saved to the stack, but this frame is
     identified by the next frame's stack pointer at the time of the call.
     The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);

  /* The CPSR may have been changed by the call instruction and by the
     called function.  The only bit we can reconstruct is the T bit,
     by checking the low bit of LR as of the call.  This is a reliable
     indicator of Thumb-ness except for some ARM v4T pre-interworking
     Thumb code, which could get away with a clear low bit as long as
     the called function did not use bx.  Guess that all other
     bits are unchanged; the condition flags are presumably lost,
     but the processor status is likely valid.  */
  if (prev_regnum == ARM_PS_REGNUM)
    {
      CORE_ADDR lr, cpsr;
      ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

      cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
    }

  /* Everything else: use the saved-register addresses collected by the
     prologue scan.  */
  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
1979
/* Unwinder for ordinary ARM frames, driven by prologue analysis.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,	/* stop_reason */
  arm_prologue_this_id,			/* this_id */
  arm_prologue_prev_register,		/* prev_register */
  NULL,					/* unwind_data */
  default_frame_sniffer			/* sniffer */
};
1988
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

/* Key under which the per-objfile exception-table cache is registered;
   arm_exidx_data_free releases the per-section vectors.  */
static const struct objfile_data *arm_exidx_data_key;

/* One cached exception-table entry.  */
struct arm_exidx_entry
{
  /* Function start address, stored as an offset relative to the start
     of the containing section (see arm_exidx_new_objfile).  */
  bfd_vma addr;
  /* Normalized unwind instructions, obstack-allocated and terminated
     by the implicit 0xb0 "Finish" opcode; NULL when the index entry
     carries no usable unwind data (e.g. EXIDX_CANTUNWIND).  */
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* Per-objfile cache: one vector of entries per BFD section, indexed
   by the section's index (sec->the_bfd_section->index).  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
2008
2009 static void
2010 arm_exidx_data_free (struct objfile *objfile, void *arg)
2011 {
2012 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2013 unsigned int i;
2014
2015 for (i = 0; i < objfile->obfd->section_count; i++)
2016 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2017 }
2018
2019 static inline int
2020 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2021 const struct arm_exidx_entry *rhs)
2022 {
2023 return lhs->addr < rhs->addr;
2024 }
2025
2026 static struct obj_section *
2027 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2028 {
2029 struct obj_section *osect;
2030
2031 ALL_OBJFILE_OSECTIONS (objfile, osect)
2032 if (bfd_get_section_flags (objfile->obfd,
2033 osect->the_bfd_section) & SEC_ALLOC)
2034 {
2035 bfd_vma start, size;
2036 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2037 size = bfd_get_section_size (osect->the_bfd_section);
2038
2039 if (start <= vma && vma < start + size)
2040 return osect;
2041 }
2042
2043 return NULL;
2044 }
2045
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  gdb::byte_vector exidx_data;
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_data.resize (bfd_get_section_size (exidx));

      /* If the section contents cannot be read, bail out without
	 registering any cache for this objfile.  */
      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data.data (), 0,
				     exidx_data.size ()))
	return;
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  gdb::byte_vector extab_data;
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_data.resize (bfd_get_section_size (extab));

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data.data (), 0,
				     extab_data.size ()))
	return;
    }

  /* Allocate exception table data structure.  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a place-relative function address and either inline data
     or a place-relative pointer into .ARM.extab.  */
  for (i = 0; i < exidx_data.size () / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd,
				  exidx_data.data () + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The XOR/subtract pair
	 sign-extends the 31-bit place-relative (prel31) offset, which
	 is then relocated relative to this index entry's address.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  /* Only use the entry if the computed address actually lies
	     within the extab section we read.  */
	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  The first word is a
		     prel31 pointer to the routine itself.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_data.size ())
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   (extab_data.data ()
					    + addr - extab_vma));
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  */
      if (n_words)
	if (addr < extab_vma
	    || addr + 4 * n_words > extab_vma + extab_data.size ())
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the inline opcode bytes, most significant first.  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  /* Then the extab words, each converted to big-endian byte
	     order (opcodes are consumed most significant byte first).  */
	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data.data () + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }
}
2260
/* Search for the exception table entry covering MEMADDR.  If one is found,
   return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
   set *START to the start of the region covered by this entry.  */

static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      /* Entries are stored section-relative, so convert MEMADDR to a
	 section offset before searching.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      /* Fetch the cache built by arm_exidx_new_objfile, if any.  */
      data = ((struct arm_exidx_data *)
	      objfile_data (sec->objfile, arm_exidx_data_key));
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  return NULL;
}
2318
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */

static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  /* VSP is the EHABI "virtual stack pointer"; here it holds an
     absolute address in the inferior's address space, so register
     save slots recorded below are absolute stack addresses.  */
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  /* ENTRY is guaranteed to end with a 0xb0 "Finish" opcode (appended
     by arm_exidx_new_objfile), so this loop always terminates.  */
  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  Opcode values follow EHABI
	 section 9.3; some opcodes consume extra operand bytes.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 0x00..0x3f: vsp = vsp + (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 0x40..0x7f: vsp = vsp - (xxxxxx << 2) - 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* vsp = vsp + 0x204 + (uleb128 << 2); the operand is a
	     ULEB128-encoded additional offset.  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2617
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.  */

static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.
	 This checks whether the instruction just before PC is an SVC;
	 the read is done with safe_read_memory_unsigned_integer so an
	 unreadable PC simply leaves exc_valid unset.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  ULONGEST insn;

	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
						 2, byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  ULONGEST insn;

	  if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
						 4, byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2712
/* Unwinder driven by .ARM.exidx exception-table entries.  The sniffer
   fills in the prologue cache, after which the ordinary prologue
   this_id/prev_register routines are reused.  */
struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2721
2722 static struct arm_prologue_cache *
2723 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2724 {
2725 struct arm_prologue_cache *cache;
2726 int reg;
2727
2728 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2729 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2730
2731 /* Still rely on the offset calculated from prologue. */
2732 arm_scan_prologue (this_frame, cache);
2733
2734 /* Since we are in epilogue, the SP has been restored. */
2735 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2736
2737 /* Calculate actual addresses of saved registers using offsets
2738 determined by arm_scan_prologue. */
2739 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2740 if (trad_frame_addr_p (cache->saved_regs, reg))
2741 cache->saved_regs[reg].addr += cache->prev_sp;
2742
2743 return cache;
2744 }
2745
2746 /* Implementation of function hook 'this_id' in
2747 'struct frame_uwnind' for epilogue unwinder. */
2748
2749 static void
2750 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2751 void **this_cache,
2752 struct frame_id *this_id)
2753 {
2754 struct arm_prologue_cache *cache;
2755 CORE_ADDR pc, func;
2756
2757 if (*this_cache == NULL)
2758 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2759 cache = (struct arm_prologue_cache *) *this_cache;
2760
2761 /* Use function start address as part of the frame ID. If we cannot
2762 identify the start address (due to missing symbol information),
2763 fall back to just using the current PC. */
2764 pc = get_frame_pc (this_frame);
2765 func = get_frame_func (this_frame);
2766 if (func == 0)
2767 func = pc;
2768
2769 (*this_id) = frame_id_build (cache->prev_sp, pc);
2770 }
2771
2772 /* Implementation of function hook 'prev_register' in
2773 'struct frame_uwnind' for epilogue unwinder. */
2774
2775 static struct value *
2776 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2777 void **this_cache, int regnum)
2778 {
2779 if (*this_cache == NULL)
2780 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2781
2782 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2783 }
2784
2785 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2786 CORE_ADDR pc);
2787 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2788 CORE_ADDR pc);
2789
2790 /* Implementation of function hook 'sniffer' in
2791 'struct frame_uwnind' for epilogue unwinder. */
2792
2793 static int
2794 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2795 struct frame_info *this_frame,
2796 void **this_prologue_cache)
2797 {
2798 if (frame_relative_level (this_frame) == 0)
2799 {
2800 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2801 CORE_ADDR pc = get_frame_pc (this_frame);
2802
2803 if (arm_frame_is_thumb (this_frame))
2804 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2805 else
2806 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2807 }
2808 else
2809 return 0;
2810 }
2811
/* Frame unwinder from epilogue.  The sniffer restricts this unwinder
   to innermost frames whose PC is inside a function epilogue.  */

static const struct frame_unwind arm_epilogue_frame_unwind =
{
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_epilogue_frame_this_id,
  arm_epilogue_frame_prev_register,
  NULL,
  arm_epilogue_frame_sniffer,
};
2823
2824 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2825 trampoline, return the target PC. Otherwise return 0.
2826
2827 void call0a (char c, short s, int i, long l) {}
2828
2829 int main (void)
2830 {
2831 (*pointer_to_call0a) (c, s, i, l);
2832 }
2833
2834 Instead of calling a stub library function _call_via_xx (xx is
2835 the register name), GCC may inline the trampoline in the object
2836 file as below (register r2 has the address of call0a).
2837
2838 .global main
2839 .type main, %function
2840 ...
2841 bl .L1
2842 ...
2843 .size main, .-main
2844
2845 .L1:
2846 bx r2
2847
2848 The trampoline 'bx r2' doesn't belong to main. */
2849
2850 static CORE_ADDR
2851 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2852 {
2853 /* The heuristics of recognizing such trampoline is that FRAME is
2854 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2855 if (arm_frame_is_thumb (frame))
2856 {
2857 gdb_byte buf[2];
2858
2859 if (target_read_memory (pc, buf, 2) == 0)
2860 {
2861 struct gdbarch *gdbarch = get_frame_arch (frame);
2862 enum bfd_endian byte_order_for_code
2863 = gdbarch_byte_order_for_code (gdbarch);
2864 uint16_t insn
2865 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2866
2867 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2868 {
2869 CORE_ADDR dest
2870 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2871
2872 /* Clear the LSB so that gdb core sets step-resume
2873 breakpoint at the right address. */
2874 return UNMAKE_THUMB_ADDR (dest);
2875 }
2876 }
2877 }
2878
2879 return 0;
2880 }
2881
2882 static struct arm_prologue_cache *
2883 arm_make_stub_cache (struct frame_info *this_frame)
2884 {
2885 struct arm_prologue_cache *cache;
2886
2887 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2888 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2889
2890 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2891
2892 return cache;
2893 }
2894
2895 /* Our frame ID for a stub frame is the current SP and LR. */
2896
2897 static void
2898 arm_stub_this_id (struct frame_info *this_frame,
2899 void **this_cache,
2900 struct frame_id *this_id)
2901 {
2902 struct arm_prologue_cache *cache;
2903
2904 if (*this_cache == NULL)
2905 *this_cache = arm_make_stub_cache (this_frame);
2906 cache = (struct arm_prologue_cache *) *this_cache;
2907
2908 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2909 }
2910
2911 static int
2912 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2913 struct frame_info *this_frame,
2914 void **this_prologue_cache)
2915 {
2916 CORE_ADDR addr_in_block;
2917 gdb_byte dummy[4];
2918 CORE_ADDR pc, start_addr;
2919 const char *name;
2920
2921 addr_in_block = get_frame_address_in_block (this_frame);
2922 pc = get_frame_pc (this_frame);
2923 if (in_plt_section (addr_in_block)
2924 /* We also use the stub winder if the target memory is unreadable
2925 to avoid having the prologue unwinder trying to read it. */
2926 || target_read_memory (pc, dummy, 4) != 0)
2927 return 1;
2928
2929 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2930 && arm_skip_bx_reg (this_frame, pc) != 0)
2931 return 1;
2932
2933 return 0;
2934 }
2935
/* Unwinder for stub frames (PLT entries, unreadable code, and Thumb
   'bx Rm' trampolines); see arm_stub_unwind_sniffer.  */
struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2944
/* Build a prologue cache for an ARMv7-M exception-entry frame: store,
   into CACHE->saved_regs, the stack addresses at which the hardware
   exception entry sequence saved the caller's registers, and
   reconstruct the caller's SP.  CACHE is returned.  Only the basic
   eight-word integer frame is decoded here.  */

static struct arm_prologue_cache *
arm_m_exception_cache (struct frame_info *this_frame)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_sp;
  LONGEST xpsr;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  unwound_sp = get_frame_register_unsigned (this_frame,
					    ARM_SP_REGNUM);

  /* The hardware saves eight 32-bit words, comprising xPSR,
     ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
     "B1.5.6 Exception entry behavior" in
     "ARMv7-M Architecture Reference Manual".  */
  cache->saved_regs[0].addr = unwound_sp;
  cache->saved_regs[1].addr = unwound_sp + 4;
  cache->saved_regs[2].addr = unwound_sp + 8;
  cache->saved_regs[3].addr = unwound_sp + 12;
  cache->saved_regs[12].addr = unwound_sp + 16;
  cache->saved_regs[14].addr = unwound_sp + 20;
  cache->saved_regs[15].addr = unwound_sp + 24;
  cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;

  /* If bit 9 of the saved xPSR is set, then there is a four-byte
     aligner between the top of the 32-byte stack frame and the
     previous context's stack pointer.  If the xPSR cannot be read,
     the aligner is conservatively assumed absent.  */
  cache->prev_sp = unwound_sp + 32;
  if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
      && (xpsr & (1 << 9)) != 0)
    cache->prev_sp += 4;

  return cache;
}
2987
2988 /* Implementation of function hook 'this_id' in
2989 'struct frame_uwnind'. */
2990
2991 static void
2992 arm_m_exception_this_id (struct frame_info *this_frame,
2993 void **this_cache,
2994 struct frame_id *this_id)
2995 {
2996 struct arm_prologue_cache *cache;
2997
2998 if (*this_cache == NULL)
2999 *this_cache = arm_m_exception_cache (this_frame);
3000 cache = (struct arm_prologue_cache *) *this_cache;
3001
3002 /* Our frame ID for a stub frame is the current SP and LR. */
3003 *this_id = frame_id_build (cache->prev_sp,
3004 get_frame_pc (this_frame));
3005 }
3006
3007 /* Implementation of function hook 'prev_register' in
3008 'struct frame_uwnind'. */
3009
3010 static struct value *
3011 arm_m_exception_prev_register (struct frame_info *this_frame,
3012 void **this_cache,
3013 int prev_regnum)
3014 {
3015 struct arm_prologue_cache *cache;
3016
3017 if (*this_cache == NULL)
3018 *this_cache = arm_m_exception_cache (this_frame);
3019 cache = (struct arm_prologue_cache *) *this_cache;
3020
3021 /* The value was already reconstructed into PREV_SP. */
3022 if (prev_regnum == ARM_SP_REGNUM)
3023 return frame_unwind_got_constant (this_frame, prev_regnum,
3024 cache->prev_sp);
3025
3026 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3027 prev_regnum);
3028 }
3029
3030 /* Implementation of function hook 'sniffer' in
3031 'struct frame_uwnind'. */
3032
3033 static int
3034 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3035 struct frame_info *this_frame,
3036 void **this_prologue_cache)
3037 {
3038 CORE_ADDR this_pc = get_frame_pc (this_frame);
3039
3040 /* No need to check is_m; this sniffer is only registered for
3041 M-profile architectures. */
3042
3043 /* Check if exception frame returns to a magic PC value. */
3044 return arm_m_addr_is_magic (this_pc);
3045 }
3046
/* Frame unwinder for M-profile exceptions.  Exception frames are
   classified like signal trampolines (SIGTRAMP_FRAME) and selected by
   the magic-PC sniffer above.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,	/* No private unwinder data needed.  */
  arm_m_exception_unwind_sniffer
};
3058
3059 static CORE_ADDR
3060 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3061 {
3062 struct arm_prologue_cache *cache;
3063
3064 if (*this_cache == NULL)
3065 *this_cache = arm_make_prologue_cache (this_frame);
3066 cache = (struct arm_prologue_cache *) *this_cache;
3067
3068 return cache->prev_sp - cache->framesize;
3069 }
3070
/* Frame base used alongside the prologue-analysis unwinder; the same
   handler supplies the frame base, locals base and arguments base.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3077
3078 static struct value *
3079 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3080 int regnum)
3081 {
3082 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3083 CORE_ADDR lr, cpsr;
3084 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3085
3086 switch (regnum)
3087 {
3088 case ARM_PC_REGNUM:
3089 /* The PC is normally copied from the return column, which
3090 describes saves of LR. However, that version may have an
3091 extra bit set to indicate Thumb state. The bit is not
3092 part of the PC. */
3093 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3094 return frame_unwind_got_constant (this_frame, regnum,
3095 arm_addr_bits_remove (gdbarch, lr));
3096
3097 case ARM_PS_REGNUM:
3098 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3099 cpsr = get_frame_register_unsigned (this_frame, regnum);
3100 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3101 if (IS_THUMB_ADDR (lr))
3102 cpsr |= t_bit;
3103 else
3104 cpsr &= ~t_bit;
3105 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3106
3107 default:
3108 internal_error (__FILE__, __LINE__,
3109 _("Unexpected register %d"), regnum);
3110 }
3111 }
3112
3113 static void
3114 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3115 struct dwarf2_frame_state_reg *reg,
3116 struct frame_info *this_frame)
3117 {
3118 switch (regnum)
3119 {
3120 case ARM_PC_REGNUM:
3121 case ARM_PS_REGNUM:
3122 reg->how = DWARF2_FRAME_REG_FN;
3123 reg->loc.fn = arm_dwarf2_prev_register;
3124 break;
3125 case ARM_SP_REGNUM:
3126 reg->how = DWARF2_FRAME_REG_CFA;
3127 break;
3128 }
3129 }
3130
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb
   code: return non-zero if PC appears to be inside the epilogue of
   its function.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot analyze anything.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit encoding.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  /* Read the four bytes preceding PC.  INSN2 then holds the halfword
     immediately before PC, which is the entire preceding instruction
     when that instruction was 16 bits wide.  */
  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3240
/* ARM-mode (non-Thumb) helper for the stack_frame_destroyed_p gdbarch
   method: return non-zero if PC appears to be inside the epilogue of
   its function.  */

static int
arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  /* Without function bounds we cannot analyze anything.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Ignore instructions in the unconditional (NV) encoding space.  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  /* The preceding instruction must lie within this function.  */
  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3289
3290 /* Implement the stack_frame_destroyed_p gdbarch method. */
3291
3292 static int
3293 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3294 {
3295 if (arm_pc_is_thumb (gdbarch, pc))
3296 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3297 else
3298 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3299 }
3300
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Next-older item, or NULL.  */
  gdb_byte *data;		/* Heap-allocated copy of the contents.  */
};
3310
3311 static struct stack_item *
3312 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3313 {
3314 struct stack_item *si;
3315 si = XNEW (struct stack_item);
3316 si->data = (gdb_byte *) xmalloc (len);
3317 si->len = len;
3318 si->prev = prev;
3319 memcpy (si->data, contents, len);
3320 return si;
3321 }
3322
3323 static struct stack_item *
3324 pop_stack_item (struct stack_item *si)
3325 {
3326 struct stack_item *dead = si;
3327 si = si->prev;
3328 xfree (dead->data);
3329 xfree (dead);
3330 return si;
3331 }
3332
3333 /* Implement the gdbarch type alignment method, overrides the generic
3334 alignment algorithm for anything that is arm specific. */
3335
3336 static ULONGEST
3337 arm_type_align (gdbarch *gdbarch, struct type *t)
3338 {
3339 t = check_typedef (t);
3340 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3341 {
3342 /* Use the natural alignment for vector types (the same for
3343 scalar type), but the maximum alignment is 64-bit. */
3344 if (TYPE_LENGTH (t) > 8)
3345 return 8;
3346 else
3347 return TYPE_LENGTH (t);
3348 }
3349
3350 /* Allow the common code to calculate the alignment. */
3351 return 0;
3352 }
3353
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* Not yet classified.  */
  VFP_CPRC_SINGLE,	/* 4-byte single-precision float.  */
  VFP_CPRC_DOUBLE,	/* 8-byte double-precision float.  */
  VFP_CPRC_VEC64,	/* 8-byte containerized vector.  */
  VFP_CPRC_VEC128	/* 16-byte containerized vector.  */
};
3365
3366 /* The length of one element of base type B. */
3367
3368 static unsigned
3369 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3370 {
3371 switch (b)
3372 {
3373 case VFP_CPRC_SINGLE:
3374 return 4;
3375 case VFP_CPRC_DOUBLE:
3376 return 8;
3377 case VFP_CPRC_VEC64:
3378 return 8;
3379 case VFP_CPRC_VEC128:
3380 return 16;
3381 default:
3382 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3383 (int) b);
3384 }
3385 }
3386
3387 /* The character ('s', 'd' or 'q') for the type of VFP register used
3388 for passing base type B. */
3389
3390 static int
3391 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3392 {
3393 switch (b)
3394 {
3395 case VFP_CPRC_SINGLE:
3396 return 's';
3397 case VFP_CPRC_DOUBLE:
3398 return 'd';
3399 case VFP_CPRC_VEC64:
3400 return 'd';
3401 case VFP_CPRC_VEC128:
3402 return 'q';
3403 default:
3404 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3405 (int) b);
3406 }
3407 }
3408
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A scalar float classifies by its size; it must agree with any
	 classification already established.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    /* An ordinary array classifies by its element type; the
	       element count is derived from the total length.  */
	    int count;
	    unsigned unitlen;

	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	int count = 0;
	unsigned unitlen;
	int i;
	/* Each non-static field must itself be a valid sub-candidate;
	   the element counts accumulate across fields.  */
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = 0;

	    if (!field_is_static (&TYPE_FIELD (t, i)))
	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
						      base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	/* A size mismatch means padding or extra data: not a pure
	   homogeneous aggregate.  */
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	int count = 0;
	unsigned unitlen;
	int i;
	/* For a union the members overlap, so the candidate's element
	   count is the maximum over the members.  */
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3587
3588 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3589 if passed to or returned from a non-variadic function with the VFP
3590 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3591 *BASE_TYPE to the base type for T and *COUNT to the number of
3592 elements of that base type before returning. */
3593
3594 static int
3595 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3596 int *count)
3597 {
3598 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3599 int c = arm_vfp_cprc_sub_candidate (t, &b);
3600 if (c <= 0 || c > 4)
3601 return 0;
3602 *base_type = b;
3603 *count = c;
3604 return 1;
3605 }
3606
3607 /* Return 1 if the VFP ABI should be used for passing arguments to and
3608 returning values from a function of type FUNC_TYPE, 0
3609 otherwise. */
3610
3611 static int
3612 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3613 {
3614 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3615 /* Variadic functions always use the base ABI. Assume that functions
3616 without debug info are not variadic. */
3617 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3618 return 0;
3619 /* The VFP ABI is only supported as a variant of AAPCS. */
3620 if (tdep->arm_abi != ARM_ABI_AAPCS)
3621 return 0;
3622 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3623 }
3624
3625 /* We currently only support passing parameters in integer registers, which
3626 conforms with GCC's default model, and VFP argument passing following
3627 the VFP variant of AAPCS. Several other variants exist and
3628 we should probably support some of them based on the selected ABI. */
3629
3630 static CORE_ADDR
3631 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3632 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3633 struct value **args, CORE_ADDR sp,
3634 function_call_return_method return_method,
3635 CORE_ADDR struct_addr)
3636 {
3637 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3638 int argnum;
3639 int argreg;
3640 int nstack;
3641 struct stack_item *si = NULL;
3642 int use_vfp_abi;
3643 struct type *ftype;
3644 unsigned vfp_regs_free = (1 << 16) - 1;
3645
3646 /* Determine the type of this function and whether the VFP ABI
3647 applies. */
3648 ftype = check_typedef (value_type (function));
3649 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3650 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3651 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3652
3653 /* Set the return address. For the ARM, the return breakpoint is
3654 always at BP_ADDR. */
3655 if (arm_pc_is_thumb (gdbarch, bp_addr))
3656 bp_addr |= 1;
3657 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3658
3659 /* Walk through the list of args and determine how large a temporary
3660 stack is required. Need to take care here as structs may be
3661 passed on the stack, and we have to push them. */
3662 nstack = 0;
3663
3664 argreg = ARM_A1_REGNUM;
3665 nstack = 0;
3666
3667 /* The struct_return pointer occupies the first parameter
3668 passing register. */
3669 if (return_method == return_method_struct)
3670 {
3671 if (arm_debug)
3672 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3673 gdbarch_register_name (gdbarch, argreg),
3674 paddress (gdbarch, struct_addr));
3675 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3676 argreg++;
3677 }
3678
3679 for (argnum = 0; argnum < nargs; argnum++)
3680 {
3681 int len;
3682 struct type *arg_type;
3683 struct type *target_type;
3684 enum type_code typecode;
3685 const bfd_byte *val;
3686 int align;
3687 enum arm_vfp_cprc_base_type vfp_base_type;
3688 int vfp_base_count;
3689 int may_use_core_reg = 1;
3690
3691 arg_type = check_typedef (value_type (args[argnum]));
3692 len = TYPE_LENGTH (arg_type);
3693 target_type = TYPE_TARGET_TYPE (arg_type);
3694 typecode = TYPE_CODE (arg_type);
3695 val = value_contents (args[argnum]);
3696
3697 align = type_align (arg_type);
3698 /* Round alignment up to a whole number of words. */
3699 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3700 /* Different ABIs have different maximum alignments. */
3701 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3702 {
3703 /* The APCS ABI only requires word alignment. */
3704 align = INT_REGISTER_SIZE;
3705 }
3706 else
3707 {
3708 /* The AAPCS requires at most doubleword alignment. */
3709 if (align > INT_REGISTER_SIZE * 2)
3710 align = INT_REGISTER_SIZE * 2;
3711 }
3712
3713 if (use_vfp_abi
3714 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3715 &vfp_base_count))
3716 {
3717 int regno;
3718 int unit_length;
3719 int shift;
3720 unsigned mask;
3721
3722 /* Because this is a CPRC it cannot go in a core register or
3723 cause a core register to be skipped for alignment.
3724 Either it goes in VFP registers and the rest of this loop
3725 iteration is skipped for this argument, or it goes on the
3726 stack (and the stack alignment code is correct for this
3727 case). */
3728 may_use_core_reg = 0;
3729
3730 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3731 shift = unit_length / 4;
3732 mask = (1 << (shift * vfp_base_count)) - 1;
3733 for (regno = 0; regno < 16; regno += shift)
3734 if (((vfp_regs_free >> regno) & mask) == mask)
3735 break;
3736
3737 if (regno < 16)
3738 {
3739 int reg_char;
3740 int reg_scaled;
3741 int i;
3742
3743 vfp_regs_free &= ~(mask << regno);
3744 reg_scaled = regno / shift;
3745 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3746 for (i = 0; i < vfp_base_count; i++)
3747 {
3748 char name_buf[4];
3749 int regnum;
3750 if (reg_char == 'q')
3751 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3752 val + i * unit_length);
3753 else
3754 {
3755 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3756 reg_char, reg_scaled + i);
3757 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3758 strlen (name_buf));
3759 regcache->cooked_write (regnum, val + i * unit_length);
3760 }
3761 }
3762 continue;
3763 }
3764 else
3765 {
3766 /* This CPRC could not go in VFP registers, so all VFP
3767 registers are now marked as used. */
3768 vfp_regs_free = 0;
3769 }
3770 }
3771
3772 /* Push stack padding for dowubleword alignment. */
3773 if (nstack & (align - 1))
3774 {
3775 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3776 nstack += INT_REGISTER_SIZE;
3777 }
3778
3779 /* Doubleword aligned quantities must go in even register pairs. */
3780 if (may_use_core_reg
3781 && argreg <= ARM_LAST_ARG_REGNUM
3782 && align > INT_REGISTER_SIZE
3783 && argreg & 1)
3784 argreg++;
3785
3786 /* If the argument is a pointer to a function, and it is a
3787 Thumb function, create a LOCAL copy of the value and set
3788 the THUMB bit in it. */
3789 if (TYPE_CODE_PTR == typecode
3790 && target_type != NULL
3791 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3792 {
3793 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3794 if (arm_pc_is_thumb (gdbarch, regval))
3795 {
3796 bfd_byte *copy = (bfd_byte *) alloca (len);
3797 store_unsigned_integer (copy, len, byte_order,
3798 MAKE_THUMB_ADDR (regval));
3799 val = copy;
3800 }
3801 }
3802
3803 /* Copy the argument to general registers or the stack in
3804 register-sized pieces. Large arguments are split between
3805 registers and stack. */
3806 while (len > 0)
3807 {
3808 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3809 CORE_ADDR regval
3810 = extract_unsigned_integer (val, partial_len, byte_order);
3811
3812 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3813 {
3814 /* The argument is being passed in a general purpose
3815 register. */
3816 if (byte_order == BFD_ENDIAN_BIG)
3817 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3818 if (arm_debug)
3819 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3820 argnum,
3821 gdbarch_register_name
3822 (gdbarch, argreg),
3823 phex (regval, INT_REGISTER_SIZE));
3824 regcache_cooked_write_unsigned (regcache, argreg, regval);
3825 argreg++;
3826 }
3827 else
3828 {
3829 gdb_byte buf[INT_REGISTER_SIZE];
3830
3831 memset (buf, 0, sizeof (buf));
3832 store_unsigned_integer (buf, partial_len, byte_order, regval);
3833
3834 /* Push the arguments onto the stack. */
3835 if (arm_debug)
3836 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3837 argnum, nstack);
3838 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3839 nstack += INT_REGISTER_SIZE;
3840 }
3841
3842 len -= partial_len;
3843 val += partial_len;
3844 }
3845 }
3846 /* If we have an odd number of words to push, then decrement the stack
3847 by one word now, so first stack argument will be dword aligned. */
3848 if (nstack & 4)
3849 sp -= 4;
3850
3851 while (si)
3852 {
3853 sp -= si->len;
3854 write_memory (sp, si->data, si->len);
3855 si = pop_stack_item (si);
3856 }
3857
3858 /* Finally, update teh SP register. */
3859 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3860
3861 return sp;
3862 }
3863
3864
3865 /* Always align the frame to an 8-byte boundary. This is required on
3866 some platforms and harmless on the rest. */
3867
3868 static CORE_ADDR
3869 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3870 {
3871 /* Align the stack to eight bytes. */
3872 return sp & ~ (CORE_ADDR) 7;
3873 }
3874
3875 static void
3876 print_fpu_flags (struct ui_file *file, int flags)
3877 {
3878 if (flags & (1 << 0))
3879 fputs_filtered ("IVO ", file);
3880 if (flags & (1 << 1))
3881 fputs_filtered ("DVZ ", file);
3882 if (flags & (1 << 2))
3883 fputs_filtered ("OFL ", file);
3884 if (flags & (1 << 3))
3885 fputs_filtered ("UFL ", file);
3886 if (flags & (1 << 4))
3887 fputs_filtered ("INX ", file);
3888 fputc_filtered ('\n', file);
3889 }
3890
3891 /* Print interesting information about the floating point processor
3892 (if present) or emulator. */
3893 static void
3894 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3895 struct frame_info *frame, const char *args)
3896 {
3897 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3898 int type;
3899
3900 type = (status >> 24) & 127;
3901 if (status & (1 << 31))
3902 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3903 else
3904 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3905 /* i18n: [floating point unit] mask */
3906 fputs_filtered (_("mask: "), file);
3907 print_fpu_flags (file, status >> 16);
3908 /* i18n: [floating point unit] flags */
3909 fputs_filtered (_("flags: "), file);
3910 print_fpu_flags (file, status);
3911 }
3912
3913 /* Construct the ARM extended floating point type. */
3914 static struct type *
3915 arm_ext_type (struct gdbarch *gdbarch)
3916 {
3917 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3918
3919 if (!tdep->arm_ext_type)
3920 tdep->arm_ext_type
3921 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3922 floatformats_arm_ext);
3923
3924 return tdep->arm_ext_type;
3925 }
3926
/* Build, lazily and cached in TDEP, the union type used for a 64-bit
   NEON "D" register: overlapping vector views at u8/u16/u32/f32
   element widths plus scalar u64/f64 views.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      /* Each field overlays the same 8 bytes, viewed with a different
	 element width.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
3958
3959 /* FIXME: The vector types are not correctly ordered on big-endian
3960 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3961 bits of d0 - regardless of what unit size is being held in d0. So
3962 the offset of the first uint8 in d0 is 7, but the offset of the
3963 first float is 4. This code works as-is for little-endian
3964 targets. */
3965
/* Build, lazily and cached in TDEP, the union type used for a 128-bit
   NEON "Q" register: overlapping vector views at u8/u16/u32/u64/f32/f64
   element widths.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      /* Each field overlays the same 16 bytes, viewed with a different
	 element width.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
3997
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* The 32 single-precision VFP pseudo registers sit immediately
     after the raw registers.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* The 16 NEON quad pseudo registers follow the 32 VFP pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* FPA registers only carry a meaningful type when the target
	 actually has an FPA.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4047
4048 /* Map a DWARF register REGNUM onto the appropriate GDB register
4049 number. */
4050
4051 static int
4052 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4053 {
4054 /* Core integer regs. */
4055 if (reg >= 0 && reg <= 15)
4056 return reg;
4057
4058 /* Legacy FPA encoding. These were once used in a way which
4059 overlapped with VFP register numbering, so their use is
4060 discouraged, but GDB doesn't support the ARM toolchain
4061 which used them for VFP. */
4062 if (reg >= 16 && reg <= 23)
4063 return ARM_F0_REGNUM + reg - 16;
4064
4065 /* New assignments for the FPA registers. */
4066 if (reg >= 96 && reg <= 103)
4067 return ARM_F0_REGNUM + reg - 96;
4068
4069 /* WMMX register assignments. */
4070 if (reg >= 104 && reg <= 111)
4071 return ARM_WCGR0_REGNUM + reg - 104;
4072
4073 if (reg >= 112 && reg <= 127)
4074 return ARM_WR0_REGNUM + reg - 112;
4075
4076 if (reg >= 192 && reg <= 199)
4077 return ARM_WC0_REGNUM + reg - 192;
4078
4079 /* VFP v2 registers. A double precision value is actually
4080 in d1 rather than s2, but the ABI only defines numbering
4081 for the single precision registers. This will "just work"
4082 in GDB for little endian targets (we'll read eight bytes,
4083 starting in s0 and then progressing to s1), but will be
4084 reversed on big endian targets with VFP. This won't
4085 be a problem for the new Neon quad registers; you're supposed
4086 to use DW_OP_piece for those. */
4087 if (reg >= 64 && reg <= 95)
4088 {
4089 char name_buf[4];
4090
4091 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4092 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4093 strlen (name_buf));
4094 }
4095
4096 /* VFP v3 / Neon registers. This range is also used for VFP v2
4097 registers, except that it now describes d0 instead of s0. */
4098 if (reg >= 256 && reg <= 287)
4099 {
4100 char name_buf[4];
4101
4102 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4103 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4104 strlen (name_buf));
4105 }
4106
4107 return -1;
4108 }
4109
4110 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4111 static int
4112 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4113 {
4114 int reg = regnum;
4115 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4116
4117 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4118 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4119
4120 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4121 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4122
4123 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4124 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4125
4126 if (reg < NUM_GREGS)
4127 return SIM_ARM_R0_REGNUM + reg;
4128 reg -= NUM_GREGS;
4129
4130 if (reg < NUM_FREGS)
4131 return SIM_ARM_FP0_REGNUM + reg;
4132 reg -= NUM_FREGS;
4133
4134 if (reg < NUM_SREGS)
4135 return SIM_ARM_FPS_REGNUM + reg;
4136 reg -= NUM_SREGS;
4137
4138 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4139 }
4140
4141 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4142 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4143 NULL if an error occurs. BUF is freed. */
4144
4145 static gdb_byte *
4146 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4147 int old_len, int new_len)
4148 {
4149 gdb_byte *new_buf;
4150 int bytes_to_read = new_len - old_len;
4151
4152 new_buf = (gdb_byte *) xmalloc (new_len);
4153 memcpy (new_buf + bytes_to_read, buf, old_len);
4154 xfree (buf);
4155 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4156 {
4157 xfree (new_buf);
4158 return NULL;
4159 }
4160 return new_buf;
4161 }
4162
/* An IT block is at most the 2-byte IT instruction followed by
   four 4-byte instructions.  The furthest back we must search to
   find an IT block that affects the current instruction is thus
   2 + 3 * 4 == 14 bytes: the IT instruction itself plus the three
   conditional instructions that may precede the one being examined.  */
#define MAX_IT_BLOCK_PREFIX 14

/* Use a quick scan if there are more than this many bytes of
   code.  */
#define IT_SCAN_THRESHOLD 32
4172
4173 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4174 A breakpoint in an IT block may not be hit, depending on the
4175 condition flags. */
4176 static CORE_ADDR
4177 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4178 {
4179 gdb_byte *buf;
4180 char map_type;
4181 CORE_ADDR boundary, func_start;
4182 int buf_len;
4183 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4184 int i, any, last_it, last_it_count;
4185
4186 /* If we are using BKPT breakpoints, none of this is necessary. */
4187 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4188 return bpaddr;
4189
4190 /* ARM mode does not have this problem. */
4191 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4192 return bpaddr;
4193
4194 /* We are setting a breakpoint in Thumb code that could potentially
4195 contain an IT block. The first step is to find how much Thumb
4196 code there is; we do not need to read outside of known Thumb
4197 sequences. */
4198 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4199 if (map_type == 0)
4200 /* Thumb-2 code must have mapping symbols to have a chance. */
4201 return bpaddr;
4202
4203 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4204
4205 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4206 && func_start > boundary)
4207 boundary = func_start;
4208
4209 /* Search for a candidate IT instruction. We have to do some fancy
4210 footwork to distinguish a real IT instruction from the second
4211 half of a 32-bit instruction, but there is no need for that if
4212 there's no candidate. */
4213 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4214 if (buf_len == 0)
4215 /* No room for an IT instruction. */
4216 return bpaddr;
4217
4218 buf = (gdb_byte *) xmalloc (buf_len);
4219 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4220 return bpaddr;
4221 any = 0;
4222 for (i = 0; i < buf_len; i += 2)
4223 {
4224 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4225 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4226 {
4227 any = 1;
4228 break;
4229 }
4230 }
4231
4232 if (any == 0)
4233 {
4234 xfree (buf);
4235 return bpaddr;
4236 }
4237
4238 /* OK, the code bytes before this instruction contain at least one
4239 halfword which resembles an IT instruction. We know that it's
4240 Thumb code, but there are still two possibilities. Either the
4241 halfword really is an IT instruction, or it is the second half of
4242 a 32-bit Thumb instruction. The only way we can tell is to
4243 scan forwards from a known instruction boundary. */
4244 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4245 {
4246 int definite;
4247
4248 /* There's a lot of code before this instruction. Start with an
4249 optimistic search; it's easy to recognize halfwords that can
4250 not be the start of a 32-bit instruction, and use that to
4251 lock on to the instruction boundaries. */
4252 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4253 if (buf == NULL)
4254 return bpaddr;
4255 buf_len = IT_SCAN_THRESHOLD;
4256
4257 definite = 0;
4258 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4259 {
4260 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4261 if (thumb_insn_size (inst1) == 2)
4262 {
4263 definite = 1;
4264 break;
4265 }
4266 }
4267
4268 /* At this point, if DEFINITE, BUF[I] is the first place we
4269 are sure that we know the instruction boundaries, and it is far
4270 enough from BPADDR that we could not miss an IT instruction
4271 affecting BPADDR. If ! DEFINITE, give up - start from a
4272 known boundary. */
4273 if (! definite)
4274 {
4275 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4276 bpaddr - boundary);
4277 if (buf == NULL)
4278 return bpaddr;
4279 buf_len = bpaddr - boundary;
4280 i = 0;
4281 }
4282 }
4283 else
4284 {
4285 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4286 if (buf == NULL)
4287 return bpaddr;
4288 buf_len = bpaddr - boundary;
4289 i = 0;
4290 }
4291
4292 /* Scan forwards. Find the last IT instruction before BPADDR. */
4293 last_it = -1;
4294 last_it_count = 0;
4295 while (i < buf_len)
4296 {
4297 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4298 last_it_count--;
4299 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4300 {
4301 last_it = i;
4302 if (inst1 & 0x0001)
4303 last_it_count = 4;
4304 else if (inst1 & 0x0002)
4305 last_it_count = 3;
4306 else if (inst1 & 0x0004)
4307 last_it_count = 2;
4308 else
4309 last_it_count = 1;
4310 }
4311 i += thumb_insn_size (inst1);
4312 }
4313
4314 xfree (buf);
4315
4316 if (last_it == -1)
4317 /* There wasn't really an IT instruction after all. */
4318 return bpaddr;
4319
4320 if (last_it_count < 1)
4321 /* It was too far away. */
4322 return bpaddr;
4323
4324 /* This really is a trouble spot. Move the breakpoint to the IT
4325 instruction. */
4326 return bpaddr - buf_len + last_it;
4327 }
4328
4329 /* ARM displaced stepping support.
4330
4331 Generally ARM displaced stepping works as follows:
4332
4333 1. When an instruction is to be single-stepped, it is first decoded by
4334 arm_process_displaced_insn. Depending on the type of instruction, it is
4335 then copied to a scratch location, possibly in a modified form. The
4336 copy_* set of functions performs such modification, as necessary. A
4337 breakpoint is placed after the modified instruction in the scratch space
4338 to return control to GDB. Note in particular that instructions which
4339 modify the PC will no longer do so after modification.
4340
4341 2. The instruction is single-stepped, by setting the PC to the scratch
4342 location address, and resuming. Control returns to GDB when the
4343 breakpoint is hit.
4344
4345 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4346 function used for the current instruction. This function's job is to
4347 put the CPU/memory state back to what it would have been if the
4348 instruction had been executed unmodified in its original location. */
4349
/* NOP instruction (mov r0, r0): placed in the scratch pad when the
   copied instruction's entire effect is performed by the cleanup
   routine.  The Thumb encoding 0x4600 is likewise "mov r0, r0".  */
#define ARM_NOP 0xe1a00000
#define THUMB_NOP 0x4600
4353
4354 /* Helper for register reads for displaced stepping. In particular, this
4355 returns the PC as it would be seen by the instruction at its original
4356 location. */
4357
4358 ULONGEST
4359 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4360 int regno)
4361 {
4362 ULONGEST ret;
4363 CORE_ADDR from = dsc->insn_addr;
4364
4365 if (regno == ARM_PC_REGNUM)
4366 {
4367 /* Compute pipeline offset:
4368 - When executing an ARM instruction, PC reads as the address of the
4369 current instruction plus 8.
4370 - When executing a Thumb instruction, PC reads as the address of the
4371 current instruction plus 4. */
4372
4373 if (!dsc->is_thumb)
4374 from += 8;
4375 else
4376 from += 4;
4377
4378 if (debug_displaced)
4379 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4380 (unsigned long) from);
4381 return (ULONGEST) from;
4382 }
4383 else
4384 {
4385 regcache_cooked_read_unsigned (regs, regno, &ret);
4386 if (debug_displaced)
4387 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4388 regno, (unsigned long) ret);
4389 return ret;
4390 }
4391 }
4392
4393 static int
4394 displaced_in_arm_mode (struct regcache *regs)
4395 {
4396 ULONGEST ps;
4397 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4398
4399 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4400
4401 return (ps & t_bit) == 0;
4402 }
4403
4404 /* Write to the PC as from a branch instruction. */
4405
4406 static void
4407 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4408 ULONGEST val)
4409 {
4410 if (!dsc->is_thumb)
4411 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4412 architecture versions < 6. */
4413 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4414 val & ~(ULONGEST) 0x3);
4415 else
4416 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4417 val & ~(ULONGEST) 0x1);
4418 }
4419
4420 /* Write to the PC as from a branch-exchange instruction. */
4421
4422 static void
4423 bx_write_pc (struct regcache *regs, ULONGEST val)
4424 {
4425 ULONGEST ps;
4426 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4427
4428 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4429
4430 if ((val & 1) == 1)
4431 {
4432 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4433 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4434 }
4435 else if ((val & 2) == 0)
4436 {
4437 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4438 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4439 }
4440 else
4441 {
4442 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4443 mode, align dest to 4 bytes). */
4444 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4445 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4446 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4447 }
4448 }
4449
4450 /* Write to the PC as if from a load instruction. */
4451
4452 static void
4453 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4454 ULONGEST val)
4455 {
4456 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4457 bx_write_pc (regs, val);
4458 else
4459 branch_write_pc (regs, dsc, val);
4460 }
4461
4462 /* Write to the PC as if from an ALU instruction. */
4463
4464 static void
4465 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4466 ULONGEST val)
4467 {
4468 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4469 bx_write_pc (regs, val);
4470 else
4471 branch_write_pc (regs, dsc, val);
4472 }
4473
4474 /* Helper for writing to registers for displaced stepping. Writing to the PC
4475 has a varying effects depending on the instruction which does the write:
4476 this is controlled by the WRITE_PC argument. */
4477
4478 void
4479 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4480 int regno, ULONGEST val, enum pc_write_style write_pc)
4481 {
4482 if (regno == ARM_PC_REGNUM)
4483 {
4484 if (debug_displaced)
4485 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4486 (unsigned long) val);
4487 switch (write_pc)
4488 {
4489 case BRANCH_WRITE_PC:
4490 branch_write_pc (regs, dsc, val);
4491 break;
4492
4493 case BX_WRITE_PC:
4494 bx_write_pc (regs, val);
4495 break;
4496
4497 case LOAD_WRITE_PC:
4498 load_write_pc (regs, dsc, val);
4499 break;
4500
4501 case ALU_WRITE_PC:
4502 alu_write_pc (regs, dsc, val);
4503 break;
4504
4505 case CANNOT_WRITE_PC:
4506 warning (_("Instruction wrote to PC in an unexpected way when "
4507 "single-stepping"));
4508 break;
4509
4510 default:
4511 internal_error (__FILE__, __LINE__,
4512 _("Invalid argument to displaced_write_reg"));
4513 }
4514
4515 dsc->wrote_to_pc = 1;
4516 }
4517 else
4518 {
4519 if (debug_displaced)
4520 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4521 regno, (unsigned long) val);
4522 regcache_cooked_write_unsigned (regs, regno, val);
4523 }
4524 }
4525
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  while (bitmask != 0)
    {
      /* Isolate the least significant set bit.  BITMASK is made of
	 0b1111 nibbles, so this is the low bit of the lowest
	 remaining register field.  */
      uint32_t field_low = bitmask & (~bitmask + 1);
      uint32_t field_mask = field_low * 0xf;

      /* A register field holding 0b1111 denotes the PC (r15).  */
      if ((insn & field_mask) == field_mask)
	return 1;

      bitmask &= ~field_mask;
    }

  return 0;
}
4557
4558 /* The simplest copy function. Many instructions have the same effect no
4559 matter what address they are executed at: in those cases, use this. */
4560
4561 static int
4562 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4563 const char *iname, arm_displaced_step_closure *dsc)
4564 {
4565 if (debug_displaced)
4566 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4567 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4568 iname);
4569
4570 dsc->modinsn[0] = insn;
4571
4572 return 0;
4573 }
4574
4575 static int
4576 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4577 uint16_t insn2, const char *iname,
4578 arm_displaced_step_closure *dsc)
4579 {
4580 if (debug_displaced)
4581 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4582 "opcode/class '%s' unmodified\n", insn1, insn2,
4583 iname);
4584
4585 dsc->modinsn[0] = insn1;
4586 dsc->modinsn[1] = insn2;
4587 dsc->numinsns = 2;
4588
4589 return 0;
4590 }
4591
4592 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4593 modification. */
4594 static int
4595 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4596 const char *iname,
4597 arm_displaced_step_closure *dsc)
4598 {
4599 if (debug_displaced)
4600 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4601 "opcode/class '%s' unmodified\n", insn,
4602 iname);
4603
4604 dsc->modinsn[0] = insn;
4605
4606 return 0;
4607 }
4608
4609 /* Preload instructions with immediate offset. */
4610
4611 static void
4612 cleanup_preload (struct gdbarch *gdbarch,
4613 struct regcache *regs, arm_displaced_step_closure *dsc)
4614 {
4615 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4616 if (!dsc->u.preload.immed)
4617 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4618 }
4619
4620 static void
4621 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4622 arm_displaced_step_closure *dsc, unsigned int rn)
4623 {
4624 ULONGEST rn_val;
4625 /* Preload instructions:
4626
4627 {pli/pld} [rn, #+/-imm]
4628 ->
4629 {pli/pld} [r0, #+/-imm]. */
4630
4631 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4632 rn_val = displaced_read_reg (regs, dsc, rn);
4633 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4634 dsc->u.preload.immed = 1;
4635
4636 dsc->cleanup = &cleanup_preload;
4637 }
4638
4639 static int
4640 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4641 arm_displaced_step_closure *dsc)
4642 {
4643 unsigned int rn = bits (insn, 16, 19);
4644
4645 if (!insn_references_pc (insn, 0x000f0000ul))
4646 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4647
4648 if (debug_displaced)
4649 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4650 (unsigned long) insn);
4651
4652 dsc->modinsn[0] = insn & 0xfff0ffff;
4653
4654 install_preload (gdbarch, regs, dsc, rn);
4655
4656 return 0;
4657 }
4658
/* Copy a Thumb-2 PLD/PLI with a PC-relative immediate offset,
   rewriting it to a register-offset form using scratch registers.  */
static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);	/* 1: add IMM12, 0: subtract.  */
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  /* Non-PC-relative preloads run unmodified.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Apply the sign implied by the U bit.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  /* displaced_read_reg yields the pipeline-adjusted PC value.  */
  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
4706
4707 /* Preload instructions with register offset. */
4708
4709 static void
4710 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4711 arm_displaced_step_closure *dsc, unsigned int rn,
4712 unsigned int rm)
4713 {
4714 ULONGEST rn_val, rm_val;
4715
4716 /* Preload register-offset instructions:
4717
4718 {pli/pld} [rn, rm {, shift}]
4719 ->
4720 {pli/pld} [r0, r1 {, shift}]. */
4721
4722 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4723 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4724 rn_val = displaced_read_reg (regs, dsc, rn);
4725 rm_val = displaced_read_reg (regs, dsc, rm);
4726 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4727 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4728 dsc->u.preload.immed = 0;
4729
4730 dsc->cleanup = &cleanup_preload;
4731 }
4732
4733 static int
4734 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4735 struct regcache *regs,
4736 arm_displaced_step_closure *dsc)
4737 {
4738 unsigned int rn = bits (insn, 16, 19);
4739 unsigned int rm = bits (insn, 0, 3);
4740
4741
4742 if (!insn_references_pc (insn, 0x000f000ful))
4743 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4744
4745 if (debug_displaced)
4746 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4747 (unsigned long) insn);
4748
4749 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4750
4751 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4752 return 0;
4753 }
4754
4755 /* Copy/cleanup coprocessor load and store instructions. */
4756
4757 static void
4758 cleanup_copro_load_store (struct gdbarch *gdbarch,
4759 struct regcache *regs,
4760 arm_displaced_step_closure *dsc)
4761 {
4762 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4763
4764 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4765
4766 if (dsc->u.ldst.writeback)
4767 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4768 }
4769
4770 static void
4771 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4772 arm_displaced_step_closure *dsc,
4773 int writeback, unsigned int rn)
4774 {
4775 ULONGEST rn_val;
4776
4777 /* Coprocessor load/store instructions:
4778
4779 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4780 ->
4781 {stc/stc2} [r0, #+/-imm].
4782
4783 ldc/ldc2 are handled identically. */
4784
4785 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4786 rn_val = displaced_read_reg (regs, dsc, rn);
4787 /* PC should be 4-byte aligned. */
4788 rn_val = rn_val & 0xfffffffc;
4789 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4790
4791 dsc->u.ldst.writeback = writeback;
4792 dsc->u.ldst.rn = rn;
4793
4794 dsc->cleanup = &cleanup_copro_load_store;
4795 }
4796
4797 static int
4798 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4799 struct regcache *regs,
4800 arm_displaced_step_closure *dsc)
4801 {
4802 unsigned int rn = bits (insn, 16, 19);
4803
4804 if (!insn_references_pc (insn, 0x000f0000ul))
4805 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4806
4807 if (debug_displaced)
4808 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4809 "load/store insn %.8lx\n", (unsigned long) insn);
4810
4811 dsc->modinsn[0] = insn & 0xfff0ffff;
4812
4813 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4814
4815 return 0;
4816 }
4817
4818 static int
4819 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4820 uint16_t insn2, struct regcache *regs,
4821 arm_displaced_step_closure *dsc)
4822 {
4823 unsigned int rn = bits (insn1, 0, 3);
4824
4825 if (rn != ARM_PC_REGNUM)
4826 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4827 "copro load/store", dsc);
4828
4829 if (debug_displaced)
4830 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4831 "load/store insn %.4x%.4x\n", insn1, insn2);
4832
4833 dsc->modinsn[0] = insn1 & 0xfff0;
4834 dsc->modinsn[1] = insn2;
4835 dsc->numinsns = 2;
4836
4837 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4838 doesn't support writeback, so pass 0. */
4839 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4840
4841 return 0;
4842 }
4843
4844 /* Clean up branch instructions (actually perform the branch, by setting
4845 PC). */
4846
4847 static void
4848 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4849 arm_displaced_step_closure *dsc)
4850 {
4851 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4852 int branch_taken = condition_true (dsc->u.branch.cond, status);
4853 enum pc_write_style write_pc = dsc->u.branch.exchange
4854 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4855
4856 if (!branch_taken)
4857 return;
4858
4859 if (dsc->u.branch.link)
4860 {
4861 /* The value of LR should be the next insn of current one. In order
4862 not to confuse logic hanlding later insn `bx lr', if current insn mode
4863 is Thumb, the bit 0 of LR value should be set to 1. */
4864 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4865
4866 if (dsc->is_thumb)
4867 next_insn_addr |= 0x1;
4868
4869 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4870 CANNOT_WRITE_PC);
4871 }
4872
4873 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4874 }
4875
4876 /* Copy B/BL/BLX instructions with immediate destinations. */
4877
4878 static void
4879 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4880 arm_displaced_step_closure *dsc,
4881 unsigned int cond, int exchange, int link, long offset)
4882 {
4883 /* Implement "BL<cond> <label>" as:
4884
4885 Preparation: cond <- instruction condition
4886 Insn: mov r0, r0 (nop)
4887 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4888
4889 B<cond> similar, but don't set r14 in cleanup. */
4890
4891 dsc->u.branch.cond = cond;
4892 dsc->u.branch.link = link;
4893 dsc->u.branch.exchange = exchange;
4894
4895 dsc->u.branch.dest = dsc->insn_addr;
4896 if (link && exchange)
4897 /* For BLX, offset is computed from the Align (PC, 4). */
4898 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4899
4900 if (dsc->is_thumb)
4901 dsc->u.branch.dest += 4 + offset;
4902 else
4903 dsc->u.branch.dest += 8 + offset;
4904
4905 dsc->cleanup = &cleanup_branch;
4906 }
/* Copy an ARM-state B/BL/BLX with immediate destination: replace it
   with a NOP and let cleanup_branch perform the branch.  */
static int
arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
		   struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* A condition field of 0xf marks the unconditional BLX (immediate)
     encoding.  */
  int exchange = (cond == 0xf);
  int link = exchange || bit (insn, 24);
  long offset;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
			(unsigned long) insn);
  if (exchange)
    /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
       then arrange the switch into Thumb mode.  */
    offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
  else
    offset = bits (insn, 0, 23) << 2;

  /* Sign-extend: bit 25 of OFFSET holds the sign bit of the 24-bit
     immediate after the << 2 shift above.  */
  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;

  dsc->modinsn[0] = ARM_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4935
/* Copy a 32-bit Thumb-2 B/BL/BLX with immediate destination: replace
   it with a NOP and let cleanup_branch perform the branch.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      arm_displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  /* S is sign-extended (0 or -1) and forms the top of the offset
     below.  */
  int s = sbits (insn1, 10, 10);
  /* I1 = NOT (J1 XOR S), I2 = NOT (J2 XOR S).  */
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  /* T3 carries its condition in bits 6-9 of the first
	     halfword.  */
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL/BLX: BLX targets are word-aligned, hence the different
	 low-part extraction.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
4990
4991 /* Copy B Thumb instructions. */
4992 static int
4993 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4994 arm_displaced_step_closure *dsc)
4995 {
4996 unsigned int cond = 0;
4997 int offset = 0;
4998 unsigned short bit_12_15 = bits (insn, 12, 15);
4999 CORE_ADDR from = dsc->insn_addr;
5000
5001 if (bit_12_15 == 0xd)
5002 {
5003 /* offset = SignExtend (imm8:0, 32) */
5004 offset = sbits ((insn << 1), 0, 8);
5005 cond = bits (insn, 8, 11);
5006 }
5007 else if (bit_12_15 == 0xe) /* Encoding T2 */
5008 {
5009 offset = sbits ((insn << 1), 0, 11);
5010 cond = INST_AL;
5011 }
5012
5013 if (debug_displaced)
5014 fprintf_unfiltered (gdb_stdlog,
5015 "displaced: copying b immediate insn %.4x "
5016 "with offset %d\n", insn, offset);
5017
5018 dsc->u.branch.cond = cond;
5019 dsc->u.branch.link = 0;
5020 dsc->u.branch.exchange = 0;
5021 dsc->u.branch.dest = from + 4 + offset;
5022
5023 dsc->modinsn[0] = THUMB_NOP;
5024
5025 dsc->cleanup = &cleanup_branch;
5026
5027 return 0;
5028 }
5029
5030 /* Copy BX/BLX with register-specified destinations. */
5031
5032 static void
5033 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5034 arm_displaced_step_closure *dsc, int link,
5035 unsigned int cond, unsigned int rm)
5036 {
5037 /* Implement {BX,BLX}<cond> <reg>" as:
5038
5039 Preparation: cond <- instruction condition
5040 Insn: mov r0, r0 (nop)
5041 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5042
5043 Don't set r14 in cleanup for BX. */
5044
5045 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5046
5047 dsc->u.branch.cond = cond;
5048 dsc->u.branch.link = link;
5049
5050 dsc->u.branch.exchange = 1;
5051
5052 dsc->cleanup = &cleanup_branch;
5053 }
5054
/* Copy an ARM-state BX/BLX (register): replace it with a NOP and let
   cleanup_branch perform the exchange branch.  */
static int
arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* BX: x12xxx1x
     BLX: x12xxx3x.  */
  int link = bit (insn, 5);	/* Distinguishes BLX from BX.  */
  unsigned int rm = bits (insn, 0, 3);	/* Target register.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
			(unsigned long) insn);

  dsc->modinsn[0] = ARM_NOP;

  install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
  return 0;
}
5074
/* Copy a Thumb BX/BLX (register): replace it with a NOP and let
   cleanup_branch perform the exchange branch.  */
static int
thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
		       struct regcache *regs,
		       arm_displaced_step_closure *dsc)
{
  int link = bit (insn, 7);	/* Distinguishes BLX from BX.  */
  unsigned int rm = bits (insn, 3, 6);	/* Target register.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
			(unsigned short) insn);

  dsc->modinsn[0] = THUMB_NOP;

  /* The condition used here is always AL.  */
  install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);

  return 0;
}
5093
5094
5095 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5096
5097 static void
5098 cleanup_alu_imm (struct gdbarch *gdbarch,
5099 struct regcache *regs, arm_displaced_step_closure *dsc)
5100 {
5101 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5102 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5103 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5104 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5105 }
5106
/* Copy an ARM-state ALU instruction with an immediate operand,
   rewriting PC-referencing register fields onto scratch registers.  */
static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  arm_displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);	/* MOV has no Rn operand.  */
  ULONGEST rd_val, rn_val;

  /* If neither Rn nor Rd is the PC, run the instruction unmodified.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear Rd and Rn; for non-MOV forms, 0x10000 sets the Rn field
     (bits 16-19) to r1.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5154
/* Copy a Thumb-2 32-bit ALU-immediate instruction (only the MOV form, per
   the assert below) whose Rd or the register in insn2's low nibble is the
   PC, using the same r0/r1 substitution scheme as arm_copy_alu_imm.  */

static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3);	/* Rn */
  rm = bits (insn2, 0, 3);	/* Rm */
  rd = bits (insn2, 8, 11);	/* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  /* No PC involved: the insn can run unmodified out of line.  */
  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear the Rd field (bits 8-11) so the copy writes r0, and set the low
     nibble to 1 so it reads r1 (loaded with Rn's value above).  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
5206
5207 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5208
5209 static void
5210 cleanup_alu_reg (struct gdbarch *gdbarch,
5211 struct regcache *regs, arm_displaced_step_closure *dsc)
5212 {
5213 ULONGEST rd_val;
5214 int i;
5215
5216 rd_val = displaced_read_reg (regs, dsc, 0);
5217
5218 for (i = 0; i < 3; i++)
5219 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5220
5221 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5222 }
5223
/* Prepare the register shuffle for a displaced ALU (register) instruction:
   save r0-r2, load them with the current values of Rd/Rn/Rm, and arrange
   for cleanup_alu_reg to move the result back and restore the scratch
   registers afterwards.  */

static void
install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
		 arm_displaced_step_closure *dsc,
		 unsigned int rd, unsigned int rn, unsigned int rm)
{
  ULONGEST rd_val, rn_val, rm_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm [, <shift>]

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
		  r0, r1, r2 <- rd, rn, rm
     Insn: <op><cond> r0, [r1,] r2 [, <shift>]
     Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
  */

  /* Save the scratch registers before loading operand values into them.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  dsc->cleanup = &cleanup_alu_reg;
}
5256
5257 static int
5258 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5259 arm_displaced_step_closure *dsc)
5260 {
5261 unsigned int op = bits (insn, 21, 24);
5262 int is_mov = (op == 0xd);
5263
5264 if (!insn_references_pc (insn, 0x000ff00ful))
5265 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5266
5267 if (debug_displaced)
5268 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5269 is_mov ? "move" : "ALU", (unsigned long) insn);
5270
5271 if (is_mov)
5272 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5273 else
5274 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5275
5276 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5277 bits (insn, 0, 3));
5278 return 0;
5279 }
5280
/* Copy a 16-bit Thumb ALU (hi-register) instruction whose Rd or Rm is the
   PC.  In this encoding Rd doubles as a source operand, so install_alu_reg
   receives RD for both the destination and first-source slots.  */

static int
thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  unsigned rm, rd;

  rm = bits (insn, 3, 6);
  /* Rd is split across bit 7 (high bit) and bits 0-2 in this encoding.  */
  rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);

  if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
    return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
			(unsigned short) insn);

  /* Rewrite the register fields: 0x10 sets Rm (bits 3-6) to r2 and Rd
     (bit 7 plus bits 0-2) to r0, matching install_alu_reg's r0/r2 scheme.  */
  dsc->modinsn[0] = ((insn & 0xff00) | 0x10);

  install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);

  return 0;
}
5304
5305 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5306
5307 static void
5308 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5309 struct regcache *regs,
5310 arm_displaced_step_closure *dsc)
5311 {
5312 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5313 int i;
5314
5315 for (i = 0; i < 4; i++)
5316 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5317
5318 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5319 }
5320
/* Prepare the register shuffle for a displaced ALU insn whose second
   operand is a register shifted by a register: save r0-r3, load them with
   Rd/Rn/Rm/Rs, and register cleanup_alu_shifted_reg to undo the shuffle
   and commit the result.  */

static void
install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
			 arm_displaced_step_closure *dsc,
			 unsigned int rd, unsigned int rn, unsigned int rm,
			 unsigned rs)
{
  int i;
  ULONGEST rd_val, rn_val, rm_val, rs_val;

  /* Instruction is of form:

     <op><cond> rd, [rn,] rm, <shift> rs

     Rewrite as:

     Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
		  r0, r1, r2, r3 <- rd, rn, rm, rs
     Insn: <op><cond> r0, r1, r2, <shift> r3
     Cleanup: tmp5 <- r0
	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
	      rd <- tmp5
  */

  /* Save the scratch registers before overwriting them with operands.  */
  for (i = 0; i < 4; i++)
    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

  rd_val = displaced_read_reg (regs, dsc, rd);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rm_val = displaced_read_reg (regs, dsc, rm);
  rs_val = displaced_read_reg (regs, dsc, rs);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
  dsc->rd = rd;
  dsc->cleanup = &cleanup_alu_shifted_reg;
}
5358
/* Copy an ARM ALU instruction with a register-shifted-register operand that
   references the PC, redirecting Rd/Rn/Rm/Rs to the scratch registers
   r0/r1/r2/r3 via install_alu_shifted_reg.  */

static int
arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);	/* MOV has no Rn operand.  */
  unsigned int rd, rn, rm, rs;

  /* No PC reference in Rd/Rn/Rm/Rs: the insn can run unmodified.  */
  if (!insn_references_pc (insn, 0x000fff0ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  rn = bits (insn, 16, 19);
  rm = bits (insn, 0, 3);
  rs = bits (insn, 8, 11);
  rd = bits (insn, 12, 15);

  /* Clear the register fields, then set Rs=r3 and Rm=r2 (0x302), plus
     Rn=r1 (0x10000) for non-MOV insns; Rd stays 0, i.e. r0.  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
  else
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;

  install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);

  return 0;
}
5390
5391 /* Clean up load instructions. */
5392
static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      arm_displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* The out-of-line copy loaded into r0 (and r1 for a doubleword), and r2
     holds the possibly-updated base register value.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers saved during the copy phase.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
5419
5420 /* Clean up store instructions. */
5421
5422 static void
5423 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5424 arm_displaced_step_closure *dsc)
5425 {
5426 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5427
5428 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5429 if (dsc->u.ldst.xfersize > 4)
5430 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5431 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5432 if (!dsc->u.ldst.immed)
5433 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5434 if (!dsc->u.ldst.restore_r4)
5435 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5436
5437 /* Writeback. */
5438 if (dsc->u.ldst.writeback)
5439 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5440 }
5441
5442 /* Copy "extra" load/store instructions. These are halfword/doubleword
5443 transfers, which have a different encoding to byte/word transfers. */
5444
5445 static int
5446 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5447 struct regcache *regs, arm_displaced_step_closure *dsc)
5448 {
5449 unsigned int op1 = bits (insn, 20, 24);
5450 unsigned int op2 = bits (insn, 5, 6);
5451 unsigned int rt = bits (insn, 12, 15);
5452 unsigned int rn = bits (insn, 16, 19);
5453 unsigned int rm = bits (insn, 0, 3);
5454 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5455 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5456 int immed = (op1 & 0x4) != 0;
5457 int opcode;
5458 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5459
5460 if (!insn_references_pc (insn, 0x000ff00ful))
5461 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5462
5463 if (debug_displaced)
5464 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5465 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5466 (unsigned long) insn);
5467
5468 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5469
5470 if (opcode < 0)
5471 internal_error (__FILE__, __LINE__,
5472 _("copy_extra_ld_st: instruction decode error"));
5473
5474 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5475 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5476 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5477 if (!immed)
5478 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5479
5480 rt_val = displaced_read_reg (regs, dsc, rt);
5481 if (bytesize[opcode] == 8)
5482 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5483 rn_val = displaced_read_reg (regs, dsc, rn);
5484 if (!immed)
5485 rm_val = displaced_read_reg (regs, dsc, rm);
5486
5487 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5488 if (bytesize[opcode] == 8)
5489 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5490 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5491 if (!immed)
5492 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5493
5494 dsc->rd = rt;
5495 dsc->u.ldst.xfersize = bytesize[opcode];
5496 dsc->u.ldst.rn = rn;
5497 dsc->u.ldst.immed = immed;
5498 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5499 dsc->u.ldst.restore_r4 = 0;
5500
5501 if (immed)
5502 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5503 ->
5504 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5505 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5506 else
5507 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5508 ->
5509 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5510 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5511
5512 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5513
5514 return 0;
5515 }
5516
5517 /* Copy byte/half word/word loads and stores. */
5518
/* Prepare the register shuffle for a displaced single load/store: save the
   scratch registers the modified copy will use, load them with Rt/Rn (and
   Rm for the register-offset form), and arrange for cleanup_load or
   cleanup_store to undo the shuffle.

   NOTE(review): tmp[1] is never saved here although cleanup_{load,store}
   restore it when xfersize > 4 — presumably callers of this routine only
   pass sizes <= 4; confirm against the call sites.  USERMODE is currently
   unused in this body.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    arm_displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save the scratch registers that the modified instruction sequence may
     clobber: r0 and r2 always, r3 for register-offset forms, and r4 for
     stores (used by the PC-store scratch sequence, see below).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Load the operands into the scratch registers: r0 <- Rt, r2 <- Rn,
     r3 <- Rm (register-offset form only).  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from dispalced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop  {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc   r4 = addr(Insn1) + offset - pc
				= addr(Insn1) + offset - addr(Insn3) - 8
				= offset - 16
     Insn4: add r4, r4, #8   r4 = offset - 8
     Insn5: add r0, r0, r4   r0 = from + 8 + offset - 8
			        = from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
5572
5573
/* Copy a Thumb-2 PC-relative (literal) load of SIZE bytes into Rt, by
   materializing the word-aligned PC value in r2 and the signed offset in
   r3, then executing "ldr r0, [r2, r3]" out of line.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);	/* Add (1) or subtract (0) offset.  */
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc,
	      r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal loads use the word-aligned PC as the base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
5629
/* Copy a Thumb-2 32-bit load (register or immediate offset form, selected
   by IMMED) whose Rt or Rn is the PC, redirecting Rt to r0, Rn to r2 and,
   for the register form, Rm to r3 via install_load_store.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  /* If neither Rt nor Rn is the PC, the insn can run unmodified.  */
  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
5677
5678
/* Copy an ARM single load/store (ldr/str/ldrb/strb, optionally the
   unprivileged "t" variants) that references the PC.  Operands are moved
   to scratch registers by install_load_store; the special case of storing
   the PC uses a six-insn sequence that computes the correct stored value
   via r4.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    arm_displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* If none of Rt/Rn/Rm is the PC, the insn can run unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* This sequence computes the value the original insn would have
	 stored for the PC (see the "To write PC" comment in
	 install_load_store) before performing the store itself.  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
5743
5744 /* Cleanup LDM instructions with fully-populated register list. This is an
5745 unfortunate corner case: it's impossible to implement correctly by modifying
5746 the instruction. The issue is as follows: we have an instruction,
5747
5748 ldm rN, {r0-r15}
5749
5750 which we must rewrite to avoid loading PC. A possible solution would be to
5751 do the load in two halves, something like (with suitable cleanup
5752 afterwards):
5753
5754 mov r8, rN
5755 ldm[id][ab] r8!, {r0-r7}
5756 str r7, <temp>
5757 ldm[id][ab] r8, {r7-r14}
5758 <bkpt>
5759
5760 but at present there's no suitable place for <temp>, since the scratch space
5761 is overwritten before the cleanup routine is called. For now, we simply
5762 emulate the instruction. */
5763
static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Pre-indexed forms bump the address before each transfer; post-indexed
     forms bump it after.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Increment forms walk the register list upwards from r0; decrement
     forms walk downwards from r15.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* The original insn was conditional; if its condition fails, the
     (NOP'd-out) copy did nothing and neither should we.  */
  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Emulate the load one register at a time, in list order.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register present in the mask.  */
      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback manually.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
5822
5823 /* Clean up an STM which included the PC in the register list. */
5824
5825 static void
5826 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5827 arm_displaced_step_closure *dsc)
5828 {
5829 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5830 int store_executed = condition_true (dsc->u.block.cond, status);
5831 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5832 CORE_ADDR stm_insn_addr;
5833 uint32_t pc_val;
5834 long offset;
5835 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5836
5837 /* If condition code fails, there's nothing else to do. */
5838 if (!store_executed)
5839 return;
5840
5841 if (dsc->u.block.increment)
5842 {
5843 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5844
5845 if (dsc->u.block.before)
5846 pc_stored_at += 4;
5847 }
5848 else
5849 {
5850 pc_stored_at = dsc->u.block.xfer_addr;
5851
5852 if (dsc->u.block.before)
5853 pc_stored_at -= 4;
5854 }
5855
5856 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5857 stm_insn_addr = dsc->scratch_base;
5858 offset = pc_val - stm_insn_addr;
5859
5860 if (debug_displaced)
5861 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5862 "STM instruction\n", offset);
5863
5864 /* Rewrite the stored PC to the proper value for the non-displaced original
5865 instruction. */
5866 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5867 dsc->insn_addr + offset);
5868 }
5869
5870 /* Clean up an LDM which includes the PC in the register list. We clumped all
5871 the registers in the transferred list into a contiguous range r0...rX (to
5872 avoid loading PC directly and losing control of the debugged program), so we
5873 must undo that here. */
5874
static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       arm_displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  /* The original insn was conditional; if the condition failed, the
     out-of-line copy loaded nothing, so there is nothing to shuffle.  */
  if (!load_executed)
    return;

  /* The copy loaded into the contiguous range r0..r(N-1); those are the
     registers we may have clobbered.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the original register list from r15 downwards, moving each loaded
     value from its temporary home r(N-1), r(N-2), ... to its real
     destination.  Going top-down guarantees a destination is never
     overwritten before its own loaded value has been moved out.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
5951
5952 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5953 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5954
static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  /* PC as the base register is architecturally unpredictable; warn and
     run the insn unmodified rather than guessing.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  /* Record everything the cleanup routines need to reconstruct the
     transfer: base value, addressing mode, condition and register list.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1), which the rewritten list will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	     ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	     ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
6068
/* Thumb-2 counterpart of arm_copy_block_xfer: handle 32-bit LDM/STM that
   mention the PC, using the same rewrite-to-contiguous-list (for loads) or
   fix-up-stored-PC (for stores) strategies.  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			arm_displaced_step_closure *dsc)
{
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  /* PC as the base register is architecturally unpredictable; warn and
     run the insn unmodified rather than guessing.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = 0;
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;	/* Thumb-2 LDM/STM is unconditional here.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = bitcount (regmask), new_regmask;
	  unsigned int i;

	  /* Save r0..r(N-1), which the rewritten contiguous list will
	     clobber; cleanup_block_load_pc restores them.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Disable writeback and emulate it manually in the cleanup
	     routine, as in arm_copy_block_xfer.  */
	  if (writeback)
	    insn1 &= ~(1 << 5);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including the PC: run as-is out of line, then patch the stored
	 PC value in cleanup_block_store_pc.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;
      dsc->cleanup = &cleanup_block_store_pc;
    }
  return 0;
}
6150
6151 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6152 This is used to avoid a dependency on BFD's bfd_endian enum. */
6153
6154 ULONGEST
6155 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6156 int byte_order)
6157 {
6158 return read_memory_unsigned_integer (memaddr, len,
6159 (enum bfd_endian) byte_order);
6160 }
6161
6162 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6163
6164 CORE_ADDR
6165 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6166 CORE_ADDR val)
6167 {
6168 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6169 }
6170
6171 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6172
static CORE_ADDR
arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
{
  /* Generic ARM has no syscall-specific knowledge; returning 0 here
     leaves syscall handling to OS-specific overrides of this hook
     (presumably installed elsewhere — not visible in this file chunk).  */
  return 0;
}
6178
6179 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6180
6181 int
6182 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6183 {
6184 return arm_is_thumb (self->regcache);
6185 }
6186
6187 /* single_step() is called just before we want to resume the inferior,
6188 if we want to single-step it but there is no hardware or kernel
6189 single-step support. We find the target of the coming instructions
6190 and breakpoint them. */
6191
6192 std::vector<CORE_ADDR>
6193 arm_software_single_step (struct regcache *regcache)
6194 {
6195 struct gdbarch *gdbarch = regcache->arch ();
6196 struct arm_get_next_pcs next_pcs_ctx;
6197
6198 arm_get_next_pcs_ctor (&next_pcs_ctx,
6199 &arm_get_next_pcs_ops,
6200 gdbarch_byte_order (gdbarch),
6201 gdbarch_byte_order_for_code (gdbarch),
6202 0,
6203 regcache);
6204
6205 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6206
6207 for (CORE_ADDR &pc_ref : next_pcs)
6208 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6209
6210 return next_pcs;
6211 }
6212
6213 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6214 for Linux, where some SVC instructions must be treated specially. */
6215
6216 static void
6217 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6218 arm_displaced_step_closure *dsc)
6219 {
6220 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6221
6222 if (debug_displaced)
6223 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6224 "%.8lx\n", (unsigned long) resume_addr);
6225
6226 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6227 }
6228
6229
/* Common copy routine for svc instruction.  */
6231
6232 static int
6233 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6234 arm_displaced_step_closure *dsc)
6235 {
6236 /* Preparation: none.
6237 Insn: unmodified svc.
6238 Cleanup: pc <- insn_addr + insn_size. */
6239
6240 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6241 instruction. */
6242 dsc->wrote_to_pc = 1;
6243
6244 /* Allow OS-specific code to override SVC handling. */
6245 if (dsc->u.svc.copy_svc_os)
6246 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6247 else
6248 {
6249 dsc->cleanup = &cleanup_svc;
6250 return 0;
6251 }
6252 }
6253
6254 static int
6255 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6256 struct regcache *regs, arm_displaced_step_closure *dsc)
6257 {
6258
6259 if (debug_displaced)
6260 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6261 (unsigned long) insn);
6262
6263 dsc->modinsn[0] = insn;
6264
6265 return install_svc (gdbarch, regs, dsc);
6266 }
6267
6268 static int
6269 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6270 struct regcache *regs, arm_displaced_step_closure *dsc)
6271 {
6272
6273 if (debug_displaced)
6274 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6275 insn);
6276
6277 dsc->modinsn[0] = insn;
6278
6279 return install_svc (gdbarch, regs, dsc);
6280 }
6281
6282 /* Copy undefined instructions. */
6283
6284 static int
6285 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6286 arm_displaced_step_closure *dsc)
6287 {
6288 if (debug_displaced)
6289 fprintf_unfiltered (gdb_stdlog,
6290 "displaced: copying undefined insn %.8lx\n",
6291 (unsigned long) insn);
6292
6293 dsc->modinsn[0] = insn;
6294
6295 return 0;
6296 }
6297
6298 static int
6299 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6300 arm_displaced_step_closure *dsc)
6301 {
6302
6303 if (debug_displaced)
6304 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6305 "%.4x %.4x\n", (unsigned short) insn1,
6306 (unsigned short) insn2);
6307
6308 dsc->modinsn[0] = insn1;
6309 dsc->modinsn[1] = insn2;
6310 dsc->numinsns = 2;
6311
6312 return 0;
6313 }
6314
6315 /* Copy unpredictable instructions. */
6316
6317 static int
6318 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6319 arm_displaced_step_closure *dsc)
6320 {
6321 if (debug_displaced)
6322 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6323 "%.8lx\n", (unsigned long) insn);
6324
6325 dsc->modinsn[0] = insn;
6326
6327 return 0;
6328 }
6329
6330 /* The decode_* functions are instruction decoding helpers. They mostly follow
6331 the presentation in the ARM ARM. */
6332
/* Decode miscellaneous, memory-hint and Advanced SIMD instructions in
   the ARM unconditional space, dispatching each encoding class to the
   appropriate displaced-stepping copy routine.  OP1 is insn bits 20-26,
   OP2 bits 4-7, RN bits 16-19 (matching the ARM ARM decode columns).
   Returns the chosen copy routine's result.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* pld/pldw with a PC base is unpredictable.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)	/* Bit 7 of op1 is don't-care here.  */
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
6392
/* Decode ARM unconditional (cond == 0b1111) instructions and dispatch
   to the appropriate displaced-stepping copy routine.  Bit 27 clear
   means the misc/memhint/Neon space; otherwise dispatch on bits
   {26,25,24,20}.  Returns the chosen copy routine's result.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      /* Store-to-coprocessor forms; dispatch on bits 21-23.  */
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Load-from-coprocessor forms; validity depends on whether the
	   base register is the PC (literal vs immediate addressing).  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6475
6476 /* Decode miscellaneous instructions in dp/misc encoding space. */
6477
static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      /* BX must be copied specially, since it may branch via the PC.  */
      if (op == 0x1) /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Other OP values for op2 == 0x7 fall through to undef.  */
      /* Fall through.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
6528
/* Decode ARM data-processing / miscellaneous instructions and dispatch
   to the appropriate displaced-stepping copy routine.  Bit 25 selects
   the immediate forms; otherwise op1 (bits 20-24) and op2 (bits 4-7)
   distinguish register/shifted-register ALU ops, the miscellaneous
   space, multiplies, synchronization and extra load/stores.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    arm_displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6574
/* Decode ARM word/unsigned-byte load/store instructions and dispatch to
   arm_copy_ldr_str_ldrb_strb with the flags selecting the exact flavour
   (the trailing three args appear to be load, size and user-mode — see
   that routine for their precise meaning).  A (bit 25) selects the
   register-offset form, B (bit 4) disambiguates it from media insns.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
6611
/* Decode ARM media instructions (parallel add/sub, pack/saturate,
   usad8/usada8, bit-field ops) and dispatch to the appropriate
   displaced-stepping copy routine.  All are copied unmodified; only
   encoding validation (undef cases) is performed here.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  arm_displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* Rd == 0xf selects usad8; otherwise usada8 (accumulate).  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* Rn == 0xf selects bfc (clear); otherwise bfi (insert).  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6667
6668 static int
6669 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6670 struct regcache *regs,
6671 arm_displaced_step_closure *dsc)
6672 {
6673 if (bit (insn, 25))
6674 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6675 else
6676 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6677 }
6678
/* Decode ARM extension-register (VFP/Neon) load/store instructions,
   dispatching on bits 20-24 to the appropriate copy routine.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6709
6710 /* Decode shifted register instructions. */
6711
6712 static int
6713 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6714 uint16_t insn2, struct regcache *regs,
6715 arm_displaced_step_closure *dsc)
6716 {
6717 /* PC is only allowed to be used in instruction MOV. */
6718
6719 unsigned int op = bits (insn1, 5, 8);
6720 unsigned int rn = bits (insn1, 0, 3);
6721
6722 if (op == 0x2 && rn == 0xf) /* MOV */
6723 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6724 else
6725 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6726 "dp (shift reg)", dsc);
6727 }
6728
6729
6730 /* Decode extension register load/store. Exactly the same as
6731 arm_decode_ext_reg_ld_st. */
6732
static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     arm_displaced_step_closure *dsc)
{
  /* Opcode field: insn1 bits 4-8; binary comments below show its
     pattern per case group.  */
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c:  /* 01x00 */
    case 0x0a: case 0x0e:  /* 01x10 */
    case 0x12: case 0x16:  /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d:  /* 01x01 */
    case 0x0b: case 0x0f:  /* 01x11 */
    case 0x13: case 0x17:  /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* vldr may be PC-relative (literal), so it needs the full copro
	 load/store copy treatment.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
6768
/* Decode ARM supervisor-call and coprocessor instructions (the cond
   0b11xx space), dispatching on op1 (bits 20-25), op (bit 4) and the
   coprocessor number (bits 8-11; 0b101x denotes VFP/Neon).  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
		      struct regcache *regs, arm_displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
6813
/* Decode Thumb-2 coprocessor, SIMD/VFP and SVC-space instructions and
   dispatch to the appropriate displaced-stepping copy routine.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x: SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0)  /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else  /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
6854
6855 static void
6856 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6857 arm_displaced_step_closure *dsc, int rd)
6858 {
6859 /* ADR Rd, #imm
6860
6861 Rewrite as:
6862
6863 Preparation: Rd <- PC
6864 Insn: ADD Rd, #imm
6865 Cleanup: Null.
6866 */
6867
6868 /* Rd <- PC */
6869 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6870 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6871 }
6872
6873 static int
6874 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6875 arm_displaced_step_closure *dsc,
6876 int rd, unsigned int imm)
6877 {
6878
6879 /* Encoding T2: ADDS Rd, #imm */
6880 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6881
6882 install_pc_relative (gdbarch, regs, dsc, rd);
6883
6884 return 0;
6885 }
6886
6887 static int
6888 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6889 struct regcache *regs,
6890 arm_displaced_step_closure *dsc)
6891 {
6892 unsigned int rd = bits (insn, 8, 10);
6893 unsigned int imm8 = bits (insn, 0, 7);
6894
6895 if (debug_displaced)
6896 fprintf_unfiltered (gdb_stdlog,
6897 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6898 rd, imm8, insn);
6899
6900 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6901 }
6902
/* Copy a 32-bit Thumb ADR instruction for displaced stepping.  The PC
   value is loaded into Rd beforehand (install_pc_relative), and the ADR
   is rewritten into a plain ADD or SUB immediate on Rd.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* ADR encoding T2 (the SUB form).  */
    {
      /* Emit SUB-immediate (encoding T3): SUB Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* ADR encoding T3 (the ADD form).  */
    {
      /* Emit ADD-immediate (encoding T3): ADD Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
6939
/* Copy a 16-bit Thumb PC-relative (literal) LDR for displaced stepping,
   rewriting it to a register-offset load through scratch registers.  */

static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  int imm8 = (bits (insn1, 0, 7) << 2);	/* Word-scaled offset.  */

  /* LDR Rd, #imm8

     Rewrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  /* Cleanup (cleanup_load) moves R0 into RT and restores the scratch
     registers from tmp[].  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0;  /* ldr r0, [r2, r3] */

  dsc->cleanup = &cleanup_load;

  return 0;
}
6987
/* Copy Thumb cbnz/cbz instruction.  */
6989
static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     arm_displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);	/* 1 = CBNZ, 0 = CBZ.  */
  /* Branch offset: imm5 from bits 3-7 plus the i bit, word-aligned.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);		/* Register compared with zero.  */
  int rn_val = displaced_read_reg (regs, dsc, rn);

  /* Evaluate the compare-and-branch condition now, since the register
     value is already known.  */
  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
      dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  /* Nothing to execute out of line; the branch is applied in cleanup.  */
  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
7026
7027 /* Copy Table Branch Byte/Halfword */
static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);	/* 1 = TBH (halfword table), 0 = TBB.  */
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  /* Read the branch-offset table entry directly from target memory,
     instead of executing the table branch out of line.  */
  if (is_tbh)
    {
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  /* NOTE(review): the second "offset" in this message labels the table
     entry (branch distance in halfwords) — slightly confusing wording.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  /* Branch destination is PC (insn + 4) plus twice the table entry;
     applied by cleanup_branch.  */
  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
7071
7072 static void
7073 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7074 arm_displaced_step_closure *dsc)
7075 {
7076 /* PC <- r7 */
7077 int val = displaced_read_reg (regs, dsc, 7);
7078 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7079
7080 /* r7 <- r8 */
7081 val = displaced_read_reg (regs, dsc, 8);
7082 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7083
7084 /* r8 <- tmp[0] */
7085 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7086
7087 }
7088
/* Copy a 16-bit Thumb "POP {..., PC}" for displaced stepping.  A POP
   whose register list includes PC cannot be executed out of line as-is;
   rewrite it per the scheme in the comment below and fix up in the
   cleanup routine.  */
static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): full register list.  Park the original r8 in tmp[0].  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;		/* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;		/* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): partial register list.  */
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      /* Save r0..rN; the rewritten POP loads N+1 registers, the extra
	 one receiving the value destined for PC.  */
      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Set the PC bit in the recorded regmask so the cleanup routine
	 knows to write PC from the popped values.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
7161
/* Decode the 16-bit Thumb instruction INSN1 and set up DSC for
   displaced stepping it.  Instructions that cannot read or write the
   PC are copied to the scratch space unmodified; PC-sensitive ones are
   routed to dedicated copy routines.  Calls internal_error if the
   instruction cannot be decoded.  */
static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions, dispatched on the major opcode in
     bits 12-15.  */
  switch (op_bit_12_15)
    {
      /* Shift (immediate), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3:  case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
7264
/* Decode a 32-bit Thumb-2 load / memory-hint instruction (PLD/PLI,
   LDRB/LDRSB, LDRH, LDR and variants) and set up DSC for displaced
   stepping it.  Bits 5-6 of INSN1 select the data size; PC-relative
   (literal) forms need fixing up, other forms are copied unmodified.
   Returns the copy routine's error indication (non-zero on failure).  */
static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);	/* Destination register.  */
  int rn = bits (insn1, 0, 3);		/* Base register; 0xf means PC.  */
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
7344
/* Decode a 32-bit Thumb-2 instruction (first halfword INSN1, second
   halfword INSN2) and set up DSC for displaced stepping it,
   dispatching on op1 = bits 11-12 of INSN1.  Calls internal_error if
   the instruction cannot be decoded.  */
static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int dp_op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR and ADD/SUB (PC-relative) need fixing up; the rest
		 cannot touch the PC.  */
	      if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
}
7472
7473 static void
7474 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7475 struct regcache *regs,
7476 arm_displaced_step_closure *dsc)
7477 {
7478 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7479 uint16_t insn1
7480 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7481
7482 if (debug_displaced)
7483 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7484 "at %.8lx\n", insn1, (unsigned long) from);
7485
7486 dsc->is_thumb = 1;
7487 dsc->insn_size = thumb_insn_size (insn1);
7488 if (thumb_insn_size (insn1) == 4)
7489 {
7490 uint16_t insn2
7491 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7492 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7493 }
7494 else
7495 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7496 }
7497
/* Decode the instruction at FROM and set up closure DSC for displaced
   stepping it in the scratch space at TO.  Hands off to the Thumb
   decoder when the inferior is in Thumb mode; otherwise decodes the
   32-bit ARM instruction by its major opcode fields.  Calls
   internal_error if the instruction cannot be decoded.  */
void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    arm_displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  /* Condition field 0xf selects the unconditional instruction space.
     Otherwise dispatch on bits 25-27 combined with bit 4.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
7555
/* Actually set up the scratch space for a displaced instruction.
   Writes the (possibly rewritten) instruction(s) recorded in DSC into
   the scratch area at TO, then appends a breakpoint of the matching
   flavour (ARM or Thumb) so execution stops once the copied
   instruction has run.  */
void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, arm_displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int size = dsc->is_thumb? 2 : 4;	/* Unit size: Thumb modinsn entries
					   are halfwords.  */
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short)dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);

	}
      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
7611
7612 /* Entry point for cleaning things up after a displaced instruction has been
7613 single-stepped. */
7614
7615 void
7616 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7617 struct displaced_step_closure *dsc_,
7618 CORE_ADDR from, CORE_ADDR to,
7619 struct regcache *regs)
7620 {
7621 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7622
7623 if (dsc->cleanup)
7624 dsc->cleanup (gdbarch, regs, dsc);
7625
7626 if (!dsc->wrote_to_pc)
7627 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7628 dsc->insn_addr + dsc->insn_size);
7629
7630 }
7631
7632 #include "bfd-in2.h"
7633 #include "libcoff.h"
7634
/* Disassembly hook: print the instruction at MEMADDR via the opcodes
   library.  When MEMADDR is a Thumb address, present a fake COFF Thumb
   symbol so the opcodes disassembler switches to Thumb decoding.  */
static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Static so the fake symbol is built only once and reused on
	 subsequent calls.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm() 
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
     accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
     opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
     the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
     in default_print_insn.  */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
7683
7684 /* The following define instruction sequences that will cause ARM
7685 cpu's to take an undefined instruction trap. These are used to
7686 signal a breakpoint to GDB.
7687
7688 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7689 modes. A different instruction is required for each mode. The ARM
7690 cpu's can also be big or little endian. Thus four different
7691 instructions are needed to support all cases.
7692
7693 Note: ARMv4 defines several new instructions that will take the
7694 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7695 not in fact add the new instructions. The new undefined
7696 instructions in ARMv4 are all instructions that had no defined
7697 behaviour in earlier chips. There is no guarantee that they will
   raise an exception, but may be treated as NOPs.  In practice, it
   may only be safe to rely on instructions matching:
7700
7701 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7702 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7703 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7704
   Even this may only be true if the condition predicate is true. The
7706 following use a condition predicate of ALWAYS so it is always TRUE.
7707
7708 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7709 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap. This can be handled by the
7711 abi-specific code during establishment of the gdbarch vector. */
7712
/* Breakpoint instruction byte patterns, in little- and big-endian byte
   order (see the comment above for why these encodings were chosen).
   The two Thumb bytes are identical, so its LE and BE forms coincide.  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint patterns installed into the gdbarch tdep unless an
   OS ABI overrides them.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7722
/* Implement the breakpoint_kind_from_pc gdbarch method.  Classify the
   breakpoint needed at *PCPTR as ARM, Thumb, or 32-bit Thumb-2, and
   strip the Thumb bit from *PCPTR for Thumb addresses.  */
static int
arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);

  if (arm_pc_is_thumb (gdbarch, *pcptr))
    {
      *pcptr = UNMAKE_THUMB_ADDR (*pcptr);

      /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
	 check whether we are replacing a 32-bit instruction.  */
      if (tdep->thumb2_breakpoint != NULL)
	{
	  gdb_byte buf[2];

	  if (target_read_memory (*pcptr, buf, 2) == 0)
	    {
	      unsigned short inst1;

	      /* The first halfword's bit pattern tells us the full
		 instruction's length.  */
	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
	      if (thumb_insn_size (inst1) == 4)
		return ARM_BP_KIND_THUMB2;
	    }
	}

      return ARM_BP_KIND_THUMB;
    }
  else
    return ARM_BP_KIND_ARM;

}
7757
7758 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7759
7760 static const gdb_byte *
7761 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7762 {
7763 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7764
7765 switch (kind)
7766 {
7767 case ARM_BP_KIND_ARM:
7768 *size = tdep->arm_breakpoint_size;
7769 return tdep->arm_breakpoint;
7770 case ARM_BP_KIND_THUMB:
7771 *size = tdep->thumb_breakpoint_size;
7772 return tdep->thumb_breakpoint;
7773 case ARM_BP_KIND_THUMB2:
7774 *size = tdep->thumb2_breakpoint_size;
7775 return tdep->thumb2_breakpoint;
7776 default:
7777 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7778 }
7779 }
7780
/* Implement the breakpoint_kind_from_current_state gdbarch method.
   Like arm_breakpoint_kind_from_pc, but when *PCPTR is the destination
   of the next single step, use the software single-step machinery to
   determine whether that destination executes in Thumb mode.  */
static int
arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
					struct regcache *regcache,
					CORE_ADDR *pcptr)
{
  gdb_byte buf[4];

  /* Check the memory pointed by PC is readable.  */
  if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
    {
      struct arm_get_next_pcs next_pcs_ctx;

      arm_get_next_pcs_ctor (&next_pcs_ctx,
			     &arm_get_next_pcs_ops,
			     gdbarch_byte_order (gdbarch),
			     gdbarch_byte_order_for_code (gdbarch),
			     0,
			     regcache);

      std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

      /* If MEMADDR is the next instruction of current pc, do the
	 software single step computation, and get the thumb mode by
	 the destination address.  */
      for (CORE_ADDR pc : next_pcs)
	{
	  if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
	    {
	      if (IS_THUMB_ADDR (pc))
		{
		  /* Destination is Thumb: tag *PCPTR so the kind
		     computation below sees a Thumb address.  */
		  *pcptr = MAKE_THUMB_ADDR (*pcptr);
		  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
		}
	      else
		return ARM_BP_KIND_ARM;
	    }
	}
    }

  /* Fall back to classifying by the address alone.  */
  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
}
7824
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  The register convention depends on the
   floating-point model in use (FPA register F0, or core registers
   r0/r1 for the soft-float and variadic-VFP cases).  */
static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[FP_REGISTER_SIZE];

	    regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Value lives in r0, and r1 too if wider than one register.  */
	  regs->cooked_read (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_read (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > INT_REGISTER_SIZE
				   ? INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regs->cooked_read (regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
7916
7917
7918 /* Will a function return an aggregate type in memory or in a
7919 register? Return 0 if an aggregate type can be returned in a
7920 register, 1 if it must be returned in memory. */
7921
7922 static int
7923 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7924 {
7925 enum type_code code;
7926
7927 type = check_typedef (type);
7928
7929 /* Simple, non-aggregate types (ie not including vectors and
7930 complex) are always returned in a register (or registers). */
7931 code = TYPE_CODE (type);
7932 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7933 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7934 return 0;
7935
7936 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7937 {
7938 /* Vector values should be returned using ARM registers if they
7939 are not over 16 bytes. */
7940 return (TYPE_LENGTH (type) > 16);
7941 }
7942
7943 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7944 {
7945 /* The AAPCS says all aggregates not larger than a word are returned
7946 in a register. */
7947 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7948 return 0;
7949
7950 return 1;
7951 }
7952 else
7953 {
7954 int nRc;
7955
7956 /* All aggregate types that won't fit in a register must be returned
7957 in memory. */
7958 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7959 return 1;
7960
7961 /* In the ARM ABI, "integer" like aggregate types are returned in
7962 registers. For an aggregate type to be integer like, its size
7963 must be less than or equal to INT_REGISTER_SIZE and the
7964 offset of each addressable subfield must be zero. Note that bit
7965 fields are not addressable, and all addressable subfields of
7966 unions always start at offset zero.
7967
7968 This function is based on the behaviour of GCC 2.95.1.
7969 See: gcc/arm.c: arm_return_in_memory() for details.
7970
7971 Note: All versions of GCC before GCC 2.95.2 do not set up the
7972 parameters correctly for a function returning the following
7973 structure: struct { float f;}; This should be returned in memory,
7974 not a register. Richard Earnshaw sent me a patch, but I do not
7975 know of any way to detect if a function like the above has been
7976 compiled with the correct calling convention. */
7977
7978 /* Assume all other aggregate types can be returned in a register.
7979 Run a check for structures, unions and arrays. */
7980 nRc = 0;
7981
7982 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7983 {
7984 int i;
7985 /* Need to check if this struct/union is "integer" like. For
7986 this to be true, its size must be less than or equal to
7987 INT_REGISTER_SIZE and the offset of each addressable
7988 subfield must be zero. Note that bit fields are not
7989 addressable, and unions always start at offset zero. If any
7990 of the subfields is a floating point type, the struct/union
7991 cannot be an integer type. */
7992
7993 /* For each field in the object, check:
7994 1) Is it FP? --> yes, nRc = 1;
7995 2) Is it addressable (bitpos != 0) and
7996 not packed (bitsize == 0)?
7997 --> yes, nRc = 1
7998 */
7999
8000 for (i = 0; i < TYPE_NFIELDS (type); i++)
8001 {
8002 enum type_code field_type_code;
8003
8004 field_type_code
8005 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8006 i)));
8007
8008 /* Is it a floating point type field? */
8009 if (field_type_code == TYPE_CODE_FLT)
8010 {
8011 nRc = 1;
8012 break;
8013 }
8014
8015 /* If bitpos != 0, then we have to care about it. */
8016 if (TYPE_FIELD_BITPOS (type, i) != 0)
8017 {
8018 /* Bitfields are not addressable. If the field bitsize is
8019 zero, then the field is not packed. Hence it cannot be
8020 a bitfield or any other packed type. */
8021 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8022 {
8023 nRc = 1;
8024 break;
8025 }
8026 }
8027 }
8028 }
8029
8030 return nRc;
8031 }
8032 }
8033
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  Mirrors arm_extract_return_value:
   the register convention depends on the floating-point model (FPA
   register F0, or core registers r0/r1 for the soft-float and
   variadic-VFP cases).  */
static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[FP_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:

	  /* Convert to the FPA's internal format before writing F0.  */
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regs->cooked_write (ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  regs->cooked_write (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_write (ARM_A1_REGNUM + 1, valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regs->cooked_write (regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regs->cooked_write (regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
8125
8126
/* Handle function return values.  Implement the gdbarch return_value
   method: decide whether VALTYPE is returned in registers or in
   memory, and copy the value between READBUF/WRITEBUF and REGCACHE
   when a register convention applies.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* VFP "composite type" candidates (homogeneous aggregates of
     floats/vectors) are returned in VFP registers under the VFP ABI,
     one base element per register.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are accessed as pairs of doubles.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      /* Look up s<i> or d<i> by name.  */
	      char name_buf[4];
	      int regnum;

	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache->cooked_write (regnum, writebuf + i * unit_length);
	      if (readbuf)
		regcache->cooked_read (regnum, readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      /* Aggregates may be returned in memory, depending on the
	 "struct return" setting and the per-ABI rules.  */
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* Everything else goes through the core registers.  */
  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8196
8197
/* Implement gdbarch_get_longjmp_target.  FRAME is stopped at a
   longjmp; r0 holds the jmp_buf pointer.  Read the saved PC from the
   per-ABI slot (tdep->jb_pc * tdep->jb_elt_size) of the buffer and
   store it in *PC.  Return non-zero on success, zero if the jmp_buf
   memory could not be read.  */

static int
arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR jb_addr;
  gdb_byte buf[INT_REGISTER_SIZE];

  jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);

  if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
			  INT_REGISTER_SIZE))
    return 0;

  *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
  return 1;
}
8216
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.  */

CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
	 check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
	return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (startswith (name, "_call_via_")
      || startswith (name, "__ARM_call_via_"))
    {
      /* Use the name suffix to determine which register contains the
         target PC.  */
      static const char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      /* All suffixes in the table are two characters long, so compare
	 the last two characters of NAME against each entry.  */
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
	if (strcmp (&name[offset], table[regno]) == 0)
	  return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
	  || (namelen > 2 + strlen ("_from_arm")
	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
    {
      char *target_name;
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      /* Distinguish the two suffixes by their final character
	 ('b' ends "_from_thumb").  */
      if (name[namelen - 1] == 'b')
	target_len -= strlen ("_from_thumb");
      else
	target_len -= strlen ("_from_arm");

      /* Extract "foo" from "__foo_from_...".  */
      target_name = (char *) alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      /* Prefer a symbol in the same objfile as the stub.  */
      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
	return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
	return 0;
    }

  return 0;			/* not a stub */
}
8297
8298 static void
8299 set_arm_command (const char *args, int from_tty)
8300 {
8301 printf_unfiltered (_("\
8302 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8303 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8304 }
8305
/* Top-level "show arm" command: display the values of all "show arm"
   subcommands.  */

static void
show_arm_command (const char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
8311
/* Re-select the target architecture after one of the ARM-specific
   user settings (ABI, FP model, ...) has changed, so the new setting
   takes effect.  No-op when the current architecture is not ARM.  */

static void
arm_update_current_architecture (void)
{
  struct gdbarch_info info;

  /* If the current architecture is not ARM, we have nothing to do.  */
  if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
    return;

  /* Update the architecture.  */
  gdbarch_info_init (&info);

  if (!gdbarch_update_p (info))
    internal_error (__FILE__, __LINE__, _("could not update architecture"));
}
8327
8328 static void
8329 set_fp_model_sfunc (const char *args, int from_tty,
8330 struct cmd_list_element *c)
8331 {
8332 int fp_model;
8333
8334 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8335 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8336 {
8337 arm_fp_model = (enum arm_float_model) fp_model;
8338 break;
8339 }
8340
8341 if (fp_model == ARM_FLOAT_LAST)
8342 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8343 current_fp_model);
8344
8345 arm_update_current_architecture ();
8346 }
8347
/* Show-hook for "show arm fpu".  When the setting is "auto" and the
   current architecture is ARM, also report the model actually in use
   from the tdep.  */

static void
show_fp_model (struct ui_file *file, int from_tty,
	       struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
		      fp_model_strings[tdep->fp_model]);
  else
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
		      fp_model_strings[arm_fp_model]);
}
8364
8365 static void
8366 arm_set_abi (const char *args, int from_tty,
8367 struct cmd_list_element *c)
8368 {
8369 int arm_abi;
8370
8371 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8372 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8373 {
8374 arm_abi_global = (enum arm_abi_kind) arm_abi;
8375 break;
8376 }
8377
8378 if (arm_abi == ARM_ABI_LAST)
8379 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8380 arm_abi_string);
8381
8382 arm_update_current_architecture ();
8383 }
8384
/* Show-hook for "show arm abi".  When the setting is "auto" and the
   current architecture is ARM, also report the ABI actually selected
   from the tdep.  */

static void
arm_show_abi (struct ui_file *file, int from_tty,
	      struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_abi_global == ARM_ABI_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM ABI is \"auto\" (currently \"%s\").\n"),
		      arm_abi_strings[tdep->arm_abi]);
  else
    fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
		      arm_abi_string);
}
8400
/* Show-hook for "show arm fallback-mode": the ARM/Thumb mode assumed
   when symbol information is unavailable.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
8410
/* Show-hook for "show arm force-mode": the ARM/Thumb mode assumed
   even when symbol information is available.  */

static void
arm_show_force_mode (struct ui_file *file, int from_tty,
		     struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(even when symbols are available) is \"%s\".\n"),
		    arm_force_mode_string);
}
8420
8421 /* If the user changes the register disassembly style used for info
8422 register and other commands, we have to also switch the style used
8423 in opcodes for disassembly output. This function is run in the "set
8424 arm disassembly" command, and does that. */
8425
8426 static void
8427 set_disassembly_style_sfunc (const char *args, int from_tty,
8428 struct cmd_list_element *c)
8429 {
8430 /* Convert the short style name into the long style name (eg, reg-names-*)
8431 before calling the generic set_disassembler_options() function. */
8432 std::string long_name = std::string ("reg-names-") + disassembly_style;
8433 set_disassembler_options (&long_name[0]);
8434 }
8435
/* Show-hook for "show arm disassembler": recover the short style name
   by scanning the current disassembler options for the last
   "reg-names-*" entry.  */

static void
show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
			      struct cmd_list_element *c, const char *value)
{
  struct gdbarch *gdbarch = get_current_arch ();
  char *options = get_disassembler_options (gdbarch);
  const char *style = "";
  int len = 0;
  const char *opt;

  /* If "reg-names-" appears more than once, the last occurrence wins.  */
  FOR_EACH_DISASSEMBLER_OPTION (opt, options)
    if (CONST_STRNEQ (opt, "reg-names-"))
      {
	style = &opt[strlen ("reg-names-")];
	/* The style name ends at the next option separator, if any.  */
	len = strcspn (style, ",");
      }

  fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
}
8455 \f
/* Return the ARM register name corresponding to register I.
   Pseudo-register numbers (at or above gdbarch_num_regs) map to the
   VFP single-precision views s0-s31 and, when NEON pseudos are
   available, the quad registers q0-q15 after them.  */
static const char *
arm_register_name (struct gdbarch *gdbarch, int i)
{
  const int num_regs = gdbarch_num_regs (gdbarch);

  /* Pseudos NUM_REGS .. NUM_REGS+31 are the s registers.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && i >= num_regs && i < num_regs + 32)
    {
      static const char *const vfp_pseudo_names[] = {
	"s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
	"s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
	"s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
	"s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
      };

      return vfp_pseudo_names[i - num_regs];
    }

  /* Pseudos NUM_REGS+32 .. NUM_REGS+47 are the q registers.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && i >= num_regs + 32 && i < num_regs + 32 + 16)
    {
      static const char *const neon_pseudo_names[] = {
	"q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
	"q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
      };

      return neon_pseudo_names[i - num_regs - 32];
    }

  if (i >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return "";

  return arm_register_names[i];
}
8493
8494 /* Test whether the coff symbol specific value corresponds to a Thumb
8495 function. */
8496
8497 static int
8498 coff_sym_is_thumb (int val)
8499 {
8500 return (val == C_THUMBEXT
8501 || val == C_THUMBSTAT
8502 || val == C_THUMBEXTFUNC
8503 || val == C_THUMBSTATFUNC
8504 || val == C_THUMBLABEL);
8505 }
8506
/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  elf_symbol_type *elfsym = (elf_symbol_type *) sym;

  /* ELF encodes the branch type in the symbol's target-internal
     field; ST_BRANCH_TO_THUMB marks a Thumb entry point.  */
  if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
8523
/* COFF counterpart of arm_elf_make_msymbol_special: mark MSYM special
   when its storage class VAL is one of the Thumb classes.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
8530
/* Destructor for the per-objfile ARM data attached via
   arm_objfile_data_key.  */

static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  delete (struct arm_per_objfile *) arg;
}
8538
/* Record an ARM mapping symbol ($a, $t or $d) for OBJFILE.  SYM's
   value and type letter are appended to the per-section vector in the
   objfile's arm_per_objfile data; the vector is sorted lazily on
   first use.  Other '$'-prefixed symbols are ignored.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Create the per-objfile data on first use.  */
  data = (struct arm_per_objfile *) objfile_data (objfile,
						  arm_objfile_data_key);
  if (data == NULL)
    {
      data = new arm_per_objfile (objfile->obfd->section_count);
      set_objfile_data (objfile, arm_objfile_data_key, data);
    }
  arm_mapping_symbol_vec &map
    = data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Insert at the end, the vector will be sorted on first use.  */
  map.push_back (new_map_sym);
}
8567
/* Implement gdbarch_write_pc.  Besides writing PC itself, keep the
   Thumb (T) bit of the status register consistent with the mode of
   the code at PC.  */

static void
arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = regcache->arch ();
  regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);

  /* If necessary, set the T bit.  */
  if (arm_apcs_32)
    {
      ULONGEST val, t_bit;
      regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
      t_bit = arm_psr_thumb_bit (gdbarch);
      if (arm_pc_is_thumb (gdbarch, pc))
	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
					val | t_bit);
      else
	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
					val & ~t_bit);
    }
}
8588
/* Read the contents of a NEON quad register, by reading from two
   double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index of
   the quad register, in [0, 15].  Returns REG_VALID on success, or
   the failing status of the first unreadable half.  */

static enum register_status
arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
		    int regnum, gdb_byte *buf)
{
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;
  enum register_status status;

  /* q<N> is composed of d<2N> and d<2N+1>; look d<2N> up by name.  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  status = regcache->raw_read (double_regnum, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  /* The other double register fills the other half of BUF.  */
  offset = 8 - offset;
  status = regcache->raw_read (double_regnum + 1, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  return REG_VALID;
}
8628
/* Implement gdbarch pseudo_register_read.  Pseudo registers (numbered
   relative to gdbarch_num_regs) 0-31 are the single-precision views
   s0-s31 of the double registers; 32-47 are the NEON quad registers
   q0-q15.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* s<N> lives in half of d<N/2>; look the double up by name.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache->raw_read (double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
8667
/* Store the contents of BUF to a NEON quad register, by writing to
   two double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index
   of the quad register, in [0, 15].  */

static void
arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
		     int regnum, const gdb_byte *buf)
{
  char name_buf[4];
  int offset, double_regnum;

  /* q<N> is composed of d<2N> and d<2N+1>; look d<2N> up by name.  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  regcache->raw_write (double_regnum, buf + offset);
  offset = 8 - offset;
  regcache->raw_write (double_regnum + 1, buf + offset);
}
8696
/* Implement gdbarch pseudo_register_write.  Pseudo registers
   (numbered relative to gdbarch_num_regs) 0-31 are the
   single-precision views s0-s31 of the double registers; 32-47 are
   the NEON quad registers q0-q15.  Writing a single register is a
   read-modify-write of half of the underlying double register.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* NOTE(review): the raw_read status is not checked here, so if
	 the double register is unavailable the untouched half of
	 REG_BUF is written back from stack garbage — presumably
	 acceptable in this path; confirm against other tdep files.  */
      regcache->raw_read (double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache->raw_write (double_regnum, reg_buf);
    }
}
8732
/* Getter for user registers registered in _initialize-time code:
   BATON points at the register number to read from FRAME.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = (const int *) baton;
  return value_of_register (*reg_p, frame);
}
8739 \f
/* OS/ABI sniffer for ARM ELF binaries.  For the ELFOSABI_ARM value
   used by GNU tools, scan the note sections for an ABI tag; anything
   else is left to the generic ELF sniffer (GDB_OSABI_UNKNOWN).  */

static enum gdb_osabi
arm_elf_osabi_sniffer (bfd *abfd)
{
  unsigned int elfosabi;
  enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;

  elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];

  if (elfosabi == ELFOSABI_ARM)
    /* GNU tools use this value.  Check note sections in this case,
       as well.  */
    bfd_map_over_sections (abfd,
			   generic_elf_osabi_sniff_abi_tag_sections,
			   &osabi);

  /* Anything else will be handled by the generic ELF sniffer.  */
  return osabi;
}
8758
8759 static int
8760 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8761 struct reggroup *group)
8762 {
8763 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8764 this, FPS register belongs to save_regroup, restore_reggroup, and
8765 all_reggroup, of course. */
8766 if (regnum == ARM_FPS_REGNUM)
8767 return (group == float_reggroup
8768 || group == save_reggroup
8769 || group == restore_reggroup
8770 || group == all_reggroup);
8771 else
8772 return default_register_reggroup_p (gdbarch, regnum, group);
8773 }
8774
8775 \f
/* For backward-compatibility we allow two 'g' packet lengths with
   the remote protocol depending on whether FPA registers are
   supplied.  M-profile targets do not have FPA registers, but some
   stubs already exist in the wild which use a 'g' packet which
   supplies them albeit with dummy values.  The packet format which
   includes FPA registers should be considered deprecated for
   M-profile targets.  */

static void
arm_register_g_packet_guesses (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    {
      /* If we know from the executable this is an M-profile target,
	 cater for remote targets whose register set layout is the
	 same as the FPA layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (8 * FP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_fpa_layout);

      /* The regular M-profile layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + INT_REGISTER_SIZE,
				      tdesc_arm_with_m);

      /* M-profile plus M4F VFP.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (16 * VFP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_vfp_d16);
    }

  /* Otherwise we don't have a useful guess.  */
}
8817
8818 /* Implement the code_of_frame_writable gdbarch method. */
8819
8820 static int
8821 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8822 {
8823 if (gdbarch_tdep (gdbarch)->is_m
8824 && get_frame_type (frame) == SIGTRAMP_FRAME)
8825 {
8826 /* M-profile exception frames return to some magic PCs, where
8827 isn't writable at all. */
8828 return 0;
8829 }
8830 else
8831 return 1;
8832 }
8833
8834 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8835 to be postfixed by a version (eg armv7hl). */
8836
8837 static const char *
8838 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8839 {
8840 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8841 return "arm(v[^- ]*)?";
8842 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8843 }
8844
8845 /* Initialize the current architecture based on INFO. If possible,
8846 re-use an architecture from ARCHES, which is a list of
8847 architectures already created during this debugging session.
8848
8849 Called e.g. at program startup, when reading a core file, and when
8850 reading a binary file. */
8851
8852 static struct gdbarch *
8853 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8854 {
8855 struct gdbarch_tdep *tdep;
8856 struct gdbarch *gdbarch;
8857 struct gdbarch_list *best_arch;
8858 enum arm_abi_kind arm_abi = arm_abi_global;
8859 enum arm_float_model fp_model = arm_fp_model;
8860 struct tdesc_arch_data *tdesc_data = NULL;
8861 int i, is_m = 0;
8862 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8863 int have_wmmx_registers = 0;
8864 int have_neon = 0;
8865 int have_fpa_registers = 1;
8866 const struct target_desc *tdesc = info.target_desc;
8867
8868 /* If we have an object to base this architecture on, try to determine
8869 its ABI. */
8870
8871 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8872 {
8873 int ei_osabi, e_flags;
8874
8875 switch (bfd_get_flavour (info.abfd))
8876 {
8877 case bfd_target_coff_flavour:
8878 /* Assume it's an old APCS-style ABI. */
8879 /* XXX WinCE? */
8880 arm_abi = ARM_ABI_APCS;
8881 break;
8882
8883 case bfd_target_elf_flavour:
8884 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8885 e_flags = elf_elfheader (info.abfd)->e_flags;
8886
8887 if (ei_osabi == ELFOSABI_ARM)
8888 {
8889 /* GNU tools used to use this value, but do not for EABI
8890 objects. There's nowhere to tag an EABI version
8891 anyway, so assume APCS. */
8892 arm_abi = ARM_ABI_APCS;
8893 }
8894 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8895 {
8896 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8897
8898 switch (eabi_ver)
8899 {
8900 case EF_ARM_EABI_UNKNOWN:
8901 /* Assume GNU tools. */
8902 arm_abi = ARM_ABI_APCS;
8903 break;
8904
8905 case EF_ARM_EABI_VER4:
8906 case EF_ARM_EABI_VER5:
8907 arm_abi = ARM_ABI_AAPCS;
8908 /* EABI binaries default to VFP float ordering.
8909 They may also contain build attributes that can
8910 be used to identify if the VFP argument-passing
8911 ABI is in use. */
8912 if (fp_model == ARM_FLOAT_AUTO)
8913 {
8914 #ifdef HAVE_ELF
8915 switch (bfd_elf_get_obj_attr_int (info.abfd,
8916 OBJ_ATTR_PROC,
8917 Tag_ABI_VFP_args))
8918 {
8919 case AEABI_VFP_args_base:
8920 /* "The user intended FP parameter/result
8921 passing to conform to AAPCS, base
8922 variant". */
8923 fp_model = ARM_FLOAT_SOFT_VFP;
8924 break;
8925 case AEABI_VFP_args_vfp:
8926 /* "The user intended FP parameter/result
8927 passing to conform to AAPCS, VFP
8928 variant". */
8929 fp_model = ARM_FLOAT_VFP;
8930 break;
8931 case AEABI_VFP_args_toolchain:
8932 /* "The user intended FP parameter/result
8933 passing to conform to tool chain-specific
8934 conventions" - we don't know any such
8935 conventions, so leave it as "auto". */
8936 break;
8937 case AEABI_VFP_args_compatible:
8938 /* "Code is compatible with both the base
8939 and VFP variants; the user did not permit
8940 non-variadic functions to pass FP
8941 parameters/results" - leave it as
8942 "auto". */
8943 break;
8944 default:
8945 /* Attribute value not mentioned in the
8946 November 2012 ABI, so leave it as
8947 "auto". */
8948 break;
8949 }
8950 #else
8951 fp_model = ARM_FLOAT_SOFT_VFP;
8952 #endif
8953 }
8954 break;
8955
8956 default:
8957 /* Leave it as "auto". */
8958 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8959 break;
8960 }
8961
8962 #ifdef HAVE_ELF
8963 /* Detect M-profile programs. This only works if the
8964 executable file includes build attributes; GCC does
8965 copy them to the executable, but e.g. RealView does
8966 not. */
8967 int attr_arch
8968 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8969 Tag_CPU_arch);
8970 int attr_profile
8971 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8972 Tag_CPU_arch_profile);
8973
8974 /* GCC specifies the profile for v6-M; RealView only
8975 specifies the profile for architectures starting with
8976 V7 (as opposed to architectures with a tag
8977 numerically greater than TAG_CPU_ARCH_V7). */
8978 if (!tdesc_has_registers (tdesc)
8979 && (attr_arch == TAG_CPU_ARCH_V6_M
8980 || attr_arch == TAG_CPU_ARCH_V6S_M
8981 || attr_profile == 'M'))
8982 is_m = 1;
8983 #endif
8984 }
8985
8986 if (fp_model == ARM_FLOAT_AUTO)
8987 {
8988 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8989 {
8990 case 0:
8991 /* Leave it as "auto". Strictly speaking this case
8992 means FPA, but almost nobody uses that now, and
8993 many toolchains fail to set the appropriate bits
8994 for the floating-point model they use. */
8995 break;
8996 case EF_ARM_SOFT_FLOAT:
8997 fp_model = ARM_FLOAT_SOFT_FPA;
8998 break;
8999 case EF_ARM_VFP_FLOAT:
9000 fp_model = ARM_FLOAT_VFP;
9001 break;
9002 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9003 fp_model = ARM_FLOAT_SOFT_VFP;
9004 break;
9005 }
9006 }
9007
9008 if (e_flags & EF_ARM_BE8)
9009 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9010
9011 break;
9012
9013 default:
9014 /* Leave it as "auto". */
9015 break;
9016 }
9017 }
9018
9019 /* Check any target description for validity. */
9020 if (tdesc_has_registers (tdesc))
9021 {
9022 /* For most registers we require GDB's default names; but also allow
9023 the numeric names for sp / lr / pc, as a convenience. */
9024 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9025 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9026 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9027
9028 const struct tdesc_feature *feature;
9029 int valid_p;
9030
9031 feature = tdesc_find_feature (tdesc,
9032 "org.gnu.gdb.arm.core");
9033 if (feature == NULL)
9034 {
9035 feature = tdesc_find_feature (tdesc,
9036 "org.gnu.gdb.arm.m-profile");
9037 if (feature == NULL)
9038 return NULL;
9039 else
9040 is_m = 1;
9041 }
9042
9043 tdesc_data = tdesc_data_alloc ();
9044
9045 valid_p = 1;
9046 for (i = 0; i < ARM_SP_REGNUM; i++)
9047 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9048 arm_register_names[i]);
9049 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9050 ARM_SP_REGNUM,
9051 arm_sp_names);
9052 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9053 ARM_LR_REGNUM,
9054 arm_lr_names);
9055 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9056 ARM_PC_REGNUM,
9057 arm_pc_names);
9058 if (is_m)
9059 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9060 ARM_PS_REGNUM, "xpsr");
9061 else
9062 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9063 ARM_PS_REGNUM, "cpsr");
9064
9065 if (!valid_p)
9066 {
9067 tdesc_data_cleanup (tdesc_data);
9068 return NULL;
9069 }
9070
9071 feature = tdesc_find_feature (tdesc,
9072 "org.gnu.gdb.arm.fpa");
9073 if (feature != NULL)
9074 {
9075 valid_p = 1;
9076 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9077 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9078 arm_register_names[i]);
9079 if (!valid_p)
9080 {
9081 tdesc_data_cleanup (tdesc_data);
9082 return NULL;
9083 }
9084 }
9085 else
9086 have_fpa_registers = 0;
9087
9088 feature = tdesc_find_feature (tdesc,
9089 "org.gnu.gdb.xscale.iwmmxt");
9090 if (feature != NULL)
9091 {
9092 static const char *const iwmmxt_names[] = {
9093 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9094 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9095 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9096 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9097 };
9098
9099 valid_p = 1;
9100 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9101 valid_p
9102 &= tdesc_numbered_register (feature, tdesc_data, i,
9103 iwmmxt_names[i - ARM_WR0_REGNUM]);
9104
9105 /* Check for the control registers, but do not fail if they
9106 are missing. */
9107 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9108 tdesc_numbered_register (feature, tdesc_data, i,
9109 iwmmxt_names[i - ARM_WR0_REGNUM]);
9110
9111 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9112 valid_p
9113 &= tdesc_numbered_register (feature, tdesc_data, i,
9114 iwmmxt_names[i - ARM_WR0_REGNUM]);
9115
9116 if (!valid_p)
9117 {
9118 tdesc_data_cleanup (tdesc_data);
9119 return NULL;
9120 }
9121
9122 have_wmmx_registers = 1;
9123 }
9124
9125 /* If we have a VFP unit, check whether the single precision registers
9126 are present. If not, then we will synthesize them as pseudo
9127 registers. */
9128 feature = tdesc_find_feature (tdesc,
9129 "org.gnu.gdb.arm.vfp");
9130 if (feature != NULL)
9131 {
9132 static const char *const vfp_double_names[] = {
9133 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9134 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9135 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9136 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9137 };
9138
9139 /* Require the double precision registers. There must be either
9140 16 or 32. */
9141 valid_p = 1;
9142 for (i = 0; i < 32; i++)
9143 {
9144 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9145 ARM_D0_REGNUM + i,
9146 vfp_double_names[i]);
9147 if (!valid_p)
9148 break;
9149 }
9150 if (!valid_p && i == 16)
9151 valid_p = 1;
9152
9153 /* Also require FPSCR. */
9154 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9155 ARM_FPSCR_REGNUM, "fpscr");
9156 if (!valid_p)
9157 {
9158 tdesc_data_cleanup (tdesc_data);
9159 return NULL;
9160 }
9161
9162 if (tdesc_unnumbered_register (feature, "s0") == 0)
9163 have_vfp_pseudos = 1;
9164
9165 vfp_register_count = i;
9166
9167 /* If we have VFP, also check for NEON. The architecture allows
9168 NEON without VFP (integer vector operations only), but GDB
9169 does not support that. */
9170 feature = tdesc_find_feature (tdesc,
9171 "org.gnu.gdb.arm.neon");
9172 if (feature != NULL)
9173 {
9174 /* NEON requires 32 double-precision registers. */
9175 if (i != 32)
9176 {
9177 tdesc_data_cleanup (tdesc_data);
9178 return NULL;
9179 }
9180
9181 /* If there are quad registers defined by the stub, use
9182 their type; otherwise (normally) provide them with
9183 the default type. */
9184 if (tdesc_unnumbered_register (feature, "q0") == 0)
9185 have_neon_pseudos = 1;
9186
9187 have_neon = 1;
9188 }
9189 }
9190 }
9191
9192 /* If there is already a candidate, use it. */
9193 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9194 best_arch != NULL;
9195 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9196 {
9197 if (arm_abi != ARM_ABI_AUTO
9198 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9199 continue;
9200
9201 if (fp_model != ARM_FLOAT_AUTO
9202 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9203 continue;
9204
9205 /* There are various other properties in tdep that we do not
9206 need to check here: those derived from a target description,
9207 since gdbarches with a different target description are
9208 automatically disqualified. */
9209
9210 /* Do check is_m, though, since it might come from the binary. */
9211 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9212 continue;
9213
9214 /* Found a match. */
9215 break;
9216 }
9217
9218 if (best_arch != NULL)
9219 {
9220 if (tdesc_data != NULL)
9221 tdesc_data_cleanup (tdesc_data);
9222 return best_arch->gdbarch;
9223 }
9224
9225 tdep = XCNEW (struct gdbarch_tdep);
9226 gdbarch = gdbarch_alloc (&info, tdep);
9227
9228 /* Record additional information about the architecture we are defining.
9229 These are gdbarch discriminators, like the OSABI. */
9230 tdep->arm_abi = arm_abi;
9231 tdep->fp_model = fp_model;
9232 tdep->is_m = is_m;
9233 tdep->have_fpa_registers = have_fpa_registers;
9234 tdep->have_wmmx_registers = have_wmmx_registers;
9235 gdb_assert (vfp_register_count == 0
9236 || vfp_register_count == 16
9237 || vfp_register_count == 32);
9238 tdep->vfp_register_count = vfp_register_count;
9239 tdep->have_vfp_pseudos = have_vfp_pseudos;
9240 tdep->have_neon_pseudos = have_neon_pseudos;
9241 tdep->have_neon = have_neon;
9242
9243 arm_register_g_packet_guesses (gdbarch);
9244
9245 /* Breakpoints. */
9246 switch (info.byte_order_for_code)
9247 {
9248 case BFD_ENDIAN_BIG:
9249 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9250 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9251 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9252 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9253
9254 break;
9255
9256 case BFD_ENDIAN_LITTLE:
9257 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9258 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9259 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9260 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9261
9262 break;
9263
9264 default:
9265 internal_error (__FILE__, __LINE__,
9266 _("arm_gdbarch_init: bad byte order for float format"));
9267 }
9268
9269 /* On ARM targets char defaults to unsigned. */
9270 set_gdbarch_char_signed (gdbarch, 0);
9271
9272 /* wchar_t is unsigned under the AAPCS. */
9273 if (tdep->arm_abi == ARM_ABI_AAPCS)
9274 set_gdbarch_wchar_signed (gdbarch, 0);
9275 else
9276 set_gdbarch_wchar_signed (gdbarch, 1);
9277
9278 /* Compute type alignment. */
9279 set_gdbarch_type_align (gdbarch, arm_type_align);
9280
9281 /* Note: for displaced stepping, this includes the breakpoint, and one word
9282 of additional scratch space. This setting isn't used for anything beside
9283 displaced stepping at present. */
9284 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9285
9286 /* This should be low enough for everything. */
9287 tdep->lowest_pc = 0x20;
9288 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9289
9290 /* The default, for both APCS and AAPCS, is to return small
9291 structures in registers. */
9292 tdep->struct_return = reg_struct_return;
9293
9294 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9295 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9296
9297 if (is_m)
9298 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9299
9300 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9301
9302 frame_base_set_default (gdbarch, &arm_normal_base);
9303
9304 /* Address manipulation. */
9305 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9306
9307 /* Advance PC across function entry code. */
9308 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9309
9310 /* Detect whether PC is at a point where the stack has been destroyed. */
9311 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9312
9313 /* Skip trampolines. */
9314 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9315
9316 /* The stack grows downward. */
9317 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9318
9319 /* Breakpoint manipulation. */
9320 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9321 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9322 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9323 arm_breakpoint_kind_from_current_state);
9324
9325 /* Information about registers, etc. */
9326 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9327 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9328 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9329 set_gdbarch_register_type (gdbarch, arm_register_type);
9330 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9331
9332 /* This "info float" is FPA-specific. Use the generic version if we
9333 do not have FPA. */
9334 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9335 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9336
9337 /* Internal <-> external register number maps. */
9338 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9339 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9340
9341 set_gdbarch_register_name (gdbarch, arm_register_name);
9342
9343 /* Returning results. */
9344 set_gdbarch_return_value (gdbarch, arm_return_value);
9345
9346 /* Disassembly. */
9347 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9348
9349 /* Minsymbol frobbing. */
9350 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9351 set_gdbarch_coff_make_msymbol_special (gdbarch,
9352 arm_coff_make_msymbol_special);
9353 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9354
9355 /* Thumb-2 IT block support. */
9356 set_gdbarch_adjust_breakpoint_address (gdbarch,
9357 arm_adjust_breakpoint_address);
9358
9359 /* Virtual tables. */
9360 set_gdbarch_vbit_in_delta (gdbarch, 1);
9361
9362 /* Hook in the ABI-specific overrides, if they have been registered. */
9363 gdbarch_init_osabi (info, gdbarch);
9364
9365 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9366
9367 /* Add some default predicates. */
9368 if (is_m)
9369 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9370 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9371 dwarf2_append_unwinders (gdbarch);
9372 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9373 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9374 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9375
9376 /* Now we have tuned the configuration, set a few final things,
9377 based on what the OS ABI has told us. */
9378
9379 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9380 binaries are always marked. */
9381 if (tdep->arm_abi == ARM_ABI_AUTO)
9382 tdep->arm_abi = ARM_ABI_APCS;
9383
9384 /* Watchpoints are not steppable. */
9385 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9386
9387 /* We used to default to FPA for generic ARM, but almost nobody
9388 uses that now, and we now provide a way for the user to force
9389 the model. So default to the most useful variant. */
9390 if (tdep->fp_model == ARM_FLOAT_AUTO)
9391 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9392
9393 if (tdep->jb_pc >= 0)
9394 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9395
9396 /* Floating point sizes and format. */
9397 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9398 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9399 {
9400 set_gdbarch_double_format
9401 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9402 set_gdbarch_long_double_format
9403 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9404 }
9405 else
9406 {
9407 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9408 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9409 }
9410
9411 if (have_vfp_pseudos)
9412 {
9413 /* NOTE: These are the only pseudo registers used by
9414 the ARM target at the moment. If more are added, a
9415 little more care in numbering will be needed. */
9416
9417 int num_pseudos = 32;
9418 if (have_neon_pseudos)
9419 num_pseudos += 16;
9420 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9421 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9422 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9423 }
9424
9425 if (tdesc_data)
9426 {
9427 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9428
9429 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9430
9431 /* Override tdesc_register_type to adjust the types of VFP
9432 registers for NEON. */
9433 set_gdbarch_register_type (gdbarch, arm_register_type);
9434 }
9435
9436 /* Add standard register aliases. We add aliases even for those
9437 nanes which are used by the current architecture - it's simpler,
9438 and does no harm, since nothing ever lists user registers. */
9439 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9440 user_reg_add (gdbarch, arm_register_aliases[i].name,
9441 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9442
9443 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9444 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9445
9446 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9447
9448 return gdbarch;
9449 }
9450
9451 static void
9452 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9453 {
9454 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9455
9456 if (tdep == NULL)
9457 return;
9458
9459 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9460 (unsigned long) tdep->lowest_pc);
9461 }
9462
#if GDB_SELF_TEST
namespace selftests
{
  /* Forward declaration; registered as the "arm-record" selftest in
     _initialize_arm_tdep below.  */
  static void arm_record_test (void);
}
#endif
9469
9470 void
9471 _initialize_arm_tdep (void)
9472 {
9473 long length;
9474 int i, j;
9475 char regdesc[1024], *rdptr = regdesc;
9476 size_t rest = sizeof (regdesc);
9477
9478 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9479
9480 arm_objfile_data_key
9481 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9482
9483 /* Add ourselves to objfile event chain. */
9484 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9485 arm_exidx_data_key
9486 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9487
9488 /* Register an ELF OS ABI sniffer for ARM binaries. */
9489 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9490 bfd_target_elf_flavour,
9491 arm_elf_osabi_sniffer);
9492
9493 /* Initialize the standard target descriptions. */
9494 initialize_tdesc_arm_with_m ();
9495 initialize_tdesc_arm_with_m_fpa_layout ();
9496 initialize_tdesc_arm_with_m_vfp_d16 ();
9497 initialize_tdesc_arm_with_iwmmxt ();
9498 initialize_tdesc_arm_with_vfpv2 ();
9499 initialize_tdesc_arm_with_vfpv3 ();
9500 initialize_tdesc_arm_with_neon ();
9501
9502 /* Add root prefix command for all "set arm"/"show arm" commands. */
9503 add_prefix_cmd ("arm", no_class, set_arm_command,
9504 _("Various ARM-specific commands."),
9505 &setarmcmdlist, "set arm ", 0, &setlist);
9506
9507 add_prefix_cmd ("arm", no_class, show_arm_command,
9508 _("Various ARM-specific commands."),
9509 &showarmcmdlist, "show arm ", 0, &showlist);
9510
9511
9512 arm_disassembler_options = xstrdup ("reg-names-std");
9513 const disasm_options_t *disasm_options
9514 = &disassembler_options_arm ()->options;
9515 int num_disassembly_styles = 0;
9516 for (i = 0; disasm_options->name[i] != NULL; i++)
9517 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9518 num_disassembly_styles++;
9519
9520 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9521 valid_disassembly_styles = XNEWVEC (const char *,
9522 num_disassembly_styles + 1);
9523 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9524 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9525 {
9526 size_t offset = strlen ("reg-names-");
9527 const char *style = disasm_options->name[i];
9528 valid_disassembly_styles[j++] = &style[offset];
9529 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9530 disasm_options->description[i]);
9531 rdptr += length;
9532 rest -= length;
9533 }
9534 /* Mark the end of valid options. */
9535 valid_disassembly_styles[num_disassembly_styles] = NULL;
9536
9537 /* Create the help text. */
9538 std::string helptext = string_printf ("%s%s%s",
9539 _("The valid values are:\n"),
9540 regdesc,
9541 _("The default is \"std\"."));
9542
9543 add_setshow_enum_cmd("disassembler", no_class,
9544 valid_disassembly_styles, &disassembly_style,
9545 _("Set the disassembly style."),
9546 _("Show the disassembly style."),
9547 helptext.c_str (),
9548 set_disassembly_style_sfunc,
9549 show_disassembly_style_sfunc,
9550 &setarmcmdlist, &showarmcmdlist);
9551
9552 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9553 _("Set usage of ARM 32-bit mode."),
9554 _("Show usage of ARM 32-bit mode."),
9555 _("When off, a 26-bit PC will be used."),
9556 NULL,
9557 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9558 mode is %s. */
9559 &setarmcmdlist, &showarmcmdlist);
9560
9561 /* Add a command to allow the user to force the FPU model. */
9562 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9563 _("Set the floating point type."),
9564 _("Show the floating point type."),
9565 _("auto - Determine the FP typefrom the OS-ABI.\n\
9566 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9567 fpa - FPA co-processor (GCC compiled).\n\
9568 softvfp - Software FP with pure-endian doubles.\n\
9569 vfp - VFP co-processor."),
9570 set_fp_model_sfunc, show_fp_model,
9571 &setarmcmdlist, &showarmcmdlist);
9572
9573 /* Add a command to allow the user to force the ABI. */
9574 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9575 _("Set the ABI."),
9576 _("Show the ABI."),
9577 NULL, arm_set_abi, arm_show_abi,
9578 &setarmcmdlist, &showarmcmdlist);
9579
9580 /* Add two commands to allow the user to force the assumed
9581 execution mode. */
9582 add_setshow_enum_cmd ("fallback-mode", class_support,
9583 arm_mode_strings, &arm_fallback_mode_string,
9584 _("Set the mode assumed when symbols are unavailable."),
9585 _("Show the mode assumed when symbols are unavailable."),
9586 NULL, NULL, arm_show_fallback_mode,
9587 &setarmcmdlist, &showarmcmdlist);
9588 add_setshow_enum_cmd ("force-mode", class_support,
9589 arm_mode_strings, &arm_force_mode_string,
9590 _("Set the mode assumed even when symbols are available."),
9591 _("Show the mode assumed even when symbols are available."),
9592 NULL, NULL, arm_show_force_mode,
9593 &setarmcmdlist, &showarmcmdlist);
9594
9595 /* Debugging flag. */
9596 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9597 _("Set ARM debugging."),
9598 _("Show ARM debugging."),
9599 _("When on, arm-specific debugging is enabled."),
9600 NULL,
9601 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9602 &setdebuglist, &showdebuglist);
9603
9604 #if GDB_SELF_TEST
9605 selftests::register_test ("arm-record", selftests::arm_record_test);
9606 #endif
9607
9608 }
9609
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Allocate REGS as an array of LENGTH uint32_t register numbers and
   copy them in from RECORD_BUF.  Does nothing when LENGTH is zero.
   The caller owns (and must free) the resulting array.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Allocate MEMS as an array of LENGTH struct arm_mem_r and copy it in
   from RECORD_BUF.  Relies on RECORD_BUF holding (len, addr) uint32_t
   pairs with the same layout as struct arm_mem_r — the record routines
   below fill record_buf_mem in exactly that order.  Does nothing when
   LENGTH is zero; the caller owns the resulting array.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
          while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9649
/* ARM memory record structure: one contiguous region of target memory
   that a recorded store will modify (filled in via MEM_ALLOC).  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};
9656
/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Raw instruction; should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records (entries in arm_mems).  */
  uint32_t reg_rec_count;       /* No of reg records (entries in arm_regs).  */
  uint32_t *arm_regs;           /* Registers to be saved for this record;
				   allocated with XNEWVEC via REG_ALLOC.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record;
				   allocated with XNEWVEC via MEM_ALLOC.  */
} insn_decode_record;
9676
9677
/* Checks ARM SBZ and SBO mandatory fields.

   BIT_NUM is the 1-based position of the field's least significant bit
   within INSN and LEN is the field's width in bits.  SBO non-zero means
   "should be one": every bit of the field must be set; SBO zero means
   "should be zero": every bit of the field must be clear.  Return 1 if
   the field satisfies the constraint (a zero-length field trivially
   does), 0 otherwise.

   The previous implementation tested "ones & sbo" inside the scan loop;
   with SBO == 0 that test is always false, so every SBZ check was
   rejected, and the SBO scan also stopped at the field's highest set
   bit, silently accepting leading zero bits.  Compare the whole field
   against a mask instead.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  if (!len)
    return 1;

  /* Mask covering LEN bits; special-case LEN >= 32 to avoid the
     undefined behavior of shifting a 32-bit value by 32.  */
  uint32_t mask = (len >= 32) ? 0xffffffffu : ((1u << len) - 1u);
  uint32_t field = (insn >> (bit_num - 1)) & mask;

  return sbo ? field == mask : field == 0;
}
9701
/* Result codes returned by the ARM process-record entry points.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which flavor of miscellaneous store arm_record_strx is decoding.  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Instruction-set flavor of the insn being recorded.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
9720
9721
9722 static int
9723 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9724 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9725 {
9726
9727 struct regcache *reg_cache = arm_insn_r->regcache;
9728 ULONGEST u_regval[2]= {0};
9729
9730 uint32_t reg_src1 = 0, reg_src2 = 0;
9731 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9732
9733 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9734 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9735
9736 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9737 {
9738 /* 1) Handle misc store, immediate offset. */
9739 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9740 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9741 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9742 regcache_raw_read_unsigned (reg_cache, reg_src1,
9743 &u_regval[0]);
9744 if (ARM_PC_REGNUM == reg_src1)
9745 {
9746 /* If R15 was used as Rn, hence current PC+8. */
9747 u_regval[0] = u_regval[0] + 8;
9748 }
9749 offset_8 = (immed_high << 4) | immed_low;
9750 /* Calculate target store address. */
9751 if (14 == arm_insn_r->opcode)
9752 {
9753 tgt_mem_addr = u_regval[0] + offset_8;
9754 }
9755 else
9756 {
9757 tgt_mem_addr = u_regval[0] - offset_8;
9758 }
9759 if (ARM_RECORD_STRH == str_type)
9760 {
9761 record_buf_mem[0] = 2;
9762 record_buf_mem[1] = tgt_mem_addr;
9763 arm_insn_r->mem_rec_count = 1;
9764 }
9765 else if (ARM_RECORD_STRD == str_type)
9766 {
9767 record_buf_mem[0] = 4;
9768 record_buf_mem[1] = tgt_mem_addr;
9769 record_buf_mem[2] = 4;
9770 record_buf_mem[3] = tgt_mem_addr + 4;
9771 arm_insn_r->mem_rec_count = 2;
9772 }
9773 }
9774 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9775 {
9776 /* 2) Store, register offset. */
9777 /* Get Rm. */
9778 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9779 /* Get Rn. */
9780 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9781 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9782 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9783 if (15 == reg_src2)
9784 {
9785 /* If R15 was used as Rn, hence current PC+8. */
9786 u_regval[0] = u_regval[0] + 8;
9787 }
9788 /* Calculate target store address, Rn +/- Rm, register offset. */
9789 if (12 == arm_insn_r->opcode)
9790 {
9791 tgt_mem_addr = u_regval[0] + u_regval[1];
9792 }
9793 else
9794 {
9795 tgt_mem_addr = u_regval[1] - u_regval[0];
9796 }
9797 if (ARM_RECORD_STRH == str_type)
9798 {
9799 record_buf_mem[0] = 2;
9800 record_buf_mem[1] = tgt_mem_addr;
9801 arm_insn_r->mem_rec_count = 1;
9802 }
9803 else if (ARM_RECORD_STRD == str_type)
9804 {
9805 record_buf_mem[0] = 4;
9806 record_buf_mem[1] = tgt_mem_addr;
9807 record_buf_mem[2] = 4;
9808 record_buf_mem[3] = tgt_mem_addr + 4;
9809 arm_insn_r->mem_rec_count = 2;
9810 }
9811 }
9812 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9813 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9814 {
9815 /* 3) Store, immediate pre-indexed. */
9816 /* 5) Store, immediate post-indexed. */
9817 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9818 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9819 offset_8 = (immed_high << 4) | immed_low;
9820 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9821 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9822 /* Calculate target store address, Rn +/- Rm, register offset. */
9823 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9824 {
9825 tgt_mem_addr = u_regval[0] + offset_8;
9826 }
9827 else
9828 {
9829 tgt_mem_addr = u_regval[0] - offset_8;
9830 }
9831 if (ARM_RECORD_STRH == str_type)
9832 {
9833 record_buf_mem[0] = 2;
9834 record_buf_mem[1] = tgt_mem_addr;
9835 arm_insn_r->mem_rec_count = 1;
9836 }
9837 else if (ARM_RECORD_STRD == str_type)
9838 {
9839 record_buf_mem[0] = 4;
9840 record_buf_mem[1] = tgt_mem_addr;
9841 record_buf_mem[2] = 4;
9842 record_buf_mem[3] = tgt_mem_addr + 4;
9843 arm_insn_r->mem_rec_count = 2;
9844 }
9845 /* Record Rn also as it changes. */
9846 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9847 arm_insn_r->reg_rec_count = 1;
9848 }
9849 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9850 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9851 {
9852 /* 4) Store, register pre-indexed. */
9853 /* 6) Store, register post -indexed. */
9854 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9855 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9856 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9857 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9858 /* Calculate target store address, Rn +/- Rm, register offset. */
9859 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9860 {
9861 tgt_mem_addr = u_regval[0] + u_regval[1];
9862 }
9863 else
9864 {
9865 tgt_mem_addr = u_regval[1] - u_regval[0];
9866 }
9867 if (ARM_RECORD_STRH == str_type)
9868 {
9869 record_buf_mem[0] = 2;
9870 record_buf_mem[1] = tgt_mem_addr;
9871 arm_insn_r->mem_rec_count = 1;
9872 }
9873 else if (ARM_RECORD_STRD == str_type)
9874 {
9875 record_buf_mem[0] = 4;
9876 record_buf_mem[1] = tgt_mem_addr;
9877 record_buf_mem[2] = 4;
9878 record_buf_mem[3] = tgt_mem_addr + 4;
9879 arm_insn_r->mem_rec_count = 2;
9880 }
9881 /* Record Rn also as it changes. */
9882 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9883 arm_insn_r->reg_rec_count = 1;
9884 }
9885 return 0;
9886 }
9887
9888 /* Handling ARM extension space insns. */
9889
9890 static int
9891 arm_record_extension_space (insn_decode_record *arm_insn_r)
9892 {
9893 int ret = 0; /* Return value: -1:record failure ; 0:success */
9894 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9895 uint32_t record_buf[8], record_buf_mem[8];
9896 uint32_t reg_src1 = 0;
9897 struct regcache *reg_cache = arm_insn_r->regcache;
9898 ULONGEST u_regval = 0;
9899
9900 gdb_assert (!INSN_RECORDED(arm_insn_r));
9901 /* Handle unconditional insn extension space. */
9902
9903 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9904 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9905 if (arm_insn_r->cond)
9906 {
9907 /* PLD has no affect on architectural state, it just affects
9908 the caches. */
9909 if (5 == ((opcode1 & 0xE0) >> 5))
9910 {
9911 /* BLX(1) */
9912 record_buf[0] = ARM_PS_REGNUM;
9913 record_buf[1] = ARM_LR_REGNUM;
9914 arm_insn_r->reg_rec_count = 2;
9915 }
9916 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9917 }
9918
9919
9920 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9921 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9922 {
9923 ret = -1;
9924 /* Undefined instruction on ARM V5; need to handle if later
9925 versions define it. */
9926 }
9927
9928 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9929 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9930 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9931
9932 /* Handle arithmetic insn extension space. */
9933 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9934 && !INSN_RECORDED(arm_insn_r))
9935 {
9936 /* Handle MLA(S) and MUL(S). */
9937 if (in_inclusive_range (insn_op1, 0U, 3U))
9938 {
9939 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9940 record_buf[1] = ARM_PS_REGNUM;
9941 arm_insn_r->reg_rec_count = 2;
9942 }
9943 else if (in_inclusive_range (insn_op1, 4U, 15U))
9944 {
9945 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9946 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9947 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9948 record_buf[2] = ARM_PS_REGNUM;
9949 arm_insn_r->reg_rec_count = 3;
9950 }
9951 }
9952
9953 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9954 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9955 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9956
9957 /* Handle control insn extension space. */
9958
9959 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9960 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9961 {
9962 if (!bit (arm_insn_r->arm_insn,25))
9963 {
9964 if (!bits (arm_insn_r->arm_insn, 4, 7))
9965 {
9966 if ((0 == insn_op1) || (2 == insn_op1))
9967 {
9968 /* MRS. */
9969 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9970 arm_insn_r->reg_rec_count = 1;
9971 }
9972 else if (1 == insn_op1)
9973 {
9974 /* CSPR is going to be changed. */
9975 record_buf[0] = ARM_PS_REGNUM;
9976 arm_insn_r->reg_rec_count = 1;
9977 }
9978 else if (3 == insn_op1)
9979 {
9980 /* SPSR is going to be changed. */
9981 /* We need to get SPSR value, which is yet to be done. */
9982 return -1;
9983 }
9984 }
9985 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9986 {
9987 if (1 == insn_op1)
9988 {
9989 /* BX. */
9990 record_buf[0] = ARM_PS_REGNUM;
9991 arm_insn_r->reg_rec_count = 1;
9992 }
9993 else if (3 == insn_op1)
9994 {
9995 /* CLZ. */
9996 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9997 arm_insn_r->reg_rec_count = 1;
9998 }
9999 }
10000 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10001 {
10002 /* BLX. */
10003 record_buf[0] = ARM_PS_REGNUM;
10004 record_buf[1] = ARM_LR_REGNUM;
10005 arm_insn_r->reg_rec_count = 2;
10006 }
10007 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10008 {
10009 /* QADD, QSUB, QDADD, QDSUB */
10010 record_buf[0] = ARM_PS_REGNUM;
10011 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10012 arm_insn_r->reg_rec_count = 2;
10013 }
10014 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10015 {
10016 /* BKPT. */
10017 record_buf[0] = ARM_PS_REGNUM;
10018 record_buf[1] = ARM_LR_REGNUM;
10019 arm_insn_r->reg_rec_count = 2;
10020
10021 /* Save SPSR also;how? */
10022 return -1;
10023 }
10024 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10025 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10026 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10027 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10028 )
10029 {
10030 if (0 == insn_op1 || 1 == insn_op1)
10031 {
10032 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10033 /* We dont do optimization for SMULW<y> where we
10034 need only Rd. */
10035 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10036 record_buf[1] = ARM_PS_REGNUM;
10037 arm_insn_r->reg_rec_count = 2;
10038 }
10039 else if (2 == insn_op1)
10040 {
10041 /* SMLAL<x><y>. */
10042 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10043 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10044 arm_insn_r->reg_rec_count = 2;
10045 }
10046 else if (3 == insn_op1)
10047 {
10048 /* SMUL<x><y>. */
10049 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10050 arm_insn_r->reg_rec_count = 1;
10051 }
10052 }
10053 }
10054 else
10055 {
10056 /* MSR : immediate form. */
10057 if (1 == insn_op1)
10058 {
10059 /* CSPR is going to be changed. */
10060 record_buf[0] = ARM_PS_REGNUM;
10061 arm_insn_r->reg_rec_count = 1;
10062 }
10063 else if (3 == insn_op1)
10064 {
10065 /* SPSR is going to be changed. */
10066 /* we need to get SPSR value, which is yet to be done */
10067 return -1;
10068 }
10069 }
10070 }
10071
10072 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10073 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10074 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10075
10076 /* Handle load/store insn extension space. */
10077
10078 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10079 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10080 && !INSN_RECORDED(arm_insn_r))
10081 {
10082 /* SWP/SWPB. */
10083 if (0 == insn_op1)
10084 {
10085 /* These insn, changes register and memory as well. */
10086 /* SWP or SWPB insn. */
10087 /* Get memory address given by Rn. */
10088 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10089 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10090 /* SWP insn ?, swaps word. */
10091 if (8 == arm_insn_r->opcode)
10092 {
10093 record_buf_mem[0] = 4;
10094 }
10095 else
10096 {
10097 /* SWPB insn, swaps only byte. */
10098 record_buf_mem[0] = 1;
10099 }
10100 record_buf_mem[1] = u_regval;
10101 arm_insn_r->mem_rec_count = 1;
10102 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10103 arm_insn_r->reg_rec_count = 1;
10104 }
10105 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10106 {
10107 /* STRH. */
10108 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10109 ARM_RECORD_STRH);
10110 }
10111 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10112 {
10113 /* LDRD. */
10114 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10115 record_buf[1] = record_buf[0] + 1;
10116 arm_insn_r->reg_rec_count = 2;
10117 }
10118 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10119 {
10120 /* STRD. */
10121 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10122 ARM_RECORD_STRD);
10123 }
10124 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10125 {
10126 /* LDRH, LDRSB, LDRSH. */
10127 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10128 arm_insn_r->reg_rec_count = 1;
10129 }
10130
10131 }
10132
10133 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10134 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10135 && !INSN_RECORDED(arm_insn_r))
10136 {
10137 ret = -1;
10138 /* Handle coprocessor insn extension space. */
10139 }
10140
10141 /* To be done for ARMv5 and later; as of now we return -1. */
10142 if (-1 == ret)
10143 return ret;
10144
10145 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10146 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10147
10148 return ret;
10149 }
10150
/* Handling opcode 000 insns.

   Record handler for the ARM data-processing (register and
   register-shifted register), miscellaneous, multiply, synchronization
   and extra load/store encoding spaces.  Fills arm_insn_r's register
   and memory record lists with everything the instruction is about to
   modify.  Returns 0 on success, -1 for instructions that cannot be
   recorded yet (SPSR-writing insns, unprivileged extra load/stores).  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  /* (opcode1 & 0x19) == 0x10 selects the miscellaneous/MSR encoding
     space (op 10xx with S == 0); everything else in opcode 000 is an
     ordinary data-processing instruction.  */
  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and Data-processing (register-shifted
	 register */
      /* Out of 11 shifter operands mode, all the insn modifies destination
	 register, which is specified by 13-16 decode.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
	  && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BLX, branch and link/exchange.  */
	  if (9 == arm_insn_r->opcode)
	    {
	      /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
		 and R14 stores the return address.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	}
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
	{
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state, disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* User hit breakpoint and type reverse, in
	     that case, we need to go back with previous CPSR and
	     Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;

	  /* Save SPSR also; how?  Not modelled yet, so refuse to
	     record.  */
	  return -1;
	}
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BX, branch and link/exchange.  */
	  /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
	{
	  /* Count leading zeros: CLZ.  Only Rd (bits 12-15) changes.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
	       && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
	{
	  /* Handle MRS insn: reads CPSR/SPSR into Rd, so only Rd
	     changes.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
	{
	  /* Handle MLA and MUL: Rd is in bits 16-19 here.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
	{
	  /* Handle SMLAL, SMULL, UMLAL, UMULL: both RdHi (16-19) and
	     RdLo (12-15) are written.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 > 0x10)
    {
      /* Synchronization primitives */

      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */

      /* Get memory address given by Rn.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
	{
	  record_buf_mem[0] = 4;
	}
      else
	{
	  /* SWPB insn, swaps only byte.  */
	  record_buf_mem[0] = 1;
	}
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
	   || 15 == arm_insn_r->decode)
    {
      if ((opcode1 & 0x12) == 2)
	{
	  /* Extra load/store (unprivileged): LDRHT/STRHT etc., not
	     supported yet.  */
	  return -1;
	}
      else
	{
	  /* Extra load/store.  Bits 5-6 select the width/signedness:
	     1 = halfword, 2 = doubleword/signed byte, 3 = signed
	     halfword (combined with the L bit in opcode1).  */
	  switch (bits (arm_insn_r->arm_insn, 5, 6))
	    {
	    case 1:
	      if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
		{
		  /* STRH (register), STRH (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRH);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRH (immediate), LDRH (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRH (immediate): Rn == 15 is the literal form,
			 which has no write-back.  */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 2:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* LDRD (register): loads an even/odd register pair.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = record_buf[0] + 1;
		  arm_insn_r->reg_rec_count = 2;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSB (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
		{
		  /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
		     LDRSB (literal) */
		  /* NOTE(review): for the LDRD forms only Rt is recorded
		     here, not Rt+1 as in the register form above —
		     verify against the architecture manual.  */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /* LDRD (immediate), LDRSB (immediate) */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 3:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* STRD (register) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4)
		{
		  /* STRD (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRSH (immediate), LDRSH (literal) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else
		return -1;
	      break;
	    default:
	      return -1;
	    }
	}
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10443
10444 /* Handling opcode 001 insns. */
10445
10446 static int
10447 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10448 {
10449 uint32_t record_buf[8], record_buf_mem[8];
10450
10451 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10452 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10453
10454 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10455 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10456 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10457 )
10458 {
10459 /* Handle MSR insn. */
10460 if (9 == arm_insn_r->opcode)
10461 {
10462 /* CSPR is going to be changed. */
10463 record_buf[0] = ARM_PS_REGNUM;
10464 arm_insn_r->reg_rec_count = 1;
10465 }
10466 else
10467 {
10468 /* SPSR is going to be changed. */
10469 }
10470 }
10471 else if (arm_insn_r->opcode <= 15)
10472 {
10473 /* Normal data processing insns. */
10474 /* Out of 11 shifter operands mode, all the insn modifies destination
10475 register, which is specified by 13-16 decode. */
10476 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10477 record_buf[1] = ARM_PS_REGNUM;
10478 arm_insn_r->reg_rec_count = 2;
10479 }
10480 else
10481 {
10482 return -1;
10483 }
10484
10485 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10486 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10487 return 0;
10488 }
10489
10490 static int
10491 arm_record_media (insn_decode_record *arm_insn_r)
10492 {
10493 uint32_t record_buf[8];
10494
10495 switch (bits (arm_insn_r->arm_insn, 22, 24))
10496 {
10497 case 0:
10498 /* Parallel addition and subtraction, signed */
10499 case 1:
10500 /* Parallel addition and subtraction, unsigned */
10501 case 2:
10502 case 3:
10503 /* Packing, unpacking, saturation and reversal */
10504 {
10505 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10506
10507 record_buf[arm_insn_r->reg_rec_count++] = rd;
10508 }
10509 break;
10510
10511 case 4:
10512 case 5:
10513 /* Signed multiplies */
10514 {
10515 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10516 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10517
10518 record_buf[arm_insn_r->reg_rec_count++] = rd;
10519 if (op1 == 0x0)
10520 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10521 else if (op1 == 0x4)
10522 record_buf[arm_insn_r->reg_rec_count++]
10523 = bits (arm_insn_r->arm_insn, 12, 15);
10524 }
10525 break;
10526
10527 case 6:
10528 {
10529 if (bit (arm_insn_r->arm_insn, 21)
10530 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10531 {
10532 /* SBFX */
10533 record_buf[arm_insn_r->reg_rec_count++]
10534 = bits (arm_insn_r->arm_insn, 12, 15);
10535 }
10536 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10537 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10538 {
10539 /* USAD8 and USADA8 */
10540 record_buf[arm_insn_r->reg_rec_count++]
10541 = bits (arm_insn_r->arm_insn, 16, 19);
10542 }
10543 }
10544 break;
10545
10546 case 7:
10547 {
10548 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10549 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10550 {
10551 /* Permanently UNDEFINED */
10552 return -1;
10553 }
10554 else
10555 {
10556 /* BFC, BFI and UBFX */
10557 record_buf[arm_insn_r->reg_rec_count++]
10558 = bits (arm_insn_r->arm_insn, 12, 15);
10559 }
10560 }
10561 break;
10562
10563 default:
10564 return -1;
10565 }
10566
10567 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10568
10569 return 0;
10570 }
10571
10572 /* Handle ARM mode instructions with opcode 010. */
10573
10574 static int
10575 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10576 {
10577 struct regcache *reg_cache = arm_insn_r->regcache;
10578
10579 uint32_t reg_base , reg_dest;
10580 uint32_t offset_12, tgt_mem_addr;
10581 uint32_t record_buf[8], record_buf_mem[8];
10582 unsigned char wback;
10583 ULONGEST u_regval;
10584
10585 /* Calculate wback. */
10586 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10587 || (bit (arm_insn_r->arm_insn, 21) == 1);
10588
10589 arm_insn_r->reg_rec_count = 0;
10590 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10591
10592 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10593 {
10594 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10595 and LDRT. */
10596
10597 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10598 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10599
10600 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10601 preceeds a LDR instruction having R15 as reg_base, it
10602 emulates a branch and link instruction, and hence we need to save
10603 CPSR and PC as well. */
10604 if (ARM_PC_REGNUM == reg_dest)
10605 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10606
10607 /* If wback is true, also save the base register, which is going to be
10608 written to. */
10609 if (wback)
10610 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10611 }
10612 else
10613 {
10614 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10615
10616 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10617 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10618
10619 /* Handle bit U. */
10620 if (bit (arm_insn_r->arm_insn, 23))
10621 {
10622 /* U == 1: Add the offset. */
10623 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10624 }
10625 else
10626 {
10627 /* U == 0: subtract the offset. */
10628 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10629 }
10630
10631 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10632 bytes. */
10633 if (bit (arm_insn_r->arm_insn, 22))
10634 {
10635 /* STRB and STRBT: 1 byte. */
10636 record_buf_mem[0] = 1;
10637 }
10638 else
10639 {
10640 /* STR and STRT: 4 bytes. */
10641 record_buf_mem[0] = 4;
10642 }
10643
10644 /* Handle bit P. */
10645 if (bit (arm_insn_r->arm_insn, 24))
10646 record_buf_mem[1] = tgt_mem_addr;
10647 else
10648 record_buf_mem[1] = (uint32_t) u_regval;
10649
10650 arm_insn_r->mem_rec_count = 1;
10651
10652 /* If wback is true, also save the base register, which is going to be
10653 written to. */
10654 if (wback)
10655 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10656 }
10657
10658 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10659 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10660 return 0;
10661 }
10662
10663 /* Handling opcode 011 insns. */
10664
10665 static int
10666 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10667 {
10668 struct regcache *reg_cache = arm_insn_r->regcache;
10669
10670 uint32_t shift_imm = 0;
10671 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10672 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10673 uint32_t record_buf[8], record_buf_mem[8];
10674
10675 LONGEST s_word;
10676 ULONGEST u_regval[2];
10677
10678 if (bit (arm_insn_r->arm_insn, 4))
10679 return arm_record_media (arm_insn_r);
10680
10681 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10682 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10683
10684 /* Handle enhanced store insns and LDRD DSP insn,
10685 order begins according to addressing modes for store insns
10686 STRH insn. */
10687
10688 /* LDR or STR? */
10689 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10690 {
10691 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10692 /* LDR insn has a capability to do branching, if
10693 MOV LR, PC is precedded by LDR insn having Rn as R15
10694 in that case, it emulates branch and link insn, and hence we
10695 need to save CSPR and PC as well. */
10696 if (15 != reg_dest)
10697 {
10698 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10699 arm_insn_r->reg_rec_count = 1;
10700 }
10701 else
10702 {
10703 record_buf[0] = reg_dest;
10704 record_buf[1] = ARM_PS_REGNUM;
10705 arm_insn_r->reg_rec_count = 2;
10706 }
10707 }
10708 else
10709 {
10710 if (! bits (arm_insn_r->arm_insn, 4, 11))
10711 {
10712 /* Store insn, register offset and register pre-indexed,
10713 register post-indexed. */
10714 /* Get Rm. */
10715 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10716 /* Get Rn. */
10717 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10718 regcache_raw_read_unsigned (reg_cache, reg_src1
10719 , &u_regval[0]);
10720 regcache_raw_read_unsigned (reg_cache, reg_src2
10721 , &u_regval[1]);
10722 if (15 == reg_src2)
10723 {
10724 /* If R15 was used as Rn, hence current PC+8. */
10725 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10726 u_regval[0] = u_regval[0] + 8;
10727 }
10728 /* Calculate target store address, Rn +/- Rm, register offset. */
10729 /* U == 1. */
10730 if (bit (arm_insn_r->arm_insn, 23))
10731 {
10732 tgt_mem_addr = u_regval[0] + u_regval[1];
10733 }
10734 else
10735 {
10736 tgt_mem_addr = u_regval[1] - u_regval[0];
10737 }
10738
10739 switch (arm_insn_r->opcode)
10740 {
10741 /* STR. */
10742 case 8:
10743 case 12:
10744 /* STR. */
10745 case 9:
10746 case 13:
10747 /* STRT. */
10748 case 1:
10749 case 5:
10750 /* STR. */
10751 case 0:
10752 case 4:
10753 record_buf_mem[0] = 4;
10754 break;
10755
10756 /* STRB. */
10757 case 10:
10758 case 14:
10759 /* STRB. */
10760 case 11:
10761 case 15:
10762 /* STRBT. */
10763 case 3:
10764 case 7:
10765 /* STRB. */
10766 case 2:
10767 case 6:
10768 record_buf_mem[0] = 1;
10769 break;
10770
10771 default:
10772 gdb_assert_not_reached ("no decoding pattern found");
10773 break;
10774 }
10775 record_buf_mem[1] = tgt_mem_addr;
10776 arm_insn_r->mem_rec_count = 1;
10777
10778 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10779 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10780 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10781 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10782 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10783 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10784 )
10785 {
10786 /* Rn is going to be changed in pre-indexed mode and
10787 post-indexed mode as well. */
10788 record_buf[0] = reg_src2;
10789 arm_insn_r->reg_rec_count = 1;
10790 }
10791 }
10792 else
10793 {
10794 /* Store insn, scaled register offset; scaled pre-indexed. */
10795 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10796 /* Get Rm. */
10797 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10798 /* Get Rn. */
10799 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10800 /* Get shift_imm. */
10801 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10802 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10803 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10804 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10805 /* Offset_12 used as shift. */
10806 switch (offset_12)
10807 {
10808 case 0:
10809 /* Offset_12 used as index. */
10810 offset_12 = u_regval[0] << shift_imm;
10811 break;
10812
10813 case 1:
10814 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
10815 break;
10816
10817 case 2:
10818 if (!shift_imm)
10819 {
10820 if (bit (u_regval[0], 31))
10821 {
10822 offset_12 = 0xFFFFFFFF;
10823 }
10824 else
10825 {
10826 offset_12 = 0;
10827 }
10828 }
10829 else
10830 {
10831 /* This is arithmetic shift. */
10832 offset_12 = s_word >> shift_imm;
10833 }
10834 break;
10835
10836 case 3:
10837 if (!shift_imm)
10838 {
10839 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10840 &u_regval[1]);
10841 /* Get C flag value and shift it by 31. */
10842 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10843 | (u_regval[0]) >> 1);
10844 }
10845 else
10846 {
10847 offset_12 = (u_regval[0] >> shift_imm) \
10848 | (u_regval[0] <<
10849 (sizeof(uint32_t) - shift_imm));
10850 }
10851 break;
10852
10853 default:
10854 gdb_assert_not_reached ("no decoding pattern found");
10855 break;
10856 }
10857
10858 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10859 /* bit U set. */
10860 if (bit (arm_insn_r->arm_insn, 23))
10861 {
10862 tgt_mem_addr = u_regval[1] + offset_12;
10863 }
10864 else
10865 {
10866 tgt_mem_addr = u_regval[1] - offset_12;
10867 }
10868
10869 switch (arm_insn_r->opcode)
10870 {
10871 /* STR. */
10872 case 8:
10873 case 12:
10874 /* STR. */
10875 case 9:
10876 case 13:
10877 /* STRT. */
10878 case 1:
10879 case 5:
10880 /* STR. */
10881 case 0:
10882 case 4:
10883 record_buf_mem[0] = 4;
10884 break;
10885
10886 /* STRB. */
10887 case 10:
10888 case 14:
10889 /* STRB. */
10890 case 11:
10891 case 15:
10892 /* STRBT. */
10893 case 3:
10894 case 7:
10895 /* STRB. */
10896 case 2:
10897 case 6:
10898 record_buf_mem[0] = 1;
10899 break;
10900
10901 default:
10902 gdb_assert_not_reached ("no decoding pattern found");
10903 break;
10904 }
10905 record_buf_mem[1] = tgt_mem_addr;
10906 arm_insn_r->mem_rec_count = 1;
10907
10908 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10909 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10910 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10911 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10912 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10913 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10914 )
10915 {
10916 /* Rn is going to be changed in register scaled pre-indexed
10917 mode,and scaled post indexed mode. */
10918 record_buf[0] = reg_src2;
10919 arm_insn_r->reg_rec_count = 1;
10920 }
10921 }
10922 }
10923
10924 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10925 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10926 return 0;
10927 }
10928
10929 /* Handle ARM mode instructions with opcode 100. */
10930
10931 static int
10932 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10933 {
10934 struct regcache *reg_cache = arm_insn_r->regcache;
10935 uint32_t register_count = 0, register_bits;
10936 uint32_t reg_base, addr_mode;
10937 uint32_t record_buf[24], record_buf_mem[48];
10938 uint32_t wback;
10939 ULONGEST u_regval;
10940
10941 /* Fetch the list of registers. */
10942 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10943 arm_insn_r->reg_rec_count = 0;
10944
10945 /* Fetch the base register that contains the address we are loading data
10946 to. */
10947 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10948
10949 /* Calculate wback. */
10950 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10951
10952 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10953 {
10954 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10955
10956 /* Find out which registers are going to be loaded from memory. */
10957 while (register_bits)
10958 {
10959 if (register_bits & 0x00000001)
10960 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10961 register_bits = register_bits >> 1;
10962 register_count++;
10963 }
10964
10965
10966 /* If wback is true, also save the base register, which is going to be
10967 written to. */
10968 if (wback)
10969 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10970
10971 /* Save the CPSR register. */
10972 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10973 }
10974 else
10975 {
10976 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10977
10978 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10979
10980 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10981
10982 /* Find out how many registers are going to be stored to memory. */
10983 while (register_bits)
10984 {
10985 if (register_bits & 0x00000001)
10986 register_count++;
10987 register_bits = register_bits >> 1;
10988 }
10989
10990 switch (addr_mode)
10991 {
10992 /* STMDA (STMED): Decrement after. */
10993 case 0:
10994 record_buf_mem[1] = (uint32_t) u_regval
10995 - register_count * INT_REGISTER_SIZE + 4;
10996 break;
10997 /* STM (STMIA, STMEA): Increment after. */
10998 case 1:
10999 record_buf_mem[1] = (uint32_t) u_regval;
11000 break;
11001 /* STMDB (STMFD): Decrement before. */
11002 case 2:
11003 record_buf_mem[1] = (uint32_t) u_regval
11004 - register_count * INT_REGISTER_SIZE;
11005 break;
11006 /* STMIB (STMFA): Increment before. */
11007 case 3:
11008 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11009 break;
11010 default:
11011 gdb_assert_not_reached ("no decoding pattern found");
11012 break;
11013 }
11014
11015 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11016 arm_insn_r->mem_rec_count = 1;
11017
11018 /* If wback is true, also save the base register, which is going to be
11019 written to. */
11020 if (wback)
11021 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11022 }
11023
11024 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11025 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11026 return 0;
11027 }
11028
11029 /* Handling opcode 101 insns. */
11030
11031 static int
11032 arm_record_b_bl (insn_decode_record *arm_insn_r)
11033 {
11034 uint32_t record_buf[8];
11035
11036 /* Handle B, BL, BLX(1) insns. */
11037 /* B simply branches so we do nothing here. */
11038 /* Note: BLX(1) doesnt fall here but instead it falls into
11039 extension space. */
11040 if (bit (arm_insn_r->arm_insn, 24))
11041 {
11042 record_buf[0] = ARM_LR_REGNUM;
11043 arm_insn_r->reg_rec_count = 1;
11044 }
11045
11046 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11047
11048 return 0;
11049 }
11050
11051 static int
11052 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11053 {
11054 printf_unfiltered (_("Process record does not support instruction "
11055 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11056 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11057
11058 return -1;
11059 }
11060
11061 /* Record handler for vector data transfer instructions. */
11062
11063 static int
11064 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11065 {
11066 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11067 uint32_t record_buf[4];
11068
11069 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11070 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11071 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11072 bit_l = bit (arm_insn_r->arm_insn, 20);
11073 bit_c = bit (arm_insn_r->arm_insn, 8);
11074
11075 /* Handle VMOV instruction. */
11076 if (bit_l && bit_c)
11077 {
11078 record_buf[0] = reg_t;
11079 arm_insn_r->reg_rec_count = 1;
11080 }
11081 else if (bit_l && !bit_c)
11082 {
11083 /* Handle VMOV instruction. */
11084 if (bits_a == 0x00)
11085 {
11086 record_buf[0] = reg_t;
11087 arm_insn_r->reg_rec_count = 1;
11088 }
11089 /* Handle VMRS instruction. */
11090 else if (bits_a == 0x07)
11091 {
11092 if (reg_t == 15)
11093 reg_t = ARM_PS_REGNUM;
11094
11095 record_buf[0] = reg_t;
11096 arm_insn_r->reg_rec_count = 1;
11097 }
11098 }
11099 else if (!bit_l && !bit_c)
11100 {
11101 /* Handle VMOV instruction. */
11102 if (bits_a == 0x00)
11103 {
11104 record_buf[0] = ARM_D0_REGNUM + reg_v;
11105
11106 arm_insn_r->reg_rec_count = 1;
11107 }
11108 /* Handle VMSR instruction. */
11109 else if (bits_a == 0x07)
11110 {
11111 record_buf[0] = ARM_FPSCR_REGNUM;
11112 arm_insn_r->reg_rec_count = 1;
11113 }
11114 }
11115 else if (!bit_l && bit_c)
11116 {
11117 /* Handle VMOV instruction. */
11118 if (!(bits_a & 0x04))
11119 {
11120 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11121 + ARM_D0_REGNUM;
11122 arm_insn_r->reg_rec_count = 1;
11123 }
11124 /* Handle VDUP instruction. */
11125 else
11126 {
11127 if (bit (arm_insn_r->arm_insn, 21))
11128 {
11129 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11130 record_buf[0] = reg_v + ARM_D0_REGNUM;
11131 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11132 arm_insn_r->reg_rec_count = 2;
11133 }
11134 else
11135 {
11136 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11137 record_buf[0] = reg_v + ARM_D0_REGNUM;
11138 arm_insn_r->reg_rec_count = 1;
11139 }
11140 }
11141 }
11142
11143 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11144 return 0;
11145 }
11146
/* Record handler for extension register load/store instructions.

   Decodes VMOV (transfers between ARM core and extension registers),
   VSTM/VPUSH, VLDM, VSTR and VLDR, and records everything the
   instruction modifies: destination registers are collected in
   RECORD_BUF, written memory is collected in RECORD_BUF_MEM as
   (length, address) pairs.  Always returns 0.  */

static int
arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opcode, single_reg;
  uint8_t op_vldm_vstm;
  uint32_t record_buf[8], record_buf_mem[128];
  ULONGEST u_regval = 0;

  struct regcache *reg_cache = arm_insn_r->regcache;

  opcode = bits (arm_insn_r->arm_insn, 20, 24);
  /* Bit 8 clear selects single-precision (S) registers, set selects
     double-precision (D) registers.  */
  single_reg = !bit (arm_insn_r->arm_insn, 8);
  op_vldm_vstm = opcode & 0x1b;

  /* Handle VMOV instructions.  */
  if ((opcode & 0x1e) == 0x04)
    {
      if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
	{
	  /* Two core registers Rt (bits 12..15) and Rt2 (bits 16..19)
	     receive the extension-register contents.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
	  arm_insn_r->reg_rec_count = 2;
	}
      else
	{
	  /* Transfer into extension registers; Vm is REG_M:M.  */
	  uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
	  uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);

	  if (single_reg)
	    {
	      /* The first S register number m is REG_M:M (M is bit 5),
		 the corresponding D register number is REG_M:M / 2, which
		 is REG_M.  */
	      record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
	      /* The second S register number is REG_M:M + 1, the
		 corresponding D register number is (REG_M:M + 1) / 2.
		 IOW, if bit M is 1, the first and second S registers
		 are mapped to different D registers, otherwise, they are
		 in the same D register.  */
	      if (bit_m)
		{
		  record_buf[arm_insn_r->reg_rec_count++]
		    = ARM_D0_REGNUM + reg_m + 1;
		}
	    }
	  else
	    {
	      record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }
  /* Handle VSTM and VPUSH instructions.  */
  else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
	   || op_vldm_vstm == 0x12)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;
      memory_count = imm_off8;

      /* Bit 23 set: the block starts at Rn; clear (e.g. VPUSH): the
	 block ends below Rn, so start imm_off32 bytes lower.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval;
      else
	start_address = u_regval - imm_off32;

      /* Bit 21 (write-back): the base register Rn is also modified.  */
      if (bit (arm_insn_r->arm_insn, 21))
	{
	  record_buf[0] = reg_rn;
	  arm_insn_r->reg_rec_count = 1;
	}

      /* NOTE(review): imm_off8 can encode up to 255 while
	 record_buf_mem holds 64 (length, address) pairs (128 words);
	 the double-register arm below consumes 4 slots per iteration.
	 A malformed encoding could overflow this buffer — confirm
	 bounds are enforced by the caller/decoder.  */
      while (memory_count > 0)
	{
	  if (single_reg)
	    {
	      /* One word per S register.  */
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      start_address = start_address + 4;
	      memory_index = memory_index + 2;
	    }
	  else
	    {
	      /* Each D register store is recorded as two word writes.  */
	      record_buf_mem[memory_index] = 4;
	      record_buf_mem[memory_index + 1] = start_address;
	      record_buf_mem[memory_index + 2] = 4;
	      record_buf_mem[memory_index + 3] = start_address + 4;
	      start_address = start_address + 8;
	      memory_index = memory_index + 4;
	    }
	  memory_count--;
	}
      arm_insn_r->mem_rec_count = (memory_index >> 1);
    }
  /* Handle VLDM instructions.  */
  else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
	   || op_vldm_vstm == 0x13)
    {
      uint32_t reg_count, reg_vd;
      uint32_t reg_index = 0;
      uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);

      reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
      reg_count = bits (arm_insn_r->arm_insn, 0, 7);

      /* REG_VD is the first D register number.  If the instruction
	 loads memory to S registers (SINGLE_REG is TRUE), the register
	 number is (REG_VD << 1 | bit D), so the corresponding D
	 register number is (REG_VD << 1 | bit D) / 2 = REG_VD.  */
      if (!single_reg)
	reg_vd = reg_vd | (bit_d << 4);

      if (bit (arm_insn_r->arm_insn, 21) /* write back */)
	record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);

      /* If the instruction loads memory to D register, REG_COUNT should
	 be divided by 2, according to the ARM Architecture Reference
	 Manual.  If the instruction loads memory to S register, divide by
	 2 as well because two S registers are mapped to D register.  */
      reg_count = reg_count / 2;
      if (single_reg && bit_d)
	{
	  /* Increase the register count if S register list starts from
	     an odd number (bit d is one).  */
	  reg_count++;
	}

      /* Record the affected D registers, highest-numbered first.  */
      while (reg_count > 0)
	{
	  record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
	  reg_count--;
	}
      arm_insn_r->reg_rec_count = reg_index;
    }
  /* VSTR Vector store register.  */
  else if ((opcode & 0x13) == 0x10)
    {
      uint32_t start_address, reg_rn, imm_off32, imm_off8;
      uint32_t memory_index = 0;

      reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
      imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
      imm_off32 = imm_off8 << 2;

      /* Bit 23: add or subtract the immediate offset from Rn.  */
      if (bit (arm_insn_r->arm_insn, 23))
	start_address = u_regval + imm_off32;
      else
	start_address = u_regval - imm_off32;

      if (single_reg)
	{
	  /* A single 4-byte store.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  arm_insn_r->mem_rec_count = 1;
	}
      else
	{
	  /* An 8-byte store, recorded as two word writes.  */
	  record_buf_mem[memory_index] = 4;
	  record_buf_mem[memory_index + 1] = start_address;
	  record_buf_mem[memory_index + 2] = 4;
	  record_buf_mem[memory_index + 3] = start_address + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  /* VLDR Vector load register.  */
  else if ((opcode & 0x13) == 0x11)
    {
      uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);

      if (!single_reg)
	{
	  reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
	  record_buf[0] = ARM_D0_REGNUM + reg_vd;
	}
      else
	{
	  reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
	  /* Record register D rather than pseudo register S.  */
	  record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
	}
      arm_insn_r->reg_rec_count = 1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
11341
/* Record handler for arm/thumb mode VFP data processing instructions.

   The decode only needs to determine which destination the instruction
   writes; that is captured in CURR_INSN_TYPE:
     INSN_T0 - a pair of D registers (Vd and Vd+1) is recorded;
     INSN_T1 - one double-precision register (bit D is the high bit of
	       the register number);
     INSN_T2 - one single-precision register, recorded as the D
	       register that contains it;
     INSN_T3 - only FPSCR is modified (VCMP/VCMPE).
   Returns 0 on success; asserts if no pattern matched.  */

static int
arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
{
  uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
  uint32_t record_buf[4];
  enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
  enum insn_types curr_insn_type = INSN_INV;

  reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
  opc1 = bits (arm_insn_r->arm_insn, 20, 23);
  opc2 = bits (arm_insn_r->arm_insn, 16, 19);
  opc3 = bits (arm_insn_r->arm_insn, 6, 7);
  /* Bit 8 distinguishes double (set) from single precision operations.  */
  dp_op_sz = bit (arm_insn_r->arm_insn, 8);
  bit_d = bit (arm_insn_r->arm_insn, 22);
  /* Mask off the "D" bit.  */
  opc1 = opc1 & ~0x04;

  /* Handle VMLA, VMLS.  */
  if (opc1 == 0x00)
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VNMLA, VNMLS, VNMUL.  */
  else if (opc1 == 0x01)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle VMUL.  */
  else if (opc1 == 0x02 && !(opc3 & 0x01))
    {
      if (bit (arm_insn_r->arm_insn, 10))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VADD, VSUB.  */
  else if (opc1 == 0x03)
    {
      if (!bit (arm_insn_r->arm_insn, 9))
	{
	  if (bit (arm_insn_r->arm_insn, 6))
	    curr_insn_type = INSN_T0;
	  else
	    curr_insn_type = INSN_T1;
	}
      else
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
    }
  /* Handle VDIV.  */
  else if (opc1 == 0x08)
    {
      if (dp_op_sz)
	curr_insn_type = INSN_T1;
      else
	curr_insn_type = INSN_T2;
    }
  /* Handle all other vfp data processing instructions.  */
  else if (opc1 == 0x0b)
    {
      /* Handle VMOV.  */
      if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
	{
	  if (bit (arm_insn_r->arm_insn, 4))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VNEG and VABS.  */
      else if ((opc2 == 0x01 && opc3 == 0x01)
	       || (opc2 == 0x00 && opc3 == 0x03))
	{
	  if (!bit (arm_insn_r->arm_insn, 11))
	    {
	      if (bit (arm_insn_r->arm_insn, 6))
		curr_insn_type = INSN_T0;
	      else
		curr_insn_type = INSN_T1;
	    }
	  else
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	}
      /* Handle VSQRT.  */
      else if (opc2 == 0x01 && opc3 == 0x03)
	{
	  if (dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      /* Handle VCVT.  */
      else if (opc2 == 0x07 && opc3 == 0x03)
	{
	  /* NOTE(review): this arm picks INSN_T1 when !dp_op_sz and
	     INSN_T2 when dp_op_sz — the inverse of every other arm in
	     this function.  Presumably intentional for a conversion
	     whose destination precision is the opposite of the source
	     size bit, but verify against the ARM ARM VCVT encoding.  */
	  if (!dp_op_sz)
	    curr_insn_type = INSN_T1;
	  else
	    curr_insn_type = INSN_T2;
	}
      else if (opc3 & 0x01)
	{
	  /* Handle VCVT.  */
	  if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
	    {
	      if (!bit (arm_insn_r->arm_insn, 18))
		curr_insn_type = INSN_T2;
	      else
		{
		  if (dp_op_sz)
		    curr_insn_type = INSN_T1;
		  else
		    curr_insn_type = INSN_T2;
		}
	    }
	  /* Handle VCVT.  */
	  else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
	    {
	      if (dp_op_sz)
		curr_insn_type = INSN_T1;
	      else
		curr_insn_type = INSN_T2;
	    }
	  /* Handle VCVTB, VCVTT.  */
	  else if ((opc2 & 0x0e) == 0x02)
	    curr_insn_type = INSN_T2;
	  /* Handle VCMP, VCMPE.  */
	  else if ((opc2 & 0x0e) == 0x04)
	    curr_insn_type = INSN_T3;
	}
    }

  /* Translate the chosen class into the recorded register set.  */
  switch (curr_insn_type)
    {
    case INSN_T0:
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
      arm_insn_r->reg_rec_count = 2;
      break;

    case INSN_T1:
      reg_vd = reg_vd | (bit_d << 4);
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T2:
      /* Single-precision destination: Vd:D names an S register; record
	 the D register that contains it.  */
      reg_vd = (reg_vd << 1) | bit_d;
      record_buf[0] = reg_vd + ARM_D0_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    case INSN_T3:
      record_buf[0] = ARM_FPSCR_REGNUM;
      arm_insn_r->reg_rec_count = 1;
      break;

    default:
      gdb_assert_not_reached ("no decoding pattern found");
      break;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
11553
11554 /* Handling opcode 110 insns. */
11555
11556 static int
11557 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11558 {
11559 uint32_t op1, op1_ebit, coproc;
11560
11561 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11562 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11563 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11564
11565 if ((coproc & 0x0e) == 0x0a)
11566 {
11567 /* Handle extension register ld/st instructions. */
11568 if (!(op1 & 0x20))
11569 return arm_record_exreg_ld_st_insn (arm_insn_r);
11570
11571 /* 64-bit transfers between arm core and extension registers. */
11572 if ((op1 & 0x3e) == 0x04)
11573 return arm_record_exreg_ld_st_insn (arm_insn_r);
11574 }
11575 else
11576 {
11577 /* Handle coprocessor ld/st instructions. */
11578 if (!(op1 & 0x3a))
11579 {
11580 /* Store. */
11581 if (!op1_ebit)
11582 return arm_record_unsupported_insn (arm_insn_r);
11583 else
11584 /* Load. */
11585 return arm_record_unsupported_insn (arm_insn_r);
11586 }
11587
11588 /* Move to coprocessor from two arm core registers. */
11589 if (op1 == 0x4)
11590 return arm_record_unsupported_insn (arm_insn_r);
11591
11592 /* Move to two arm core registers from coprocessor. */
11593 if (op1 == 0x5)
11594 {
11595 uint32_t reg_t[2];
11596
11597 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11598 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11599 arm_insn_r->reg_rec_count = 2;
11600
11601 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11602 return 0;
11603 }
11604 }
11605 return arm_record_unsupported_insn (arm_insn_r);
11606 }
11607
11608 /* Handling opcode 111 insns. */
11609
11610 static int
11611 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11612 {
11613 uint32_t op, op1_ebit, coproc, bits_24_25;
11614 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11615 struct regcache *reg_cache = arm_insn_r->regcache;
11616
11617 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11618 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11619 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11620 op = bit (arm_insn_r->arm_insn, 4);
11621 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11622
11623 /* Handle arm SWI/SVC system call instructions. */
11624 if (bits_24_25 == 0x3)
11625 {
11626 if (tdep->arm_syscall_record != NULL)
11627 {
11628 ULONGEST svc_operand, svc_number;
11629
11630 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11631
11632 if (svc_operand) /* OABI. */
11633 svc_number = svc_operand - 0x900000;
11634 else /* EABI. */
11635 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11636
11637 return tdep->arm_syscall_record (reg_cache, svc_number);
11638 }
11639 else
11640 {
11641 printf_unfiltered (_("no syscall record support\n"));
11642 return -1;
11643 }
11644 }
11645 else if (bits_24_25 == 0x02)
11646 {
11647 if (op)
11648 {
11649 if ((coproc & 0x0e) == 0x0a)
11650 {
11651 /* 8, 16, and 32-bit transfer */
11652 return arm_record_vdata_transfer_insn (arm_insn_r);
11653 }
11654 else
11655 {
11656 if (op1_ebit)
11657 {
11658 /* MRC, MRC2 */
11659 uint32_t record_buf[1];
11660
11661 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11662 if (record_buf[0] == 15)
11663 record_buf[0] = ARM_PS_REGNUM;
11664
11665 arm_insn_r->reg_rec_count = 1;
11666 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11667 record_buf);
11668 return 0;
11669 }
11670 else
11671 {
11672 /* MCR, MCR2 */
11673 return -1;
11674 }
11675 }
11676 }
11677 else
11678 {
11679 if ((coproc & 0x0e) == 0x0a)
11680 {
11681 /* VFP data-processing instructions. */
11682 return arm_record_vfp_data_proc_insn (arm_insn_r);
11683 }
11684 else
11685 {
11686 /* CDP, CDP2 */
11687 return -1;
11688 }
11689 }
11690 }
11691 else
11692 {
11693 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11694
11695 if (op1 == 5)
11696 {
11697 if ((coproc & 0x0e) != 0x0a)
11698 {
11699 /* MRRC, MRRC2 */
11700 return -1;
11701 }
11702 }
11703 else if (op1 == 4 || op1 == 5)
11704 {
11705 if ((coproc & 0x0e) == 0x0a)
11706 {
11707 /* 64-bit transfers between ARM core and extension */
11708 return -1;
11709 }
11710 else if (op1 == 4)
11711 {
11712 /* MCRR, MCRR2 */
11713 return -1;
11714 }
11715 }
11716 else if (op1 == 0 || op1 == 1)
11717 {
11718 /* UNDEFINED */
11719 return -1;
11720 }
11721 else
11722 {
11723 if ((coproc & 0x0e) == 0x0a)
11724 {
11725 /* Extension register load/store */
11726 }
11727 else
11728 {
11729 /* STC, STC2, LDC, LDC2 */
11730 }
11731 return -1;
11732 }
11733 }
11734
11735 return -1;
11736 }
11737
11738 /* Handling opcode 000 insns. */
11739
11740 static int
11741 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11742 {
11743 uint32_t record_buf[8];
11744 uint32_t reg_src1 = 0;
11745
11746 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11747
11748 record_buf[0] = ARM_PS_REGNUM;
11749 record_buf[1] = reg_src1;
11750 thumb_insn_r->reg_rec_count = 2;
11751
11752 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11753
11754 return 0;
11755 }
11756
11757
11758 /* Handling opcode 001 insns. */
11759
11760 static int
11761 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11762 {
11763 uint32_t record_buf[8];
11764 uint32_t reg_src1 = 0;
11765
11766 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11767
11768 record_buf[0] = ARM_PS_REGNUM;
11769 record_buf[1] = reg_src1;
11770 thumb_insn_r->reg_rec_count = 2;
11771
11772 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11773
11774 return 0;
11775 }
11776
/* Handling opcode 010 insns.

   Covers Thumb load/store with register offset, loads from the literal
   pool, the special data-processing / branch-exchange group, and
   format 5 data-processing instructions.  Records written registers in
   RECORD_BUF and stored memory in RECORD_BUF_MEM.  Returns 0.  */

static int
thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];

  uint32_t reg_src1 = 0, reg_src2 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;

  ULONGEST u_regval[2] = {0};

  opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);

  if (bit (thumb_insn_r->arm_insn, 12))
    {
      /* Handle load/store register offset.  */
      uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);

      if (in_inclusive_range (opB, 4U, 7U))
	{
	  /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH.  */
	  /* Only the destination register Rd (bits 0..2) changes.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
	  record_buf[0] = reg_src1;
	  thumb_insn_r->reg_rec_count = 1;
	}
      else if (in_inclusive_range (opB, 0U, 2U))
	{
	  /* STR(2), STRB(2), STRH(2) .  */
	  /* The store address is Rn + Rm; record opB-dependent width.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
	  reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
	  regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
	  regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
	  if (0 == opB)
	    record_buf_mem[0] = 4;    /* STR (2).  */
	  else if (2 == opB)
	    record_buf_mem[0] = 1;    /* STRB (2).  */
	  else if (1 == opB)
	    record_buf_mem[0] = 2;    /* STRH (2).  */
	  record_buf_mem[1] = u_regval[0] + u_regval[1];
	  thumb_insn_r->mem_rec_count = 1;
	}
    }
  else if (bit (thumb_insn_r->arm_insn, 11))
    {
      /* Handle load from literal pool.  */
      /* LDR(3).  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else if (opcode1)
    {
      /* Special data instructions and branch and exchange */
      opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
      opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
      if ((3 == opcode2) && (!opcode3))
	{
	  /* Branch with exchange.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Format 8; special data processing insns.  */
	  /* Destination may be a high register: bit 7 is the high bit
	     of the register number.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
			   | bits (thumb_insn_r->arm_insn, 0, 2));
	  thumb_insn_r->reg_rec_count = 2;
	}
    }
  else
    {
      /* Format 5; data processing insns.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
      if (bit (thumb_insn_r->arm_insn, 7))
	{
	  /* Bit 7 selects the high register bank (r8..r15).  */
	  reg_src1 = reg_src1 + 8;
	}
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = reg_src1;
      thumb_insn_r->reg_rec_count = 2;
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
11868
11869 /* Handling opcode 001 insns. */
11870
11871 static int
11872 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11873 {
11874 struct regcache *reg_cache = thumb_insn_r->regcache;
11875 uint32_t record_buf[8], record_buf_mem[8];
11876
11877 uint32_t reg_src1 = 0;
11878 uint32_t opcode = 0, immed_5 = 0;
11879
11880 ULONGEST u_regval = 0;
11881
11882 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11883
11884 if (opcode)
11885 {
11886 /* LDR(1). */
11887 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11888 record_buf[0] = reg_src1;
11889 thumb_insn_r->reg_rec_count = 1;
11890 }
11891 else
11892 {
11893 /* STR(1). */
11894 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11895 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11896 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11897 record_buf_mem[0] = 4;
11898 record_buf_mem[1] = u_regval + (immed_5 * 4);
11899 thumb_insn_r->mem_rec_count = 1;
11900 }
11901
11902 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11903 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11904 record_buf_mem);
11905
11906 return 0;
11907 }
11908
11909 /* Handling opcode 100 insns. */
11910
11911 static int
11912 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11913 {
11914 struct regcache *reg_cache = thumb_insn_r->regcache;
11915 uint32_t record_buf[8], record_buf_mem[8];
11916
11917 uint32_t reg_src1 = 0;
11918 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11919
11920 ULONGEST u_regval = 0;
11921
11922 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11923
11924 if (3 == opcode)
11925 {
11926 /* LDR(4). */
11927 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11928 record_buf[0] = reg_src1;
11929 thumb_insn_r->reg_rec_count = 1;
11930 }
11931 else if (1 == opcode)
11932 {
11933 /* LDRH(1). */
11934 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11935 record_buf[0] = reg_src1;
11936 thumb_insn_r->reg_rec_count = 1;
11937 }
11938 else if (2 == opcode)
11939 {
11940 /* STR(3). */
11941 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11942 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11943 record_buf_mem[0] = 4;
11944 record_buf_mem[1] = u_regval + (immed_8 * 4);
11945 thumb_insn_r->mem_rec_count = 1;
11946 }
11947 else if (0 == opcode)
11948 {
11949 /* STRH(1). */
11950 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11951 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11952 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11953 record_buf_mem[0] = 2;
11954 record_buf_mem[1] = u_regval + (immed_5 * 2);
11955 thumb_insn_r->mem_rec_count = 1;
11956 }
11957
11958 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11959 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11960 record_buf_mem);
11961
11962 return 0;
11963 }
11964
/* Handling opcode 101 insns.

   Covers ADR / ADD (SP plus immediate) and the "miscellaneous 16-bit
   instructions" group: SP adjustment, sign/zero extension, PUSH/POP,
   byte-reverse, CBZ/CBNZ, SETEND/CPS, BKPT and hints.  Returns 0 on
   success, -1 for unsupported instructions (BKPT).  */

static int
thumb_record_misc (insn_decode_record *thumb_insn_r)
{
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t opcode = 0;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];
  uint32_t reg_src1;

  ULONGEST u_regval = 0;

  opcode = bits (thumb_insn_r->arm_insn, 11, 12);

  if (opcode == 0 || opcode == 1)
    {
      /* ADR and ADD (SP plus immediate) */

      /* Only the destination register Rd (bits 8..10) changes.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      record_buf[0] = reg_src1;
      thumb_insn_r->reg_rec_count = 1;
    }
  else
    {
      /* Miscellaneous 16-bit instructions */
      uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);

      switch (opcode2)
	{
	case 6:
	  /* SETEND and CPS */
	  break;
	case 0:
	  /* ADD/SUB (SP plus immediate) */
	  /* Note: reg_src1 is read here but only SP is recorded; the
	     destination of this encoding is SP itself.  */
	  reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 1: /* fall through  */
	case 3: /* fall through  */
	case 9: /* fall through  */
	case 11:
	  /* CBNZ, CBZ */
	  break;
	case 2:
	  /* SXTH, SXTB, UXTH, UXTB */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 4: /* fall through  */
	case 5:
	  /* PUSH.  */
	  /* Count the registers in the list; bit 8 adds LR.  The
	     stored block ends just below the current SP.  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;
	      register_bits = register_bits >> 1;
	    }
	  start_address = u_regval -  \
	    (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
	  thumb_insn_r->mem_rec_count = register_count;
	  /* Fill (length, address) pairs back to front.  */
	  while (register_count)
	    {
	      record_buf_mem[(register_count * 2) - 1] = start_address;
	      record_buf_mem[(register_count * 2) - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 10:
	  /* REV, REV16, REVSH */
	  record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
	  thumb_insn_r->reg_rec_count = 1;
	  break;
	case 12: /* fall through  */
	case 13:
	  /* POP.  */
	  /* Every register in the list is written, plus PC/flags and
	     SP (write-back).  */
	  register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;
	      register_bits = register_bits >> 1;
	      register_count++;
	    }
	  record_buf[index++] = ARM_PS_REGNUM;
	  record_buf[index++] = ARM_SP_REGNUM;
	  thumb_insn_r->reg_rec_count = index;
	  break;
	case 0xe:
	  /* BKPT insn.  */
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state,  disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* User hits breakpoint and type reverse, in that case, we need to go back with
	     previous CPSR and Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  thumb_insn_r->reg_rec_count = 2;
	  /* We need to save SPSR value, which is not yet done.  */
	  printf_unfiltered (_("Process record does not support instruction "
			       "0x%0x at address %s.\n"),
			     thumb_insn_r->arm_insn,
			     paddress (thumb_insn_r->gdbarch,
				       thumb_insn_r->this_addr));
	  return -1;

	case 0xf:
	  /* If-Then, and hints */
	  break;
	default:
	  return -1;
	};
    }

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return 0;
}
12094
/* Handling opcode 110 insns.

   Covers Thumb LDMIA (records loaded registers plus the base), STMIA
   (records the stored memory block), and the SWI/SVC system call,
   which is delegated to the OS-ABI syscall recorder.  Returns 0 on
   success, -1 when a syscall cannot be recorded.  */

static int
thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
  struct regcache *reg_cache = thumb_insn_r->regcache;

  uint32_t ret = 0; /* function return value: -1:record failure ;  0:success  */
  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
  opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);

  if (1 == opcode2)
    {

      /* LDMIA.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      /* Every register in the list is loaded; the base register is
	 also recorded (write-back form).  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    record_buf[index++] = register_count;
	  register_bits = register_bits >> 1;
	  register_count++;
	}
      record_buf[index++] = reg_src1;
      thumb_insn_r->reg_rec_count = index;
    }
  else if (0 == opcode2)
    {
      /* It handles both STMIA.  */
      register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
      /* Get Rn.  */
      reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
      /* Count the registers stored; the block starts at Rn.  */
      while (register_bits)
	{
	  if (register_bits & 0x00000001)
	    register_count++;
	  register_bits = register_bits >> 1;
	}
      start_address = u_regval;
      thumb_insn_r->mem_rec_count = register_count;
      /* Fill (length, address) pairs back to front.  */
      while (register_count)
	{
	  record_buf_mem[(register_count * 2) - 1] = start_address;
	  record_buf_mem[(register_count * 2) - 2] = 4;
	  start_address = start_address + 4;
	  register_count--;
	}
    }
  else if (0x1F == opcode1)
    {
      /* Handle arm syscall insn.  */
      if (tdep->arm_syscall_record != NULL)
	{
	  /* The syscall number is passed in r7 (EABI convention).  */
	  regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
	  ret = tdep->arm_syscall_record (reg_cache, u_regval);
	}
      else
	{
	  printf_unfiltered (_("no syscall record support\n"));
	  return -1;
	}
    }

  /* B (1), conditional branch is automatically taken care in process_record,
    as PC is saved there.  */

  REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
	     record_buf_mem);

  return ret;
}
12178
12179 /* Handling opcode 111 insns. */
12180
12181 static int
12182 thumb_record_branch (insn_decode_record *thumb_insn_r)
12183 {
12184 uint32_t record_buf[8];
12185 uint32_t bits_h = 0;
12186
12187 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12188
12189 if (2 == bits_h || 3 == bits_h)
12190 {
12191 /* BL */
12192 record_buf[0] = ARM_LR_REGNUM;
12193 thumb_insn_r->reg_rec_count = 1;
12194 }
12195 else if (1 == bits_h)
12196 {
12197 /* BLX(1). */
12198 record_buf[0] = ARM_PS_REGNUM;
12199 record_buf[1] = ARM_LR_REGNUM;
12200 thumb_insn_r->reg_rec_count = 2;
12201 }
12202
12203 /* B(2) is automatically taken care in process_record, as PC is
12204 saved there. */
12205
12206 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12207
12208 return 0;
12209 }
12210
/* Handler for thumb2 load/store multiple instructions.

   Covers RFE/SRS (op 0 or 3) and LDM/STM in both increment-after and
   decrement-before forms (op 1 or 2).  Loads record the destination
   registers plus the base and CPSR; stores record the written memory
   block plus the base and CPSR.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.  */
	  /* Every listed register is loaded; the base and CPSR are
	     recorded as well.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Start address calculation for STM/STMIA/STMEA
		 (increment after): the block starts at Rn.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Start address calculation for STMDB/STMFD (decrement
		 before): the block ends just below Rn.  */
	      start_address = u_regval - register_count * 4;
	    }

	  /* Record the stored block as (length, address) pairs, plus
	     the base register (write-back) and CPSR.  */
	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
12304
12305 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12306 instructions. */
12307
static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;

  ULONGEST u_regval[2];

  /* Sub-opcode fields of the 32-bit Thumb encoding; together they pick
     the exclusive, dual or table-branch form.  */
  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms: only registers change, no memory needs recording.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
        {
          /* Everything except TBB/TBH writes a first destination
             register (bits 12-15).  */
          reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
          record_buf[0] = reg_dest1;
          record_buf[1] = ARM_PS_REGNUM;
          thumb2_insn_r->reg_rec_count = 2;
        }

      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
        {
          /* LDRD/LDREXD also write a second destination (bits 8-11).  */
          reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
          record_buf[2] = reg_dest2;
          thumb2_insn_r->reg_rec_count = 3;
        }
    }
  else
    {
      /* Store forms: record the memory that will be overwritten.  */
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
        {
          /* Handle STREX.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          address = u_regval[0] + (offset_imm * 4);
          record_buf_mem[0] = 4;
          record_buf_mem[1] = address;
          thumb2_insn_r->mem_rec_count = 1;
          /* Rd (bits 0-3) receives the exclusive-store status result.  */
          reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
          record_buf[0] = reg_rd;
          thumb2_insn_r->reg_rec_count = 1;
        }
      else if (1 == op1 && 0 == op2)
        {
          /* Byte/halfword/doubleword exclusive stores; the status
             register Rd is in bits 0-3.  */
          reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
          record_buf[0] = reg_rd;
          thumb2_insn_r->reg_rec_count = 1;
          address = u_regval[0];
          record_buf_mem[1] = address;

          if (4 == op3)
            {
              /* Handle STREXB.  */
              record_buf_mem[0] = 1;
              thumb2_insn_r->mem_rec_count = 1;
            }
          else if (5 == op3)
            {
              /* Handle STREXH.  */
              record_buf_mem[0] = 2 ;
              thumb2_insn_r->mem_rec_count = 1;
            }
          else if (7 == op3)
            {
              /* Handle STREXD: two consecutive words are written.  */
              address = u_regval[0];
              record_buf_mem[0] = 4;
              record_buf_mem[2] = 4;
              record_buf_mem[3] = address + 4;
              thumb2_insn_r->mem_rec_count = 2;
            }
        }
      else
        {
          /* STRD (immediate): a doubleword store with optional
             pre-indexing (bit 24) and add/subtract (bit 23).  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

          if (bit (thumb2_insn_r->arm_insn, 24))
            {
              if (bit (thumb2_insn_r->arm_insn, 23))
                offset_addr = u_regval[0] + (offset_imm * 4);
              else
                offset_addr = u_regval[0] - (offset_imm * 4);

              address = offset_addr;
            }
          else
            address = u_regval[0];

          record_buf_mem[0] = 4;
          record_buf_mem[1] = address;
          record_buf_mem[2] = 4;
          record_buf_mem[3] = address + 4;
          thumb2_insn_r->mem_rec_count = 2;
          /* The base register may be written back.  */
          record_buf[0] = reg_rn;
          thumb2_insn_r->reg_rec_count = 1;
        }
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12421
12422 /* Handler for thumb2 data processing (shift register and modified immediate)
12423 instructions. */
12424
12425 static int
12426 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12427 {
12428 uint32_t reg_rd, op;
12429 uint32_t record_buf[8];
12430
12431 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12432 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12433
12434 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12435 {
12436 record_buf[0] = ARM_PS_REGNUM;
12437 thumb2_insn_r->reg_rec_count = 1;
12438 }
12439 else
12440 {
12441 record_buf[0] = reg_rd;
12442 record_buf[1] = ARM_PS_REGNUM;
12443 thumb2_insn_r->reg_rec_count = 2;
12444 }
12445
12446 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12447 record_buf);
12448 return ARM_RECORD_SUCCESS;
12449 }
12450
12451 /* Generic handler for thumb2 instructions which effect destination and PS
12452 registers. */
12453
12454 static int
12455 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12456 {
12457 uint32_t reg_rd;
12458 uint32_t record_buf[8];
12459
12460 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12461
12462 record_buf[0] = reg_rd;
12463 record_buf[1] = ARM_PS_REGNUM;
12464 thumb2_insn_r->reg_rec_count = 2;
12465
12466 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12467 record_buf);
12468 return ARM_RECORD_SUCCESS;
12469 }
12470
12471 /* Handler for thumb2 branch and miscellaneous control instructions. */
12472
12473 static int
12474 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12475 {
12476 uint32_t op, op1, op2;
12477 uint32_t record_buf[8];
12478
12479 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12480 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12481 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12482
12483 /* Handle MSR insn. */
12484 if (!(op1 & 0x2) && 0x38 == op)
12485 {
12486 if (!(op2 & 0x3))
12487 {
12488 /* CPSR is going to be changed. */
12489 record_buf[0] = ARM_PS_REGNUM;
12490 thumb2_insn_r->reg_rec_count = 1;
12491 }
12492 else
12493 {
12494 arm_record_unsupported_insn(thumb2_insn_r);
12495 return -1;
12496 }
12497 }
12498 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12499 {
12500 /* BLX. */
12501 record_buf[0] = ARM_PS_REGNUM;
12502 record_buf[1] = ARM_LR_REGNUM;
12503 thumb2_insn_r->reg_rec_count = 2;
12504 }
12505
12506 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12507 record_buf);
12508 return ARM_RECORD_SUCCESS;
12509 }
12510
12511 /* Handler for thumb2 store single data item instructions. */
12512
static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  /* op1 (bits 21-23) selects the access width; op2 (bits 6-11)
     refines the addressing form.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding.  */
      /* 12-bit unsigned immediate offset added to the base.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
        {
          /* Handle STRB (register): base plus shifted index.  */
          reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
          regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
          shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
          offset_addr = u_regval[1] << shift_imm;
          address = u_regval[0] + offset_addr;
        }
      else
        {
          /* 8-bit immediate form; bit 10 selects pre-indexing and
             bit 9 selects add vs subtract.  */
          offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
          if (bit (thumb2_insn_r->arm_insn, 10))
            {
              if (bit (thumb2_insn_r->arm_insn, 9))
                offset_addr = u_regval[0] + offset_imm;
              else
                offset_addr = u_regval[0] - offset_imm;

              address = offset_addr;
            }
          else
            address = u_regval[0];
        }
    }

  /* Record the width of the store selected by op1.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
        record_buf_mem[0] = 1;
        break;
      /* Store half word instructions.  */
      case 1:
      case 5:
        record_buf_mem[0] = 2;
        break;
      /* Store word instructions.  */
      case 2:
      case 6:
        record_buf_mem[0] = 4;
        break;

      default:
        gdb_assert_not_reached ("no decoding pattern found");
        break;
    }

  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  /* The base register may be written back by the indexed forms.  */
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
             record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
             record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
12600
12601 /* Handler for thumb2 load memory hints instructions. */
12602
12603 static int
12604 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12605 {
12606 uint32_t record_buf[8];
12607 uint32_t reg_rt, reg_rn;
12608
12609 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12610 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12611
12612 if (ARM_PC_REGNUM != reg_rt)
12613 {
12614 record_buf[0] = reg_rt;
12615 record_buf[1] = reg_rn;
12616 record_buf[2] = ARM_PS_REGNUM;
12617 thumb2_insn_r->reg_rec_count = 3;
12618
12619 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12620 record_buf);
12621 return ARM_RECORD_SUCCESS;
12622 }
12623
12624 return ARM_RECORD_FAILURE;
12625 }
12626
12627 /* Handler for thumb2 load word instructions. */
12628
12629 static int
12630 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12631 {
12632 uint32_t record_buf[8];
12633
12634 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12635 record_buf[1] = ARM_PS_REGNUM;
12636 thumb2_insn_r->reg_rec_count = 2;
12637
12638 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12639 record_buf);
12640 return ARM_RECORD_SUCCESS;
12641 }
12642
12643 /* Handler for thumb2 long multiply, long multiply accumulate, and
12644 divide instructions. */
12645
12646 static int
12647 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12648 {
12649 uint32_t opcode1 = 0, opcode2 = 0;
12650 uint32_t record_buf[8];
12651
12652 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12653 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12654
12655 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12656 {
12657 /* Handle SMULL, UMULL, SMULAL. */
12658 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12659 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12660 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12661 record_buf[2] = ARM_PS_REGNUM;
12662 thumb2_insn_r->reg_rec_count = 3;
12663 }
12664 else if (1 == opcode1 || 3 == opcode2)
12665 {
12666 /* Handle SDIV and UDIV. */
12667 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12668 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12669 record_buf[2] = ARM_PS_REGNUM;
12670 thumb2_insn_r->reg_rec_count = 3;
12671 }
12672 else
12673 return ARM_RECORD_FAILURE;
12674
12675 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12676 record_buf);
12677 return ARM_RECORD_SUCCESS;
12678 }
12679
12680 /* Record handler for thumb32 coprocessor instructions. */
12681
12682 static int
12683 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12684 {
12685 if (bit (thumb2_insn_r->arm_insn, 25))
12686 return arm_record_coproc_data_proc (thumb2_insn_r);
12687 else
12688 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12689 }
12690
12691 /* Record handler for advance SIMD structure load/store instructions. */
12692
12693 static int
12694 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12695 {
12696 struct regcache *reg_cache = thumb2_insn_r->regcache;
12697 uint32_t l_bit, a_bit, b_bits;
12698 uint32_t record_buf[128], record_buf_mem[128];
12699 uint32_t reg_rn, reg_vd, address, f_elem;
12700 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12701 uint8_t f_ebytes;
12702
12703 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12704 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12705 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12706 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12707 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12708 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12709 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12710 f_elem = 8 / f_ebytes;
12711
12712 if (!l_bit)
12713 {
12714 ULONGEST u_regval = 0;
12715 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12716 address = u_regval;
12717
12718 if (!a_bit)
12719 {
12720 /* Handle VST1. */
12721 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12722 {
12723 if (b_bits == 0x07)
12724 bf_regs = 1;
12725 else if (b_bits == 0x0a)
12726 bf_regs = 2;
12727 else if (b_bits == 0x06)
12728 bf_regs = 3;
12729 else if (b_bits == 0x02)
12730 bf_regs = 4;
12731 else
12732 bf_regs = 0;
12733
12734 for (index_r = 0; index_r < bf_regs; index_r++)
12735 {
12736 for (index_e = 0; index_e < f_elem; index_e++)
12737 {
12738 record_buf_mem[index_m++] = f_ebytes;
12739 record_buf_mem[index_m++] = address;
12740 address = address + f_ebytes;
12741 thumb2_insn_r->mem_rec_count += 1;
12742 }
12743 }
12744 }
12745 /* Handle VST2. */
12746 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12747 {
12748 if (b_bits == 0x09 || b_bits == 0x08)
12749 bf_regs = 1;
12750 else if (b_bits == 0x03)
12751 bf_regs = 2;
12752 else
12753 bf_regs = 0;
12754
12755 for (index_r = 0; index_r < bf_regs; index_r++)
12756 for (index_e = 0; index_e < f_elem; index_e++)
12757 {
12758 for (loop_t = 0; loop_t < 2; loop_t++)
12759 {
12760 record_buf_mem[index_m++] = f_ebytes;
12761 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12762 thumb2_insn_r->mem_rec_count += 1;
12763 }
12764 address = address + (2 * f_ebytes);
12765 }
12766 }
12767 /* Handle VST3. */
12768 else if ((b_bits & 0x0e) == 0x04)
12769 {
12770 for (index_e = 0; index_e < f_elem; index_e++)
12771 {
12772 for (loop_t = 0; loop_t < 3; loop_t++)
12773 {
12774 record_buf_mem[index_m++] = f_ebytes;
12775 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12776 thumb2_insn_r->mem_rec_count += 1;
12777 }
12778 address = address + (3 * f_ebytes);
12779 }
12780 }
12781 /* Handle VST4. */
12782 else if (!(b_bits & 0x0e))
12783 {
12784 for (index_e = 0; index_e < f_elem; index_e++)
12785 {
12786 for (loop_t = 0; loop_t < 4; loop_t++)
12787 {
12788 record_buf_mem[index_m++] = f_ebytes;
12789 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12790 thumb2_insn_r->mem_rec_count += 1;
12791 }
12792 address = address + (4 * f_ebytes);
12793 }
12794 }
12795 }
12796 else
12797 {
12798 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12799
12800 if (bft_size == 0x00)
12801 f_ebytes = 1;
12802 else if (bft_size == 0x01)
12803 f_ebytes = 2;
12804 else if (bft_size == 0x02)
12805 f_ebytes = 4;
12806 else
12807 f_ebytes = 0;
12808
12809 /* Handle VST1. */
12810 if (!(b_bits & 0x0b) || b_bits == 0x08)
12811 thumb2_insn_r->mem_rec_count = 1;
12812 /* Handle VST2. */
12813 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12814 thumb2_insn_r->mem_rec_count = 2;
12815 /* Handle VST3. */
12816 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12817 thumb2_insn_r->mem_rec_count = 3;
12818 /* Handle VST4. */
12819 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12820 thumb2_insn_r->mem_rec_count = 4;
12821
12822 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12823 {
12824 record_buf_mem[index_m] = f_ebytes;
12825 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12826 }
12827 }
12828 }
12829 else
12830 {
12831 if (!a_bit)
12832 {
12833 /* Handle VLD1. */
12834 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12835 thumb2_insn_r->reg_rec_count = 1;
12836 /* Handle VLD2. */
12837 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12838 thumb2_insn_r->reg_rec_count = 2;
12839 /* Handle VLD3. */
12840 else if ((b_bits & 0x0e) == 0x04)
12841 thumb2_insn_r->reg_rec_count = 3;
12842 /* Handle VLD4. */
12843 else if (!(b_bits & 0x0e))
12844 thumb2_insn_r->reg_rec_count = 4;
12845 }
12846 else
12847 {
12848 /* Handle VLD1. */
12849 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12850 thumb2_insn_r->reg_rec_count = 1;
12851 /* Handle VLD2. */
12852 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12853 thumb2_insn_r->reg_rec_count = 2;
12854 /* Handle VLD3. */
12855 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12856 thumb2_insn_r->reg_rec_count = 3;
12857 /* Handle VLD4. */
12858 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12859 thumb2_insn_r->reg_rec_count = 4;
12860
12861 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12862 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12863 }
12864 }
12865
12866 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12867 {
12868 record_buf[index_r] = reg_rn;
12869 thumb2_insn_r->reg_rec_count += 1;
12870 }
12871
12872 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12873 record_buf);
12874 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12875 record_buf_mem);
12876 return 0;
12877 }
12878
12879 /* Decodes thumb2 instruction type and invokes its record handler. */
12880
static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* Top-level opcode fields of the 32-bit Thumb encoding; see the
     "32-bit Thumb instruction encoding" table.  NOTE(review): the
     return type is unsigned yet -1 is returned on failure; callers
     appear to rely on comparison against 0 — confirm.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
        {
          /* Load/store multiple instruction.  */
          return thumb2_record_ld_st_multiple (thumb2_insn_r);
        }
      else if ((op2 & 0x64) == 0x4)
        {
          /* Load/store (dual/exclusive) and table branch instruction.  */
          return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
        }
      else if ((op2 & 0x60) == 0x20)
        {
          /* Data-processing (shifted register).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }
  else if (op1 == 0x02)
    {
      if (op)
        {
          /* Branches and miscellaneous control instructions.  */
          return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
        }
      else if (op2 & 0x20)
        {
          /* Data-processing (plain binary immediate) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else
        {
          /* Data-processing (modified immediate).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
        {
          /* Store single data item.  */
          return thumb2_record_str_single_data (thumb2_insn_r);
        }
      else if (!((op2 & 0x71) ^ 0x10))
        {
          /* Advanced SIMD or structure load/store instructions.  */
          return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x01))
        {
          /* Load byte, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x03))
        {
          /* Load halfword, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x05))
        {
          /* Load word instruction.  */
          return thumb2_record_ld_word (thumb2_insn_r);
        }
      else if (!((op2 & 0x70) ^ 0x20))
        {
          /* Data-processing (register) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x30))
        {
          /* Multiply, multiply accumulate, abs diff instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x38))
        {
          /* Long multiply, long multiply accumulate, and divide.  */
          return thumb2_record_lmul_lmla_div (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }

  /* Unrecognised encoding.  */
  return -1;
}
12982
12983 namespace {
12984 /* Abstract memory reader. */
12985
class abstract_memory_reader
{
public:
  /* Read LEN bytes of target memory at address MEMADDR, placing the
     results in GDB's memory at BUF.  Return true on success.

     Virtual so the self tests can substitute canned instruction bytes
     for a live target (see instruction_reader_thumb below).  */

  virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
};
12994
12995 /* Instruction reader from real target. */
12996
12997 class instruction_reader : public abstract_memory_reader
12998 {
12999 public:
13000 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13001 {
13002 if (target_read_memory (memaddr, buf, len))
13003 return false;
13004 else
13005 return true;
13006 }
13007 };
13008
13009 } // namespace
13010
/* Extract an arm/thumb/thumb2 insn depending on the size; return 0 on
   success and a positive value on failure.  */
13013
13014 static int
13015 extract_arm_insn (abstract_memory_reader& reader,
13016 insn_decode_record *insn_record, uint32_t insn_size)
13017 {
13018 gdb_byte buf[insn_size];
13019
13020 memset (&buf[0], 0, insn_size);
13021
13022 if (!reader.read (insn_record->this_addr, buf, insn_size))
13023 return 1;
13024 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13025 insn_size,
13026 gdbarch_byte_order_for_code (insn_record->gdbarch));
13027 return 0;
13028 }
13029
13030 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13031
13032 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13033 dispatch it. */
13034
13035 static int
13036 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13037 record_type_t record_type, uint32_t insn_size)
13038 {
13039
13040 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13041 instruction. */
13042 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13043 {
13044 arm_record_data_proc_misc_ld_str, /* 000. */
13045 arm_record_data_proc_imm, /* 001. */
13046 arm_record_ld_st_imm_offset, /* 010. */
13047 arm_record_ld_st_reg_offset, /* 011. */
13048 arm_record_ld_st_multiple, /* 100. */
13049 arm_record_b_bl, /* 101. */
13050 arm_record_asimd_vfp_coproc, /* 110. */
13051 arm_record_coproc_data_proc /* 111. */
13052 };
13053
13054 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13055 instruction. */
13056 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13057 { \
13058 thumb_record_shift_add_sub, /* 000. */
13059 thumb_record_add_sub_cmp_mov, /* 001. */
13060 thumb_record_ld_st_reg_offset, /* 010. */
13061 thumb_record_ld_st_imm_offset, /* 011. */
13062 thumb_record_ld_st_stack, /* 100. */
13063 thumb_record_misc, /* 101. */
13064 thumb_record_ldm_stm_swi, /* 110. */
13065 thumb_record_branch /* 111. */
13066 };
13067
13068 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13069 uint32_t insn_id = 0;
13070
13071 if (extract_arm_insn (reader, arm_record, insn_size))
13072 {
13073 if (record_debug)
13074 {
13075 printf_unfiltered (_("Process record: error reading memory at "
13076 "addr %s len = %d.\n"),
13077 paddress (arm_record->gdbarch,
13078 arm_record->this_addr), insn_size);
13079 }
13080 return -1;
13081 }
13082 else if (ARM_RECORD == record_type)
13083 {
13084 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13085 insn_id = bits (arm_record->arm_insn, 25, 27);
13086
13087 if (arm_record->cond == 0xf)
13088 ret = arm_record_extension_space (arm_record);
13089 else
13090 {
13091 /* If this insn has fallen into extension space
13092 then we need not decode it anymore. */
13093 ret = arm_handle_insn[insn_id] (arm_record);
13094 }
13095 if (ret != ARM_RECORD_SUCCESS)
13096 {
13097 arm_record_unsupported_insn (arm_record);
13098 ret = -1;
13099 }
13100 }
13101 else if (THUMB_RECORD == record_type)
13102 {
13103 /* As thumb does not have condition codes, we set negative. */
13104 arm_record->cond = -1;
13105 insn_id = bits (arm_record->arm_insn, 13, 15);
13106 ret = thumb_handle_insn[insn_id] (arm_record);
13107 if (ret != ARM_RECORD_SUCCESS)
13108 {
13109 arm_record_unsupported_insn (arm_record);
13110 ret = -1;
13111 }
13112 }
13113 else if (THUMB2_RECORD == record_type)
13114 {
13115 /* As thumb does not have condition codes, we set negative. */
13116 arm_record->cond = -1;
13117
13118 /* Swap first half of 32bit thumb instruction with second half. */
13119 arm_record->arm_insn
13120 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13121
13122 ret = thumb2_record_decode_insn_handler (arm_record);
13123
13124 if (ret != ARM_RECORD_SUCCESS)
13125 {
13126 arm_record_unsupported_insn (arm_record);
13127 ret = -1;
13128 }
13129 }
13130 else
13131 {
13132 /* Throw assertion. */
13133 gdb_assert_not_reached ("not a valid instruction, could not decode");
13134 }
13135
13136 return ret;
13137 }
13138
13139 #if GDB_SELF_TEST
13140 namespace selftests {
13141
13142 /* Provide both 16-bit and 32-bit thumb instructions. */
13143
class instruction_reader_thumb : public abstract_memory_reader
{
public:
  /* ENDIAN is the code byte order of the architecture under test;
     INSNS is a static array of 16-bit Thumb halfwords that backs the
     simulated memory.  The array is borrowed, not copied.  */
  template<size_t SIZE>
  instruction_reader_thumb (enum bfd_endian endian,
			    const uint16_t (&insns)[SIZE])
    : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
  {}

  /* Serve 2- or 4-byte, halfword-aligned reads from the array instead
     of a live target.  NOTE(review): the bounds SELF_CHECK only covers
     the first halfword; a 4-byte read at the last halfword would index
     one past the array — confirm callers never do that.  */
  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    SELF_CHECK (len == 4 || len == 2);
    SELF_CHECK (memaddr % 2 == 0);
    SELF_CHECK ((memaddr / 2) < m_insns_size);

    store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
    if (len == 4)
      {
	store_unsigned_integer (&buf[2], 2, m_endian,
				m_insns[memaddr / 2 + 1]);
      }
    return true;
  }

private:
  enum bfd_endian m_endian;   /* Byte order used to serialize halfwords.  */
  const uint16_t *m_insns;    /* Borrowed pointer to the insn array.  */
  size_t m_insns_size;        /* Number of halfwords in m_insns.  */
};
13173
static void
arm_record_test (void)
{
  struct gdbarch_info info;
  gdbarch_info_init (&info);
  info.bfd_arch_info = bfd_scan_arch ("arm");

  struct gdbarch *gdbarch = gdbarch_find_by_info (info);

  SELF_CHECK (gdbarch != NULL);

  /* 16-bit Thumb instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* db b2	uxtb	r3, r3 */
      0xb2db,
      /* cd 58	ldr	r5, [r1, r3] */
      0x58cd,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			   THUMB_INSN_SIZE_BYTES);

    /* UXTB writes exactly one register, r3.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 3);

    /* Advance past the first halfword and decode the LDR.  */
    arm_record.this_addr += 2;
    ret = decode_insn (reader, &arm_record, THUMB_RECORD,
		       THUMB_INSN_SIZE_BYTES);

    /* LDR writes exactly one register, r5.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 5);
  }

  /* 32-bit Thumb-2 instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* 1d ee 70 7f	 mrc	15, 0, r7, cr13, cr0, {3} */
      0xee1d, 0x7f70,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			   THUMB2_INSN_SIZE_BYTES);

    /* MRC writes exactly one register, r7.  */
    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 7);
  }
}
13242 } // namespace selftests
13243 #endif /* GDB_SELF_TEST */
13244
13245 /* Cleans up local record registers and memory allocations. */
13246
13247 static void
13248 deallocate_reg_mem (insn_decode_record *record)
13249 {
13250 xfree (record->arm_regs);
13251 xfree (record->arm_mems);
13252 }
13253
13254
13255 /* Parse the current instruction and record the values of the registers and
13256 memory that will be changed in current instruction to record_arch_list".
13257 Return -1 if something is wrong. */
13258
int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
		    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* return value: -1:record failure ;  0:success  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;


  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
			  "addr = %s\n",
      paddress (gdbarch, arm_record.this_addr));
    }

  /* First fetch just two bytes — enough to classify a Thumb insn as
     16-bit or 32-bit before the full decode below.  */
  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record.gdbarch,
				       arm_record.this_addr), 2);
	}
      return -1;
    }

  /* Check the insn, whether it is thumb or arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);


  if (!(u_regval & t_bit))
    {
      /* We are decoding arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* is it thumb2 insn?  Opcode prefixes 0b11101/0b11110/0b11111
	 introduce a 32-bit Thumb-2 encoding.  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
	{
	  ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			     THUMB2_INSN_SIZE_BYTES);
	}
      else
	{
	  /* We are decoding thumb insn.  */
	  ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			     THUMB_INSN_SIZE_BYTES);
	}
    }

  if (0 == ret)
    {
      /* Record registers.  The PC is always recorded since every insn
	 advances it.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_reg
		  (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
		ret = -1;
	    }
	}
      /* Record memories.  */
      if (arm_record.arm_mems)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_mem
		  ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
		   arm_record.arm_mems[no_of_rec].len))
		ret = -1;
	    }
	}

      if (record_full_arch_list_add_end ())
	ret = -1;
    }


  deallocate_reg_mem (&arm_record);

  return ret;
}
This page took 0.463533 seconds and 5 git commands to generate.